code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE PolyKinds, KindSignatures #-}
module T11203 where
-- | A type whose two arguments are declared with a single kind
-- variable @k@, so any application forces both arguments to share a kind.
data SameKind :: k -> k -> *
-- 'a' and 'b' are annotated with distinct kind variables k1 and k2, but
-- the 'MkQ' field applies @SameKind a b@, which forces k1 and k2 to
-- unify (this module is GHC kind-inference test T11203).
data Q (a :: k1) (b :: k2) c = MkQ (SameKind a b)
| shlevy/ghc | testsuite/tests/polykinds/T11203.hs | bsd-3-clause | 145 | 0 | 8 | 33 | 51 | 32 | 19 | -1 | -1 |
{-# LANGUAGE GADTs #-}
module Ex1 where
-- | A very simple newtype wrapper
newtype Wrap a = Wrap a
-- | A type-indexed representation of a type
-- (a singleton GADT: each constructor pins down the index @ty@).
data STy ty where
  SIntTy :: STy Int -- ^ witness that @ty ~ Int@
  SBoolTy :: STy Bool -- ^ witness that @ty ~ Bool@
  SMaybeTy :: STy a -> STy (Maybe a) -- ^ witness for @Maybe a@, carrying the element witness
-- | Produce a default \"zero\" value for the type witnessed by the
-- given singleton: @0@ for 'Int', 'False' for 'Bool', 'Nothing' for 'Maybe'.
zero :: STy ty -> ty
zero sty = case sty of
  SIntTy     -> 0
  SBoolTy    -> False
  SMaybeTy _ -> Nothing
| bwmcadams/lambdaconf-2015 | speakers/goldfirere/exercises/Ex1.hs | artistic-2.0 | 380 | 0 | 9 | 99 | 101 | 56 | 45 | 11 | 1 |
{-# LANGUAGE CPP #-}
module Application (getAlias) where
import Prelude ()
import BasicPrelude
import Control.Error (EitherT(..), eitherT, noteT, hoistMaybe, hushT)
import qualified Data.Text as T
import Network.Wai (Application, queryString)
import Network.HTTP.Types (ok200, badRequest400, notFound404)
import Network.Wai.Util (stringHeaders, json, queryLookup)
import Network.URI (URI(..), parseAbsoluteURI)
import Database.SQLite.Simple (query, Connection, setTrace)
import qualified Ripple.Federation as Ripple
import Scrape
import Records
import MustacheTemplates
#include "PathHelpers.hs"
-- Pre-computed CORS header allowing any origin.  NOTE(review): this is a
-- partial top-level pattern binding; it relies on 'stringHeaders'
-- returning @Just@ a one-element list for this constant input.
Just [cors] = stringHeaders [("Access-Control-Allow-Origin", "*")]
-- | Take the first element of a list, failing with the given error
-- value when the list is empty.
listToEitherT :: (Monad m) => e -> [a] -> EitherT e m a
listToEitherT errVal xs = noteT' errVal (listToMaybe xs)
-- | Lift a 'Maybe' into 'EitherT', substituting the given error value
-- for 'Nothing'.
noteT' :: (Monad m) => e -> Maybe a -> EitherT e m a
noteT' errVal m = noteT errVal (hoistMaybe m)
-- | WAI endpoint resolving a @(domain, destination)@ query to a
-- federation 'Alias'.  Tries, in order: a direct database row, the
-- domain's configured proxy resolver, and the domain's URL pattern
-- scraped for a Ripple address.  Errors are rendered as JSON.
getAlias :: URI -> Connection -> Application
getAlias _ db req = eitherT err return $ do
	-- Logs every executed SQL statement; presumably a debugging aid --
	-- TODO confirm this is intended to stay on in production.
	liftIO $ setTrace db (Just print)
	q <- (,) <$> fromQ "domain" <*> fromQ "destination"
	-- Bail out early if the domain is not served here at all.
	domain <- listToEitherT nodomain =<< query' "SELECT domain,pattern,proxy_url FROM domains WHERE domain LIKE ? LIMIT 1" [fst q]
	alias <- query' "SELECT alias,domain,ripple,dt FROM aliases WHERE domain LIKE ? AND alias LIKE ? LIMIT 1" q
	a <- noteT noalias $
		-- First choice: an alias row straight from the database.
		(hoistMaybe $ listToMaybe alias) <|>
		(do
			-- Second choice: delegate resolution to the domain's proxy.
			proxy <- hoistMaybe $ proxy domain
			resolved <- hushT $ EitherT $ liftIO $
				Ripple.resolveAgainst (Ripple.Alias (snd q) (fst q)) proxy
			return Alias {
				alias = Ripple.destination $ Ripple.alias resolved,
				domain = Ripple.domain $ Ripple.alias resolved,
				ripple = Ripple.ripple $ resolved,
				dt = Ripple.dt $ resolved
			}
		) <|>
		(do
			-- Third choice: substitute the destination into the domain's
			-- URL pattern and scrape the page for a Ripple address.
			pat <- hoistMaybe $ pattern domain
			uri <- hoistMaybe $ parseAbsoluteURI
				(textToString $ T.replace (s"%s") (snd q) pat)
			result <- hushT $ EitherT $ liftIO $ scrapeRippleAddress uri
			address <- hoistMaybe $ readMay $ decodeUtf8 result
			return Alias {
				alias = snd q,
				domain = fst q,
				ripple = address,
				dt = Nothing
			}
		)
	json ok200 [cors] (a :: Alias)
	where
	query' sql = liftIO . query db (s sql)
	nodomain = Error NoSuchDomain "That domain is not served here."
	noalias = Error NoSuchUser "No such alias on that domain."
	-- Unknown user maps to 404; every other error is a 400.
	err e@(Error NoSuchUser _) = json notFound404 [cors] e
	err e = json badRequest400 [cors] e
	fromQ k = noteT' (Error InvalidParams ("No " ++ k ++ " provided.")) $
		queryLookup k (queryString req)
| singpolyma/RippleUnion-Federation | Application.hs | isc | 2,448 | 32 | 20 | 467 | 871 | 455 | 416 | 57 | 2 |
-- | The infinite Fibonacci sequence: 0, 1, 1, 2, 3, 5, ...
-- Each step carries the current pair @(a, b)@ forward as @(b, a + b)@
-- and projects out the first component.
fibs :: [Integer]
fibs = map fst (iterate (\(a, b) -> (b, a + b)) (0, 1))
-- Problems/Problem001.hs
module Problems.Problem001 (p1) where
-- | Print the answer when run as a script.
main = print p1

-- | Project Euler problem 1: the sum of all natural numbers below 1000
-- that are multiples of 3 or of 5.
p1 :: Int
p1 = sum [n | n <- [1..999], n `mod` 3 == 0 || n `mod` 5 == 0]
| Sgoettschkes/learning | haskell/ProjectEuler/src/Problems/Problem001.hs | mit | 160 | 0 | 13 | 34 | 76 | 44 | 32 | 4 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module UnitTest.RequestParse.MessengerCode where
import Data.Aeson (Value)
import Data.Yaml.TH (decodeFile)
import Test.Tasty as Tasty
import Web.Facebook.Messenger
import UnitTest.Internal
--------------------
-- MESSENGER CODE --
--------------------
-- | All Messenger Code request parsing tests.
messengerCodeTests :: TestTree
messengerCodeTests = Tasty.testGroup "Messenger Code Requests"
  [ messengerCodeTest
  , messengerCodeRef
  ]
-- | Expected JSON, embedded at compile time from the fixture file.
messengerCodeVal :: Value
messengerCodeVal = $$(decodeFile "test/json/request/messenger_code_request.json")
-- | Checks that 'messengerCode' parses to the basic fixture.
messengerCodeTest :: TestTree
messengerCodeTest = parseTest "Messenger code" messengerCodeVal messengerCode
-- | Expected JSON for a code request carrying an extra parameter.
messengerCodeMaxVal :: Value
messengerCodeMaxVal = $$(decodeFile "test/json/request/messenger_code_request_ref.json")
-- | Checks a maximal request: image size 1500 plus a "billboard-ad" ref.
messengerCodeRef :: TestTree
messengerCodeRef = parseTest "Maximal messenger code w/ parameter" messengerCodeMaxVal
  $ MessengerCodeRequest (Just 1500) $ Just "billboard-ad"
| Vlix/facebookmessenger | test/UnitTest/RequestParse/MessengerCode.hs | mit | 969 | 0 | 9 | 125 | 159 | 91 | 68 | -1 | -1 |
{- |
Module : Language.Egison.Math.Arith
Licence : MIT
This module defines some basic arithmetic operations for Egison's computer
algebra system.
-}
module Language.Egison.Math.Arith
( mathPlus
, mathMinus
, mathMult
, mathDiv
, mathPower
, mathNumerator
, mathDenominator
) where
import Language.Egison.Math.Expr
import Language.Egison.Math.Normalize
-- | Add two scalars: @a\/b + c\/d = (a*d + c*b) \/ (b*d)@, then normalize.
mathPlus :: ScalarData -> ScalarData -> ScalarData
mathPlus (Div m1 n1) (Div m2 n2) = mathNormalize' $ Div (mathPlusPoly (mathMultPoly m1 n2) (mathMultPoly m2 n1)) (mathMultPoly n1 n2)
-- | Add two polynomials by concatenating their term lists.  Like terms
-- are not collected here; that is left to normalization.
mathPlusPoly :: PolyExpr -> PolyExpr -> PolyExpr
mathPlusPoly (Plus ts1) (Plus ts2) = Plus (ts1 ++ ts2)
-- | Subtract one scalar from another by adding the negation of the
-- second to the first.
mathMinus :: ScalarData -> ScalarData -> ScalarData
mathMinus minuend = mathPlus minuend . mathNegate
-- | Multiply two scalars: @(a\/b) * (c\/d) = (a*c) \/ (b*d)@, then normalize.
mathMult :: ScalarData -> ScalarData -> ScalarData
mathMult (Div m1 n1) (Div m2 n2) = mathNormalize' $ Div (mathMultPoly m1 m2) (mathMultPoly n1 n2)
-- | Multiply two polynomials by distributing every term of the first
-- over every term of the second: coefficients multiply and the monomial
-- factor lists concatenate.  An empty polynomial (zero) annihilates.
mathMultPoly :: PolyExpr -> PolyExpr -> PolyExpr
mathMultPoly (Plus []) (Plus _) = Plus []
mathMultPoly (Plus _) (Plus []) = Plus []
mathMultPoly (Plus ts1) (Plus ts2) = foldl mathPlusPoly (Plus []) (map (\(Term a xs) -> Plus (map (\(Term b ys) -> Term (a * b) (xs ++ ys)) ts2)) ts1)
-- | Divide one scalar by another: multiplication by the reciprocal
-- of the divisor.
mathDiv :: ScalarData -> ScalarData -> ScalarData
mathDiv dividend (Div num den) = mathMult dividend (Div den num)
-- | Raise a scalar to a non-negative integer power by repeated
-- multiplication.  Negative exponents are not supported; they now raise
-- an explicit error instead of an uninformative non-exhaustive-pattern
-- crash (previously no clause matched for @n < 0@).
mathPower :: ScalarData -> Integer -> ScalarData
mathPower _ 0 = SingleTerm 1 []
mathPower s 1 = s
mathPower s n
  | n >= 2    = mathMult s (mathPower s (n - 1))
  | otherwise = error "mathPower: negative exponent is not supported"
-- | The numerator of a scalar, re-wrapped as a scalar over denominator 1.
mathNumerator :: ScalarData -> ScalarData
mathNumerator (Div m _) = Div m (Plus [Term 1 []])
-- | The denominator of a scalar, re-wrapped as a scalar over denominator 1.
mathDenominator :: ScalarData -> ScalarData
mathDenominator (Div _ n) = Div n (Plus [Term 1 []])
| egison/egison | hs-src/Language/Egison/Math/Arith.hs | mit | 1,697 | 0 | 17 | 345 | 663 | 341 | 322 | 32 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Turnip.Eval.TH (genDecls, genLibLoadFunction, entry, TypeT(..), Sig(..)) where
import qualified Turnip.Eval.Types as Eval
import Turnip.Eval.Util
import Language.Haskell.TH
import Control.Monad
import Data.Char
-- |Function signature has n params and one return value
-- |Function signature has n params and one return value
data Sig = Sig [TypeT] TypeT
-- | Type tags for the Lua value kinds a native function can accept or return.
data TypeT = NumberT | StringT | BooleanT
-- |Maps a type tag onto the name of the corresponding 'Eval' value
-- constructor.
typeToName :: TypeT -> Name
typeToName NumberT  = 'Eval.Number
typeToName StringT  = 'Eval.Str
typeToName BooleanT = 'Eval.Boolean
-- |Builds a pattern matching the given constructor around a freshly
-- generated variable, returning the pattern together with that name.
toPatName :: Name -> Q (Pat, Name)
toPatName ctor = do
    fresh <- newName "x"
    pat <- conP ctor [varP fresh]
    return (pat, fresh)
-- |Combination of 'typeToName' and 'toPatName': a match pattern (plus
-- bound name) for the value constructor of a type tag.
typeToMatch :: TypeT -> Q (Pat, Name)
typeToMatch tag = toPatName (typeToName tag)
-- |A native-function entry: signature, Lua-visible name, generated
-- binding name, and the original Haskell function's name.
type Entry = (Sig, String, Name, Name)

-- |Creates an 'Entry', deriving a Haskell-safe fresh name for the Lua
-- binding by encoding each character of the Lua name as its code point
-- (the Lua name may contain characters illegal in Haskell identifiers).
entry :: Sig -> String -> Name -> Q Entry
entry sig luaName origName = do
    tempName <- newName ("lua" ++ toSafeSuffix luaName)
    return (sig, luaName, tempName, origName)
    where
        -- concatMap replaces the former `concat . map` composition
        toSafeSuffix = concatMap (show . ord)
-- |Generates the Lua-compatible wrapper declarations (signature + body)
-- for every entry, concatenated into one declaration list.
genDecls :: [Entry] -> Q [Dec]
genDecls es = concat <$> mapM (\(sig, _, tempName, origName) -> genDec sig tempName origName) es
-- |Generates @loadBaseLibraryGen@, a function that registers every
-- entry (paired with its generated wrapper) as a native module under
-- the supplied module name.
genLibLoadFunction :: [Entry] -> Q [Dec]
genLibLoadFunction entries = do
    let funs = ListE <$> mapM toModuleItem entries
    [d|
        loadBaseLibraryGen :: String -> Eval.LuaM ()
        loadBaseLibraryGen modName = addNativeModule modName $(funs)
      |]
    where
      -- One (name, function) pair per entry, referencing the generated wrapper.
      toModuleItem :: Entry -> Q Exp
      toModuleItem (_, luaName, tempName, _) = [e| (luaName, Eval.BuiltinFunction $(varE tempName)) |]
-- |Generates a declaration of a function compatible with Lua interface
-- @param tempName - the new name
-- The result is a 'NativeFunction' type signature plus a two-clause
-- body: one clause matching the statically declared argument types,
-- and a catch-all clause raising a runtime error.
genDec :: Sig -> Name -> Name -> Q [Dec]
genDec (Sig paramTs returnT) tempName origName = do
    matches <- mapM typeToMatch paramTs
    let listOfPatterns = map fst matches
    let
        -- the generated function accepts one param, with a pattern of (t1 : t2 : t3 : ... : _),
        -- where t1..tn are the types in its signature
        inputPattern :: Q Pat
        inputPattern = return $ foldr (\par pat -> ConP '(:) [par, pat]) WildP listOfPatterns
    let
        -- the freshly bound argument variables, in declaration order
        params :: Q [Exp]
        params = mapM (varE . snd) matches
    let
        -- the original function applied to all of the unwrapped arguments
        app :: Q Exp
        app = foldl AppE (VarE origName) <$> params
    -- this rather convoluted body just means to use appropriate type wrapper for the function's
    -- return type
    let body = normalB [| return $ [ $(appE (conE $ typeToName returnT) app) ] |]
    -- error body is used when the static signature doesn't match the passed arguments
    -- TODO: maybe it could be useful to allow writing native functions accepting any value
    -- or generally being more flexible in their arguments; I'll leave that for the future
    -- public API.
    let errorBody = normalB [| throwErrorStr "The arguments are wrong for this native function." |]
    -- Generate the function body and a signature for it
    sigDec <- sigD tempName [t| Eval.NativeFunction |]
    bodyDec <-
        funD tempName [
            clause [inputPattern] body [],
            clause [wildP] errorBody []
        ]
    return [sigDec, bodyDec]
| bananu7/Turnip | src/Turnip/Eval/TH.hs | mit | 3,274 | 0 | 15 | 767 | 794 | 435 | 359 | 57 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- Copyright (C) 2012 John Millikin <jmillikin@gmail.com>
--
-- See license.txt for details
module OptionsTests.Util
( suite_Util
) where
#if defined(OPTIONS_ENCODING_UTF8)
import Data.Bits
import qualified Data.ByteString.Char8 as Char8
import Data.Char (chr, ord)
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import Test.Chell.QuickCheck
import Test.QuickCheck (Property, forAll)
import Test.QuickCheck.Gen
#endif
import Test.Chell
import Options.Util
-- | All tests for the Options.Util helpers.  The UTF-8 decoding
-- property is only compiled in when the UTF-8 encoding is enabled
-- via the OPTIONS_ENCODING_UTF8 CPP flag.
suite_Util :: Suite
suite_Util = suite "util"
	test_ValidFieldName
	test_ValidShortFlag
	test_ValidLongFlag
	test_HasDuplicates
#if defined(OPTIONS_ENCODING_UTF8)
	property "decodeUtf8" prop_DecodeUtf8
#endif
-- | Field names must start lower-case (underscore and non-ASCII letters
-- allowed), and may not contain spaces or start with a prime or capital.
test_ValidFieldName :: Test
test_ValidFieldName = assertions "validFieldName" $ do
	$expect (validFieldName "a")
	$expect (validFieldName "abc")
	$expect (validFieldName "_abc_")
	$expect (validFieldName "abc'")
	$expect (validFieldName "\12354")
	$expect (not (validFieldName ""))
	$expect (not (validFieldName "'a"))
	$expect (not (validFieldName "a b"))
	$expect (not (validFieldName "Ab"))
-- | Short flags are single characters; space and dash are rejected.
test_ValidShortFlag :: Test
test_ValidShortFlag = assertions "validShortFlag" $ do
	$expect (validShortFlag 'a')
	$expect (validShortFlag 'A')
	$expect (validShortFlag '0')
	$expect (validShortFlag '\12354')
	$expect (not (validShortFlag ' '))
	$expect (not (validShortFlag '-'))
-- | Long flags are non-empty, may contain letters, digits, dashes and
-- underscores, but not spaces, plus signs, or dash-only names.
test_ValidLongFlag :: Test
test_ValidLongFlag = assertions "validLongFlag" $ do
	$expect (validLongFlag "a")
	$expect (validLongFlag "A")
	$expect (validLongFlag "abc")
	$expect (validLongFlag "0")
	$expect (validLongFlag "012")
	$expect (validLongFlag "a-b")
	$expect (validLongFlag "a_b")
	$expect (validLongFlag "\12354bc")
	$expect (not (validLongFlag ""))
	$expect (not (validLongFlag "a b"))
	$expect (not (validLongFlag "a+b"))
	$expect (not (validLongFlag "-"))
	$expect (not (validLongFlag "--"))
-- | 'hasDuplicates' detects a repeated element anywhere in the list.
test_HasDuplicates :: Test
test_HasDuplicates = assertions "hasDuplicates" $ do
	$expect (not (hasDuplicates ([] :: [Char])))
	$expect (not (hasDuplicates ['a', 'b']))
	$expect (hasDuplicates ['a', 'b', 'a'])
#if defined(OPTIONS_ENCODING_UTF8)
-- | Property: 'decodeUtf8' recovers the original characters from their
-- UTF-8 encoding, and maps each byte of a truncated (incomplete) code
-- point to a surrogate-escape character (the byte value plus 0xDC00).
prop_DecodeUtf8 :: Property
prop_DecodeUtf8 = forAll example prop where
	example = do
		chunks <- listOf genChunk
		let utf = concat [x | (x, _) <- chunks]
		let chars = concat [x | (_, x) <- chunks]
		return (Char8.pack utf, chars)
	-- Encode one random character, then possibly keep only a prefix of
	-- its UTF-8 bytes, paired with the characters decoding must yield.
	genChunk = do
		unichr <- genUnichar
		let utf = Char8.unpack (Text.encodeUtf8 (Text.singleton unichr))
		nBytes <- choose (1, length utf)
		let truncUtf = take nBytes utf
		return $ if nBytes == length utf
			then (utf, [unichr])
			else (truncUtf, map (\c -> chr (ord c + 0xDC00)) truncUtf)
	prop (bytes, expected) = decodeUtf8 bytes == expected
-- | Generator for Unicode scalar values drawn from ASCII and a sampling
-- of ranges in planes 0, 1, 2 and 14, excluding surrogates and
-- noncharacters (U+xxFFFE / U+xxFFFF).
genUnichar :: Gen Char
genUnichar = chr `fmap` excluding reserved (oneof planes) where
	-- Rejection sampling: retry until no predicate in 'bad' matches.
	excluding :: [a -> Bool] -> Gen a -> Gen a
	excluding bad gen = loop where
		loop = do
			x <- gen
			if or (map ($ x) bad)
				then loop
				else return x
	reserved = [lowSurrogate, highSurrogate, noncharacter]
	lowSurrogate c = c >= 0xDC00 && c <= 0xDFFF
	highSurrogate c = c >= 0xD800 && c <= 0xDBFF
	noncharacter c = masked == 0xFFFE || masked == 0xFFFF where
		masked = c .&. 0xFFFF
	ascii = choose (0,0x7F)
	plane0 = choose (0xF0, 0xFFFF)
	plane1 = oneof [ choose (0x10000, 0x10FFF)
	               , choose (0x11000, 0x11FFF)
	               , choose (0x12000, 0x12FFF)
	               , choose (0x13000, 0x13FFF)
	               , choose (0x1D000, 0x1DFFF)
	               , choose (0x1F000, 0x1FFFF)
	               ]
	plane2 = oneof [ choose (0x20000, 0x20FFF)
	               , choose (0x21000, 0x21FFF)
	               , choose (0x22000, 0x22FFF)
	               , choose (0x23000, 0x23FFF)
	               , choose (0x24000, 0x24FFF)
	               , choose (0x25000, 0x25FFF)
	               , choose (0x26000, 0x26FFF)
	               , choose (0x27000, 0x27FFF)
	               , choose (0x28000, 0x28FFF)
	               , choose (0x29000, 0x29FFF)
	               , choose (0x2A000, 0x2AFFF)
	               , choose (0x2B000, 0x2BFFF)
	               , choose (0x2F000, 0x2FFFF)
	               ]
	plane14 = choose (0xE0000, 0xE0FFF)
	planes = [ascii, plane0, plane1, plane2, plane14]
#endif
| shlevy/haskell-options | tests/OptionsTests/Util.hs | mit | 4,380 | 32 | 18 | 1,009 | 1,504 | 764 | 740 | 52 | 1 |
{-|
Utility functions that are used in multiple "Nebula" modules are
placed in here. This module should be safe for importing into any
other "Nebula" module.
-}
module Nebula.Util
(
-- * Validation functions
isValidHash
, isValidUUID
-- * Miscellaneous
, getTime
-- * REPL utilities
, toBytes, fromBytes
) where
import qualified Data.ByteString.Char8 as BS
import qualified Data.UUID as UUID
import qualified Data.Int as Int
import qualified Data.Time.Exts.Unix as Time
{- This module contains various utility functions to be utilised across
Nebula.
It's useful to determine whether something is a valid SHA-256
identifier.
First, the valid characters and the expected length of a SHA-256 hash
(keeping in mind that in Nebula, a SHA-256 hash is always referred to
as in its hex-encoded form).
-}
-- The hex alphabet Nebula uses for SHA-256 digests (lowercase only).
hexAlphabet = "0123456789abcdef"
-- A hex-encoded SHA-256 digest is exactly 64 characters long.
sha256Length = 64
-- | Verify that the String represents a /possible/ valid SHA-256
-- identifier: exactly 64 characters long, every one of them a lowercase
-- hex digit.
--
-- Fix: the previous implementation only counted how many hex characters
-- the input contained, so a string of 64 hex digits plus extra non-hex
-- characters was wrongly accepted.
isValidHash :: String -> Bool
isValidHash x = length x == sha256Length && all (`elem` hexAlphabet) x
{- It's also useful to validate a UUID. For this, the UUID parser from
the UUID package is used. -}
-- | Verify that the String represents a valid UUID, by checking whether
-- the UUID package's parser accepts it.  Any UUID version is accepted.
isValidUUID :: String -> Bool
isValidUUID candidate = maybe False (const True) (UUID.fromString candidate)
{- Nebula also uses integer Unix timestamps. -}
-- | getTime returns the current Unix time in nanoseconds. Due to
-- platform issues, this may only be valid to microsecond resolution.
getTime :: IO Int.Int64
-- NOTE(review): 'Time.unixBase' is assumed to project the raw integer
-- timestamp out of the nanosecond date value -- confirm against the
-- time-exts documentation.
getTime = fmap Time.unixBase Time.getCurrentUnixDateTimeNanos
{- The following are useful for working with and converting between
Strings and ByteStrings in the REPL. -}
-- |Convert a String to a ByteString.
-- Note: 'BS.pack' from Data.ByteString.Char8 truncates each character
-- to its low 8 bits, so this is only faithful for ASCII/Latin-1 text.
toBytes = BS.pack
-- |Convert a ByteString into a String (one Char per byte, no decoding).
fromBytes = BS.unpack
| kisom/nebulahs | src/Nebula/Util.hs | mit | 2,058 | 0 | 10 | 419 | 205 | 126 | 79 | 22 | 2 |
{-# htermination msum :: [Maybe a] -> Maybe a #-}
import Monad
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Monad_msum_3.hs | mit | 63 | 0 | 3 | 12 | 5 | 3 | 2 | 1 | 0 |
-- ================================================================================================
-- http://elemarjr.net/2012/06/09/haskell-para-programadores-c-parte-1-primeiros-passos/
-- H:
sum [1..100]
-- C#:
Enumerable.Range(1, 100).Sum()
-- H:
[1..10]
-- C#:
Enumerable.Range(1, 10)
-- H:
[1,2,3,4,5]
-- C#:
new[] {1,2,3,4,5}
-- H:
head [1,2,3,4,5]
-- C#:
new[] {1, 2, 3, 4, 5}.First()
-- H:
tail [1,2,3,4,5]
-- C#:
new[] {1, 2, 3, 4, 5}.Skip(1)
-- H:
drop 3 [1,2,3,4,5]
-- C#:
new[] {1, 2, 3, 4, 5}.Skip(3)
-- H:
length [1,2,3,4,5]
-- C#:
new[] {1, 2, 3, 4, 5}.Count()
-- H:
sum [1,2,3,4,5]
-- C#:
new[] {1, 2, 3, 4, 5}.Sum()
-- H:
product [1,2,3,4,5] -- or -- foldl' (\a b -> a * b) 1 [1,2,3,4,5] (foldl' is tail optimized, so in this case works for arbitrarily large lists)
-- C#:
new[] {1, 2, 3, 4, 5}.Aggregate((a, b) => a*b)
-- H:
[1,2,3] ++ [4,5]
-- C#:
new[] {1,2,3}.Union(new[] {4,5});
-- H:
reverse [1,2,3,4,5]
-- C#:
{1,2,3,4,5}.Reverse()
-- ================================================================================================
-- http://elemarjr.net/2012/06/10/haskell-para-programadores-c-parte-2-aplicao-e-escrita-de-funes/
-- H:
foo x
-- C#:
foo(x)
-- H:
foo a b
-- C#:
foo(a,b)
-- H:
foo (fee x) -- or -- foo $ fee x
-- C#:
foo(fee(x))
-- H:
foo a (fee b)
-- C#:
foo(a,fee(b))
-- H:
add a b = a + b
-- C#:
Func<int, int, int> add = (a, b) => a + b;
-- H:
inc a = add a 1
-- C#:
Func<int, int> inc = a => add(a, 1);
-- H:
double x = x + x
-- C#:
Func<int, int> @double = x => x + x;
-- H:
quadruple x = double (double x)
-- C#:
Func<int, int> quadruple = x => @double(@double(x));
-- ================================================================================================
-- http://elemarjr.net/2012/06/12/haskell-para-programadores-c-parte-3-tipos-tipos-tipos-e-classes/
-- H:
5 :: Int
-- C#:
(int) 5
-- H:
"Elemar" :: String
-- C#:
(string) "Elemar"
-- H:
True :: Bool
-- C#:
(bool) true
-- H:
:type <expr> -- or -- :t <expr>
-- C#:
<expr>.GetType()
-- H:
:type 5
-- C#:
5.GetType()
-- H:
[1,2,3,4,5] :: [Int]
-- C#:
new int[] {1,2,3,4,5}
-- H:
[[1,2,3],[4,5]] :: [[Int]]
-- C#:
new int[][] {new int[] {1,2,3}, new int[] {4,5}}
-- H:
(1, "Elemar Jr", True) -- type (Int, String, Bool)
-- C#:
Tuple.Create(1, "Elemar Jr", true) -- type Tuple<int, string, bool>
-- H:
inc :: Int -> Int
inc n = n + 1 -- or -- inc = \n -> n + 1
add :: Int -> Int -> Int
add a b = a + b -- or -- add = \a -> \b -> a + b
-- C#:
Func<int, int> inc = n => n + 1;
Func<int, Func<int, int>> add = a => b => a + b;
-- H:
--(0) (1) (2)
last :: [a] -> a
last = \vs -> head (reverse vs)
-- (3) \_____(4)______/
-- C#:
-- (2) (0) (1) (3)
public static T Last<T>(this IEnumerable<T> source)
{
return source.Reverse().First();
} -- \_________(4)__________/
-- ================================================================================================
-- http://elemarjr.net/2012/06/13/haskell-para-programadores-c-parte-4-respostas-condicionais/
-- H
isDigit :: Char -> Bool
isDigit c = c >= '0' && c <= '9'
-- C#
public static bool IsDigit(char c)
{
return c >= '0' && c <= '9';
}
-- H
abs :: Int -> Int
abs n = if n < 0 then -n else n
--or--
abs n
| n < 0 = -n
| otherwise = n
-- C#
public static int Abs(int n)
{
return n < 0 ? -n : n;
}
-- H
sign :: Int -> Int
sign n = if n < 0 then -1 else if (n > 0) then 1 else 0
-- or --
sign n
| n < 0 = -1
| n > 0 = 1
| otherwise = 0
-- C#
public static int Sign(int input)
{
if (input < 0) return -1;
if (input > 0) return 1;
return 0;
}
-- H
not :: Bool -> Bool
not a = if a then False else True
not a
| a == True = False
| otherwise = True
-- H
not True = False
not False = True
-- C#
public static bool And(bool a, bool b)
{
if (a) return b;
return false;
}
-- H
and :: Bool -> Bool -> Bool
and True True = True
and True False = False
and False True = False
and False False = False
and True True = True
and _ _ = False
and True b = b
and False _ = False
and a b
| a == b = a
| otherwise = False
True `and` True = True
_ `and` _ = False
-- ================================================================================================
-- http://elemarjr.net/2012/06/14/haskell-para-programadores-c-parte-5-list-comprehensions/
-- H
[1,2,3,4,5] -- or -- 1 : (2 : (3 : (4 : (5 : []))))
-- C#
public static IEnumerable<int> OneToFive()
{
yield return 1;
yield return 2;
yield return 3;
yield return 4;
yield return 5;
}
-- H
[1..5]
-- C#
Enumerable.Range(1,5)
-- H
[1..]
take 5 [1..]
-- C#
public static IEnumerable<int> Infinite()
{
var result = 1;
while (true)
yield return result++;
}
Infinite().Take(5);
-- H
[x^2 | x <- [1..5]]
-- C#
Enumerable.Range(1,5).Select(x => x * x)
--or--
from x in Enumerable.Range(1,5) select x * x
-- H
[(x, y) | x <- [1..3], y <- [4..5]]
-- C#
from x Enumerable.Range(1,3)
from y Enumerable.Range(4,5)
select new {x,y}
--or--
Enumerable.Range(1,3).SelectMany(x => Enumerable.Range(4,5), (x, y) = new {x,y})
--or--
public static IEnumerable<Tuple<int, int>> Pairs()
{
for (var x = 1; x <= 3; x++)
for (var y = 1; y <= 3; y++)
yield return new Tuple<int, int>(x, y);
}
-- H
[x | x <- [1..50], even x]
--or--
[x | x <- [1..50], x `mod` 2 == 0]
-- C#
Enumerable.Range(1, 50).Where(x => x%2 == 0)
--or--
from x in Enumerate.Range(1,50)
where x % 2 == 0
select x
-- H
factors n = [x | x <- [1..n], n `mod` x == 0]
isPrime n = (factors n) == [1, n]
primesUntil n = [x | x <- [1..n], isPrime x]
-- C#
public static IEnumerable<int> Factors(int n)
{
return from x in Enumerable.Range(1, n)
where n%x == 0
select x;
}
public static bool IsPrime(int n)
{
return Factors(n).SequenceEqual(new [] {1, n});
}
public static IEnumerable<int> PrimesUntil(int n)
{
return from x in Enumerable.Range(1, n)
where IsPrime(x)
select x;
}
-- H
isLower c = c >= 'a' && c <= 'z'
lowerCount s = sum [ 1 | c <- s, isLower c]
-- C#
(from c in s where Char.IsLower(c) select 1).Count()
-- ================================================================================================
-- http://elemarjr.net/2012/06/15/haskell-para-programadores-c-parte-6-recurso/
factorial' :: Int -> Int
factorial' 0 = 1
factorial' (n+1) = (n+1) * factorial' n
public static int Factorial(int n)
{
if (n == 0) return 1;
    return n * Factorial(n - 1);
}
product' :: Num a => [a] -> a
product' [] = 1
product' (n:ns) = n * product' ns
public static int Product(this IEnumerable source)
{
if (!source.GetEnumerator().MoveNext()) return 1;
    return source.First() * source.Skip(1).Product();
}
length' :: [a] -> Int
length' [] = 0
length' (_:xs) = 1 + length' xs
public static int Length(this IEnumerable source)
{
if (!source.GetEnumerator().MoveNext()) return 0;
return 1 + source.Skip(1).Length();
}
reverse' :: [a] -> [a]
reverse' [] = []
reverse' (x:xs) = reverse' xs ++ [x]
public static IEnumerable Reverse(this IEnumerable source)
{
if (!source.GetEnumerator().MoveNext())
return Enumerable.Empty();
return source.Skip(1).Reverse().Union(source.Take(1));
}
-- ================================================================================================
-- http://elemarjr.net/2012/06/16/haskell-para-programadores-c-parte-7-higher-order-functions/
twice :: (a -> a) -> a -> a
twice f x = f (f x)
static T Twice<T>(Func<T, T> f, T x)
{
return f(f(x));
}
-- map => Select
map (\a -> a + 1) [1..5]
Enumerable.Range(1, 5).Select(a => a + 1);
length' :: [a] -> Int
length' xs = sum (map (\_ -> 1) xs)
static int Length<T>(IEnumerable<T> source)
{
return source.Select(a => 1).Sum();
}
-- filter => Where
filter :: (a -> Bool) -> [a] -> a
filter p xs = [x | x <- xs, p x]
Enumerable.Range(1, 10).Where(x => x%2 == 0);
Enumerable.Range(1, 10).Where(x => x > 5);
new string("String With Spaces".Where(c => c != ' ').ToArray());
-- foldr => Aggregate
product' :: Num a => [a] -> a
product'= foldr (*) 1
sum' :: Num a => [a] -> a
sum'= foldr (+) 0
and' :: [Bool] -> Bool
and'= foldr (&&) True
or' :: [Bool] -> Bool
or'= foldr (||) False
Func<IEnumerable<int>, int> sum = xs => xs.Aggregate(0, (a, b) => a + b);
Func<IEnumerable<int>, int> product = xs => xs.Aggregate(1, (a, b) => a * b);
Func<IEnumerable<bool>, bool> and = xs => xs.Aggregate(true, (a, b) => a && b);
Func<IEnumerable<bool>, bool> or = xs => xs.Aggregate(false, (a, b) => a || b);
-- all, any, takeWhile, dropWhile => All, Any, TakeWhile, SkipWhile
-- ================================================================================================
-- http://elemarjr.net/2012/06/18/haskell-para-programadores-c-parte-8-composition/
combine (a->b) -> (c->a) -> (c->b)
combine fa fb = \arg -> fa (fb arg)
-- or --
-- Just use the . operator
not x = if x then False else True
even x = x `mod` 2 == 0
odd = combine not even --or-- odd = not . even
public static Func<TC, TB> Combine<TA, TB, TC>(this Func<TA, TB> fa, Func<TC, TA> fb)
{
return arg => fa(fb(arg));
}
Func<bool, bool> not = a => !a;
Func<int, bool> even = x => x % 2 == 0;
var odd = Combine(not, even);
-- ================================================================================================
-- http://elemarjr.net/2012/06/30/haskell-para-programadores-c-parte-9-modules/
module Geometry
(
cubeVolume,
cubeArea,
cuboidVolume,
cuboidArea
)
where
cubeVolume :: Float -> Float
cubeVolume side = cuboidVolume side side side
cubeArea :: Float -> Float
cubeArea side = cuboidArea side side side
cuboidVolume :: Float -> Float -> Float -> Float
cuboidVolume a b c = rectArea a b * c
cuboidArea :: Float -> Float -> Float -> Float
cuboidArea a b c = rectArea a b * 2 + rectArea a c * 2 + rectArea c b * 2
rectArea :: Float -> Float -> Float
rectArea a b = a * b
public static class Geometry
{
public static float CubeVolume(float side)
{
return CuboidVolume(side, side, side);
}
public static float CubeArea(float side)
{
return CuboidArea(side, side, side);
}
public static float CuboidVolume(float a, float b, float c)
{
return RectArea(a, b)*c;
}
public static float CuboidArea(float a, float b, float c)
{
return RectArea(a, b)*2 + RectArea(a, c)*2 + RectArea(c, b)*2;
}
private static float RectArea(float a, float b)
{
return a*b;
}
}
| feliposz/learning-stuff | haskell/haskell and c# (linq).hs | mit | 10,676 | 390 | 28 | 2,347 | 4,686 | 2,561 | 2,125 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Game.Play.Types where
import Control.Lens (makeLenses)
import Control.Monad.Except (ExceptT)
import Control.Monad.Random (Rand, StdGen)
import Control.Monad.Trans.RWS (RWST)
import Game.Api.Types (GameError)
import Game.Types (Game, Player)
-- | Monad for actions inside a running game: reads the 'Seating' around
-- the acting player, accumulates no log (unit writer), mutates the
-- 'Game' state, and may fail with a 'GameError' or draw randomness.
type WithGame a = RWST Seating () Game (ExceptT GameError (Rand StdGen)) a
-- | Monad for per-player actions outside the game state: failure with
-- 'GameError' plus randomness.
type WithPlayer a = ExceptT GameError (Rand StdGen) a
-- | The acting player together with the players seated on either side.
data Seating = Seating
  { _seatMe :: Player
  , _seatLeft :: [Player] -- players sitting to the left
  , _seatRight :: [Player] -- players sitting to the right
  }
-- Generates lenses seatMe, seatLeft, seatRight for the fields above.
makeLenses ''Seating
| rubenmoor/skull | skull-server/src/Game/Play/Types.hs | mit | 705 | 0 | 9 | 197 | 176 | 106 | 70 | 15 | 0 |
--getcontents is an I/O action that teads from standard input
-- until it encounters and end of file character
--it does lazy I/O
import Data.Char
-- | Filter stdin to stdout, keeping only the short lines.
main = do
    --contents <- getContents
    --putStr (shortLinesOnly contents)
    --the pattern of getting contents and transform it is so common that
    --it has been wrapped on the interact function
    -- 'interact' lazily reads all of stdin, applies the function, and
    -- writes the result to stdout.
    interact shortLinesOnly
-- | Keep only the lines of the input that are shorter than 10
-- characters, re-joining them with trailing newlines.
shortLinesOnly :: String -> String
shortLinesOnly = unlines . filter isShort . lines
  where isShort line = length line < 10
{-# LANGUAGE FlexibleContexts, OverlappingInstances #-}
module Tests.Marshal (
tests
) where
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import GHCJS.Marshal.Pure (PFromJSRef(..), PToJSRef(..))
import GHCJS.Marshal (FromJSRef(..), ToJSRef(..))
import Tests.QuickCheckUtils (eq)
import Test.QuickCheck.Monadic (run, monadicIO)
import Test.QuickCheck (once, Arbitrary(..), Property)
import Data.Int (Int32, Int16, Int8)
import Data.Word (Word32, Word16, Word8)
import Data.Text (Text)
import qualified Data.Text as T (unpack, pack)
import Data.JSString (JSString)
-- | A phantom-typed label: fixes the marshalled type @a@ while carrying
-- a human-readable name for the test group.
newtype TypeName a = TypeName String
-- | Pure round-trip: converting to a JSRef and back is the identity.
pure_to_from_jsref' :: (PToJSRef a, PFromJSRef a, Eq a) => a -> Bool
pure_to_from_jsref' a = pFromJSRef (pToJSRef a) == a
-- | As 'pure_to_from_jsref'', with the type fixed by a 'TypeName' witness.
pure_to_from_jsref :: (PToJSRef a, PFromJSRef a, Eq a) => TypeName a -> a -> Bool
pure_to_from_jsref _ = pure_to_from_jsref'
-- | Pure round-trip for 'Maybe'-wrapped values.
pure_to_from_jsref_maybe :: (PToJSRef a, PFromJSRef a, Eq a) => TypeName a -> Maybe a -> Bool
pure_to_from_jsref_maybe _ = pure_to_from_jsref'
-- | Effectful round-trip: 'toJSRef' then 'fromJSRefUnchecked' is the identity.
to_from_jsref' :: (ToJSRef a, FromJSRef a, Eq a) => a -> Property
to_from_jsref' a = monadicIO $ do
    b <- run $ toJSRef a >>= fromJSRefUnchecked
    return $ b == a
-- The remaining variants fix the element type via the 'TypeName'
-- witness; they differ only in the container shape being round-tripped.
to_from_jsref :: (ToJSRef a, FromJSRef a, Eq a) => TypeName a -> a -> Property
to_from_jsref _ = to_from_jsref'
to_from_jsref_maybe :: (ToJSRef a, FromJSRef a, Eq a) => TypeName a -> Maybe a -> Property
to_from_jsref_maybe _ = to_from_jsref'
to_from_jsref_list :: (ToJSRef a, FromJSRef a, Eq a) => TypeName a -> [a] -> Property
to_from_jsref_list _ = to_from_jsref'
to_from_jsref_list_maybe :: (ToJSRef a, FromJSRef a, Eq a) => TypeName a -> [Maybe a] -> Property
to_from_jsref_list_maybe _ = to_from_jsref'
to_from_jsref_list_list :: (ToJSRef a, FromJSRef a, Eq a) => TypeName a -> [[a]] -> Property
to_from_jsref_list_list _ = to_from_jsref'
to_from_jsref_maybe_list :: (ToJSRef a, FromJSRef a, Eq a) => TypeName a -> Maybe [a] -> Property
to_from_jsref_maybe_list _ = to_from_jsref'
-- | Full battery of round-trip properties for a type that has both the
-- pure and the effectful marshalling instances.  The list-of-lists case
-- is run 'once' -- NOTE(review): presumably to bound its cost; confirm.
pureMarshalTestGroup :: (PToJSRef a, PFromJSRef a, ToJSRef a, FromJSRef a, Eq a, Show a, Arbitrary a) => TypeName a -> Test
pureMarshalTestGroup t@(TypeName n) =
  testGroup n [
    testProperty "pure_to_from_jsref" (pure_to_from_jsref t),
    testProperty "pure_to_from_jsref_maybe" (pure_to_from_jsref_maybe t),
    testProperty "to_from_jsref" (to_from_jsref t),
    testProperty "to_from_jsref_maybe" (to_from_jsref_maybe t),
    testProperty "to_from_jsref_list" (to_from_jsref_list t),
    testProperty "to_from_jsref_list_maybe" (to_from_jsref_list_maybe t),
    testProperty "to_from_jsref_list_list" (once $ to_from_jsref_list_list t),
    testProperty "to_from_jsref_maybe_list" (to_from_jsref_maybe_list t)
  ]
-- | Effectful round-trip only, for types without pure marshalling.
marshalTestGroup :: (ToJSRef a, FromJSRef a, Eq a, Show a, Arbitrary a) => TypeName a -> Test
marshalTestGroup t@(TypeName n) =
  testGroup n [testProperty "to_from_jsref" (to_from_jsref t)]
-- Test-only orphan instance: generate and shrink 'Text' via 'String'.
instance Arbitrary Text where
    arbitrary = T.pack <$> arbitrary
    shrink = map T.pack . shrink . T.unpack
-- | All marshalling round-trip suites, one group per marshalled type.
tests :: Test
tests =
  testGroup "Marshal" [
    pureMarshalTestGroup (TypeName "Bool" :: TypeName Bool ),
    pureMarshalTestGroup (TypeName "Int" :: TypeName Int ),
    pureMarshalTestGroup (TypeName "Int8" :: TypeName Int8 ),
    pureMarshalTestGroup (TypeName "Int16" :: TypeName Int16 ),
    pureMarshalTestGroup (TypeName "Int32" :: TypeName Int32 ),
    pureMarshalTestGroup (TypeName "Word" :: TypeName Word ),
    pureMarshalTestGroup (TypeName "Word8" :: TypeName Word8 ),
    pureMarshalTestGroup (TypeName "Word16" :: TypeName Word16 ),
    pureMarshalTestGroup (TypeName "Word32" :: TypeName Word32 ),
    pureMarshalTestGroup (TypeName "Float" :: TypeName Float ),
    pureMarshalTestGroup (TypeName "Double" :: TypeName Double ),
    pureMarshalTestGroup (TypeName "[Char]" :: TypeName [Char] ),
    pureMarshalTestGroup (TypeName "Text" :: TypeName Text ),
    pureMarshalTestGroup (TypeName "JSString" :: TypeName JSString)
  ]
| tavisrudd/ghcjs-base | test/Tests/Marshal.hs | mit | 4,120 | 0 | 11 | 753 | 1,295 | 684 | 611 | 72 | 1 |
module Feature.AuthSpec where
-- {{{ Imports
import Text.Heredoc
import Test.Hspec
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Network.HTTP.Types
import SpecHelper
import Network.Wai (Application)
import Protolude hiding (get)
-- }}}
-- | End-to-end authorization tests: anonymous/role-based table access,
-- JWT creation via SQL functions, claim propagation, and token
-- validation (expiry, malformed tokens, missing claims).
spec :: SpecWith Application
spec = describe "authorization" $ do
  let single = ("Accept","application/vnd.pgrst.object+json")
  it "denies access to tables that anonymous does not own" $
    get "/authors_only" `shouldRespondWith` [json| {
      "hint":null,
      "details":null,
      "code":"42501",
      "message":"permission denied for relation authors_only"} |]
    { matchStatus = 401
    , matchHeaders = ["WWW-Authenticate" <:> "Bearer"]
    }
  it "denies access to tables that postgrest_test_author does not own" $
    let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0" in
    request methodGet "/private_table" [auth] ""
      `shouldRespondWith` [json| {
        "hint":null,
        "details":null,
        "code":"42501",
        "message":"permission denied for relation private_table"} |]
      { matchStatus = 403
      , matchHeaders = []
      }
  it "returns jwt functions as jwt tokens" $
    request methodPost "/rpc/login" [single]
      [json| { "id": "jdoe", "pass": "1234" } |]
      `shouldRespondWith` [json| {"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xuYW1lIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.P2G9EVSVI22MWxXWFuhEYd9BZerLS1WDlqzdqplM15s"} |]
      { matchStatus = 200
      , matchHeaders = [matchContentTypeSingular]
      }
  it "sql functions can encode custom and standard claims" $
    request methodPost "/rpc/jwt_test" [single] "{}"
      `shouldRespondWith` [json| {"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJqb2UiLCJzdWIiOiJmdW4iLCJhdWQiOiJldmVyeW9uZSIsImV4cCI6MTMwMDgxOTM4MCwibmJmIjoxMzAwODE5MzgwLCJpYXQiOjEzMDA4MTkzODAsImp0aSI6ImZvbyIsInJvbGUiOiJwb3N0Z3Jlc3RfdGVzdCIsImh0dHA6Ly9wb3N0Z3Jlc3QuY29tL2ZvbyI6dHJ1ZX0.IHF16ZSU6XTbOnUWO8CCpUn2fJwt8P00rlYVyXQjpWc"} |]
      { matchStatus = 200
      , matchHeaders = [matchContentTypeSingular]
      }
  it "sql functions can read custom and standard claims variables" $ do
    let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJmdW4iLCJqdGkiOiJmb28iLCJuYmYiOjEzMDA4MTkzODAsImV4cCI6OTk5OTk5OTk5OSwiaHR0cDovL3Bvc3RncmVzdC5jb20vZm9vIjp0cnVlLCJpc3MiOiJqb2UiLCJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWF0IjoxMzAwODE5MzgwLCJhdWQiOiJldmVyeW9uZSJ9.AQmCA7CMScvfaDRMqRPeUY6eNf--69gpW-kxaWfq9X0"
    request methodPost "/rpc/reveal_big_jwt" [auth] "{}"
      `shouldRespondWith` [str|[{"iss":"joe","sub":"fun","aud":"everyone","exp":9999999999,"nbf":1300819380,"iat":1300819380,"jti":"foo","http://postgrest.com/foo":true}]|]
  it "allows users with permissions to see their tables" $ do
    let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0"
    request methodGet "/authors_only" [auth] ""
      `shouldRespondWith` 200
  it "works with tokens which have extra fields" $ do
    let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIiwia2V5MSI6InZhbHVlMSIsImtleTIiOiJ2YWx1ZTIiLCJrZXkzIjoidmFsdWUzIiwiYSI6MSwiYiI6MiwiYyI6M30.GfydCh-F4wnM379xs0n1zUgalwJIsb6YoBapCo8HlFk"
    request methodGet "/authors_only" [auth] ""
      `shouldRespondWith` 200
  -- this test will stop working 9999999999s after the UNIX EPOCH
  it "succeeds with an unexpired token" $ do
    let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjk5OTk5OTk5OTksInJvbGUiOiJwb3N0Z3Jlc3RfdGVzdF9hdXRob3IiLCJpZCI6Impkb2UifQ.QaPPLWTuyydMu_q7H4noMT7Lk6P4muet1OpJXF6ofhc"
    request methodGet "/authors_only" [auth] ""
      `shouldRespondWith` 200
  it "fails with an expired token" $ do
    let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE0NDY2NzgxNDksInJvbGUiOiJwb3N0Z3Jlc3RfdGVzdF9hdXRob3IiLCJpZCI6Impkb2UifQ.enk_qZ_u6gZsXY4R8bREKB_HNExRpM0lIWSLktk9JJQ"
    request methodGet "/authors_only" [auth] ""
      `shouldRespondWith` [json| {"message":"JWT expired"} |]
      { matchStatus = 401
      , matchHeaders = [
          "WWW-Authenticate" <:>
          "Bearer error=\"invalid_token\", error_description=\"JWT expired\""
        ]
      }
  it "hides tables from users with invalid JWT" $ do
    let auth = authHeaderJWT "ey9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0"
    request methodGet "/authors_only" [auth] ""
      `shouldRespondWith` [json| {"message":"JWT invalid"} |]
      { matchStatus = 401
      , matchHeaders = [
          "WWW-Authenticate" <:>
          "Bearer error=\"invalid_token\", error_description=\"JWT invalid\""
        ]
      }
  it "should fail when jwt contains no claims" $ do
    let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.lu-rG8aSCiw-aOlN0IxpRGz5r7Jwq7K9r3tuMPUpytI"
    request methodGet "/authors_only" [auth] ""
      `shouldRespondWith` 401
  it "hides tables from users with JWT that contain no claims about role" $ do
    let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6Impkb2UifQ.Jneso9X519Vh0z7i9PbXIu7W1HEoq9RRw9BBbyQKFCQ"
    request methodGet "/authors_only" [auth] ""
      `shouldRespondWith` 401
  it "recovers after 401 error with logged in user" $ do
    _ <- post "/authors_only" [json| { "owner": "jdoe", "secret": "test content" } |]
    let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0"
    _ <- request methodPost "/rpc/problem" [auth] ""
    request methodGet "/authors_only" [auth] ""
      `shouldRespondWith` 200
  -- The pre-request proc maps the "id" claim to a database role.
  describe "custom pre-request proc acting on id claim" $ do
    it "able to switch to postgrest_test_author role (id=1)" $
      let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6MX0.mI2HNoOum6xM3sc4oHLxU4yLv-_WV5W1kqBfY_wEvLw" in
      request methodPost "/rpc/get_current_user" [auth]
        [json| {} |]
        `shouldRespondWith` [str|"postgrest_test_author"|]
        { matchStatus = 200
        , matchHeaders = []
        }
    it "able to switch to postgrest_test_default_role (id=2)" $
      let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6Mn0.W7jLsG-zswM91AJkCvZeIMHrnz7_6ceY2jnscVl3Yhk" in
      request methodPost "/rpc/get_current_user" [auth]
        [json| {} |]
        `shouldRespondWith` [str|"postgrest_test_default_role"|]
        { matchStatus = 200
        , matchHeaders = []
        }
    it "raises error (id=3)" $
      let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6M30.15Gy8PezQhJIaHYDJVLa-Gmz9T3sJnW66EKAYIsXc7c" in
      request methodPost "/rpc/get_current_user" [auth]
        [json| {} |]
        `shouldRespondWith` [str|{"hint":"Please contact administrator","details":null,"code":"P0001","message":"Disabled ID --> 3"}|]
        { matchStatus = 400
        , matchHeaders = []
        }
| Skyfold/postgrest | test/Feature/AuthSpec.hs | mit | 7,292 | 0 | 17 | 1,263 | 987 | 534 | 453 | -1 | -1 |
module VM
( Instruction (..)
, runVM
, takeResult
, takePC
, testRunVM ) where
import Control.Applicative ((<$>))
import Data.Array
import Data.Char
import Data.Maybe
import Data.Monoid
import Data.Foldable (foldMap)
import VM.Machine
import VM.Instruction
-- | Execute a program: build the instruction array, point the counter
-- at the @main@ label and run to completion.
runVM :: [Instruction] -> Machine
runVM is =
    run (toArray (map instMorph is))
        (setMain (setCounter 0 (prepare is initMachine)))
-- | Point the program counter at the @main@ label.
setMain :: Machine -> Machine
setMain m = setCounter (lookupL "main" (takeL m)) m
-- | Fetch-execute loop: repeatedly apply the instruction at the current
-- program counter.  Execution halts when the counter equals the array's
-- upper bound — the one-past-the-end sentinel slot created by 'toArray',
-- which is never dereferenced.
run :: Array PC (Machine -> Machine) -> Machine -> Machine
run is m | end pc = m
         | otherwise = run is (is ! pc $ m)
    where pc = fromInteger . takePC $ m
          end = ((snd . bounds) is ==)
-- | Register every instruction's label in the machine, applying
-- 'setLabel' left-to-right over the instruction list.
prepare :: [Instruction] -> Machine -> Machine
prepare is m = foldl (flip setLabel) m is
-- | Pack a list into an array indexed from 0.  NOTE: the upper bound is
-- @length ls@, i.e. one slot PAST the last element.  That extra,
-- never-evaluated slot is the halt sentinel relied upon by 'run' and
-- 'testRun'' (they stop when the counter reaches the upper bound).
toArray :: [a] -> Array PC a
toArray ls = listArray (0,toInteger $ length ls) ls
--- Bellow is for debugging ---
-- | Like 'runVM', but steps the machine under the interactive debugger.
testRunVM :: [Instruction] -> IO Machine
testRunVM is =
    testRun (toArray (map instMorph is))
            (setMain (setCounter 0 (prepare is initMachine)))
-- | Debugger entry point: start stepping with no breakpoint set.
testRun :: Array PC (Machine -> Machine) -> Machine -> IO Machine
testRun is m = testRun' Nothing is m
-- TODO: refactoring
-- | One debugger step.  @bp@ is an optional breakpoint: the debugger
-- prompt is shown only when no breakpoint is set or the counter has
-- reached it.  Commands: "exit" stops, "next"/"" single-steps, and any
-- input containing digits sets a new breakpoint at that PC.
testRun' :: Maybe PC -> Array PC (Machine -> Machine) -> Machine -> IO Machine
testRun' bp is m = do let pc = fromInteger . takePC $ m
                          end = ((snd . bounds) is ==)
                      input <- if isNothing bp || bp == Just pc
                               then runDebugger pc m
                               else return "next"
                      if input == "exit" || end pc
                      then return m
                      else if bp == Just pc || (isNothing bp && input == "next")
                      then testRun' Nothing is (is ! pc $ m)
                      else if any isNumber input
                      then testRun' (Just . read $ filter isNumber input) is (is ! pc $ m)
                      else testRun' bp is (is ! pc $ m)
-- | Show the debugger prompt for the current PC and dispatch one user
-- command.  Inspection commands ("print ...") loop back to the prompt;
-- "next"/""/"exit" are returned to the caller; "setbp<N>" returns the
-- breakpoint spec for 'testRun'' to parse.
-- Fix: the prompt banner said "[DEUBG]" — corrected to "[DEBUG]".
runDebugger :: PC -> Machine -> IO String
runDebugger pc m = do putStrLn $ "[DEBUG] Now in " ++ show pc ++ " | ds: " ++ show ds ++ " | input command"
                      putStr "> "
                      input <- getLine
                      case input of
                        "print m" -> print m >> runDebugger pc m
                        "print ds" -> print (takeDS m) >> runDebugger pc m
                        "print mem" -> print (takeMem m) >> runDebugger pc m
                        "print cs" -> print (takeCS m) >> runDebugger pc m
                        "print lds" -> print (takeLDS m) >> runDebugger pc m
                        "print labels" -> print (takeL m) >> runDebugger pc m
                        "next" -> return "next"
                        "" -> return ""
                        "exit" -> return "exit"
                        ('s':'e':'t':'b':'p':bp) -> return bp
                        _ -> runDebugger pc m
| taiki45/hs-vm | src/VM.hs | mit | 3,078 | 0 | 15 | 1,214 | 1,019 | 511 | 508 | 62 | 11 |
module Chapter07.Arith2 where
-- | Add two 'Int's (explicitly pointed form of @(+)@).
add :: Int -> Int -> Int
add = (+)
-- | Add two 'Int's; same as 'add' but written with named arguments.
addPF :: Int -> Int -> Int
addPF x y = x + y
-- | Increment an 'Int' by one (operator section instead of a lambda).
addOne :: Int -> Int
addOne = (+ 1)
addOnePF :: Int -> Int
addOnePF = (+1) | brodyberg/LearnHaskell | HaskellProgramming.hsproj/Chapter07/Arith2.hs | mit | 215 | 0 | 6 | 71 | 94 | 54 | 40 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dms-eventsubscription.html
module Stratosphere.Resources.DMSEventSubscription where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.Tag
-- | Full data type definition for DMSEventSubscription. See
-- 'dmsEventSubscription' for a more convenient constructor.
data DMSEventSubscription =
  DMSEventSubscription
  { _dMSEventSubscriptionEnabled :: Maybe (Val Bool)
  , _dMSEventSubscriptionEventCategories :: Maybe (ValList Text)
  , _dMSEventSubscriptionSnsTopicArn :: Val Text -- ^ the only required property
  , _dMSEventSubscriptionSourceIds :: Maybe (ValList Text)
  , _dMSEventSubscriptionSourceType :: Maybe (Val Text)
  , _dMSEventSubscriptionSubscriptionName :: Maybe (Val Text)
  , _dMSEventSubscriptionTags :: Maybe [Tag]
  } deriving (Show, Eq)
-- Serialise to CloudFormation resource properties; optional fields that
-- are 'Nothing' are omitted from the property map entirely.
instance ToResourceProperties DMSEventSubscription where
  toResourceProperties DMSEventSubscription{..} =
    ResourceProperties
    { resourcePropertiesType = "AWS::DMS::EventSubscription"
    , resourcePropertiesProperties =
        hashMapFromList $ catMaybes
        [ fmap (("Enabled",) . toJSON) _dMSEventSubscriptionEnabled
        , fmap (("EventCategories",) . toJSON) _dMSEventSubscriptionEventCategories
        , (Just . ("SnsTopicArn",) . toJSON) _dMSEventSubscriptionSnsTopicArn
        , fmap (("SourceIds",) . toJSON) _dMSEventSubscriptionSourceIds
        , fmap (("SourceType",) . toJSON) _dMSEventSubscriptionSourceType
        , fmap (("SubscriptionName",) . toJSON) _dMSEventSubscriptionSubscriptionName
        , fmap (("Tags",) . toJSON) _dMSEventSubscriptionTags
        ]
    }
-- | Constructor for 'DMSEventSubscription' containing required fields as
-- arguments.
dmsEventSubscription
  :: Val Text -- ^ 'dmsesSnsTopicArn'
  -> DMSEventSubscription
-- All optional properties start out as 'Nothing'; set them with the
-- lenses below.
dmsEventSubscription snsTopicArnarg =
  DMSEventSubscription
  { _dMSEventSubscriptionEnabled = Nothing
  , _dMSEventSubscriptionEventCategories = Nothing
  , _dMSEventSubscriptionSnsTopicArn = snsTopicArnarg
  , _dMSEventSubscriptionSourceIds = Nothing
  , _dMSEventSubscriptionSourceType = Nothing
  , _dMSEventSubscriptionSubscriptionName = Nothing
  , _dMSEventSubscriptionTags = Nothing
  }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dms-eventsubscription.html#cfn-dms-eventsubscription-enabled
dmsesEnabled :: Lens' DMSEventSubscription (Maybe (Val Bool))
dmsesEnabled = lens _dMSEventSubscriptionEnabled setter
  where setter s a = s { _dMSEventSubscriptionEnabled = a }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dms-eventsubscription.html#cfn-dms-eventsubscription-eventcategories
dmsesEventCategories :: Lens' DMSEventSubscription (Maybe (ValList Text))
dmsesEventCategories = lens _dMSEventSubscriptionEventCategories setter
  where setter s a = s { _dMSEventSubscriptionEventCategories = a }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dms-eventsubscription.html#cfn-dms-eventsubscription-snstopicarn
dmsesSnsTopicArn :: Lens' DMSEventSubscription (Val Text)
dmsesSnsTopicArn = lens _dMSEventSubscriptionSnsTopicArn setter
  where setter s a = s { _dMSEventSubscriptionSnsTopicArn = a }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dms-eventsubscription.html#cfn-dms-eventsubscription-sourceids
dmsesSourceIds :: Lens' DMSEventSubscription (Maybe (ValList Text))
dmsesSourceIds = lens _dMSEventSubscriptionSourceIds setter
  where setter s a = s { _dMSEventSubscriptionSourceIds = a }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dms-eventsubscription.html#cfn-dms-eventsubscription-sourcetype
dmsesSourceType :: Lens' DMSEventSubscription (Maybe (Val Text))
dmsesSourceType = lens _dMSEventSubscriptionSourceType setter
  where setter s a = s { _dMSEventSubscriptionSourceType = a }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dms-eventsubscription.html#cfn-dms-eventsubscription-subscriptionname
dmsesSubscriptionName :: Lens' DMSEventSubscription (Maybe (Val Text))
dmsesSubscriptionName = lens _dMSEventSubscriptionSubscriptionName setter
  where setter s a = s { _dMSEventSubscriptionSubscriptionName = a }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-dms-eventsubscription.html#cfn-dms-eventsubscription-tags
dmsesTags :: Lens' DMSEventSubscription (Maybe [Tag])
dmsesTags = lens _dMSEventSubscriptionTags setter
  where setter s a = s { _dMSEventSubscriptionTags = a }
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/DMSEventSubscription.hs | mit | 4,559 | 0 | 15 | 508 | 734 | 417 | 317 | 56 | 1 |
{-# LANGUAGE CPP, ScopedTypeVariables, OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.LogRef
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
--
-- |
--
---------------------------------------------------------------------------------
module IDE.LogRef (
nextError
, previousError
, nextBreakpoint
, previousBreakpoint
, markLogRefs
, unmarkLogRefs
, defaultLineLogger
, defaultLineLogger'
, logOutputLines
, logOutputLines_
, logOutputLines_Default
, logOutput
, logOutputDefault
, logOutputPane
, logOutputForBuild
, logOutputForBreakpoints
, logOutputForSetBreakpoint
, logOutputForSetBreakpointDefault
, logOutputForLiveContext
, logOutputForLiveContextDefault
, logOutputForHistoricContext
, logOutputForHistoricContextDefault
, selectRef
, setBreakpointList
, showSourceSpan
, srcSpanParser
) where
import Graphics.UI.Gtk
import Control.Monad.Reader
import Text.ParserCombinators.Parsec.Language
import Text.ParserCombinators.Parsec hiding(Parser)
import qualified Text.ParserCombinators.Parsec.Token as P
import IDE.Core.State
import IDE.TextEditor
import IDE.Pane.SourceBuffer
import qualified IDE.Pane.Log as Log
import IDE.Utils.Tool
import System.FilePath (equalFilePath)
import Data.List (stripPrefix, elemIndex, isPrefixOf)
import Data.Maybe (catMaybes, isJust)
import System.Exit (ExitCode(..))
import System.Log.Logger (debugM)
import IDE.Utils.FileUtils(myCanonicalizePath)
import IDE.Pane.Log (getDefaultLogLaunch, IDELog(..), getLog)
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import Data.Conduit ((=$))
import IDE.Pane.WebKit.Output(setOutput)
import Data.IORef (atomicModifyIORef, IORef, readIORef)
import Data.Text (Text)
import Control.Applicative ((<$>))
import qualified Data.Text as T
(length, stripPrefix, isPrefixOf, unpack, unlines, pack, null)
import Data.Monoid ((<>))
-- | Render a log reference's source span for display.
showSourceSpan :: LogRef -> Text
showSourceSpan ref = T.pack (displaySrcSpan (logRefSrcSpan ref))
-- | Select a log reference: jump to it in the corresponding source
-- buffer (opening/selecting the buffer if possible) and highlight its
-- lines in the log pane.  'Nothing' is a no-op.
selectRef :: Maybe LogRef -> IDEAction
selectRef (Just ref) = do
    logRefs <- readIDE allLogRefs
    case elemIndex ref logRefs of
        Nothing -> liftIO $ debugM "leksah" "no index" >> return ()
        Just index -> do
            mbBuf <- selectSourceBuf (logRefFullFilePath ref)
            case mbBuf of
                Just buf -> markRefInSourceBuf index buf ref True
                Nothing -> liftIO $ debugM "leksah" "no buf" >> return ()
    log :: Log.IDELog <- Log.getLog
    Log.markErrorInLog log (logLines ref)
selectRef Nothing = return ()
-- | Run an action for every (index, log reference) pair against each
-- open buffer whose file matches the reference's (canonicalised) file
-- path.  Rewritten to zip the indices with the references directly,
-- removing the original's repeated partial '(!!)' indexing and
-- 'length'-based loop (quadratic and partial).
forOpenLogRefs :: (Int -> LogRef -> IDEBuffer -> IDEAction) -> IDEAction
forOpenLogRefs f = do
    logRefs <- readIDE allLogRefs
    allBufs <- allBuffers
    forM_ (zip [0 ..] logRefs) $ \(index, ref) -> do
        let fp = logRefFullFilePath ref
        fpc <- liftIO $ myCanonicalizePath fp
        -- A buffer matches when its file name canonicalises to the same path.
        let matchesRef buf = case fileName buf of
                                 Just fn -> equalFilePath fpc fn
                                 Nothing -> False
        forM_ (filter matchesRef allBufs) (f index ref)
-- | Highlight every known log reference in all open source buffers.
markLogRefs :: IDEAction
markLogRefs =
    forOpenLogRefs $ \index logRef buf -> markRefInSourceBuf index buf logRef False
-- | Remove the highlight tag of every known log reference from all
-- open source buffers.
unmarkLogRefs :: IDEAction
unmarkLogRefs =
    forOpenLogRefs $ \index logRef (IDEBuffer {sourceView = sv}) -> do
        buffer <- getBuffer sv
        removeTagByName buffer (T.pack $ show (logRefType logRef) ++ show index)
-- | Replace the error portion of the log-reference list (keeping the
-- current breakpoints and contexts), refresh all source marks, clear
-- the current-error selection and notify listeners.
setErrorList :: [LogRef] -> IDEAction
setErrorList errs = do
    unmarkLogRefs
    breaks <- readIDE breakpointRefs
    contexts <- readIDE contextRefs
    modifyIDE_ (\ide -> ide{allLogRefs = errs ++ breaks ++ contexts})
    setCurrentError Nothing
    markLogRefs
    triggerEventIDE ErrorChanged
    return ()
-- | Replace the breakpoint portion of the log-reference list (keeping
-- the current errors and contexts), refresh all source marks and notify
-- listeners.  Mirrors 'setErrorList'; the unused @ideR <- ask@ binding
-- was removed for consistency with it.
setBreakpointList :: [LogRef] -> IDEAction
setBreakpointList breaks = do
    unmarkLogRefs
    errs <- readIDE errorRefs
    contexts <- readIDE contextRefs
    modifyIDE_ (\ide -> ide{allLogRefs = errs ++ breaks ++ contexts})
    setCurrentBreak Nothing
    markLogRefs
    triggerEventIDE BreakpointChanged
    return ()
-- | Append new references of any kind to the log-reference list,
-- refresh all source marks and fire every relevant change event.
-- The unused @ideR <- ask@ binding was removed.
addLogRefs :: [LogRef] -> IDEAction
addLogRefs refs = do
    unmarkLogRefs
    modifyIDE_ (\ide -> ide{allLogRefs = (allLogRefs ide) ++ refs})
    setCurrentError Nothing
    markLogRefs
    triggerEventIDE ErrorChanged
    triggerEventIDE BreakpointChanged
    triggerEventIDE TraceChanged
    return ()
-- | Move the current error to the next one in the list (clamping at
-- the last entry) and jump to it.  No-op when there are no errors.
nextError :: IDEAction
nextError = do
    errs      <- readIDE errorRefs
    mbCurrent <- readIDE currentError
    unless (null errs) $ do
        let newIndex = case mbCurrent >>= (`elemIndex` errs) of
                           Nothing -> 0
                           Just n | n + 1 < length errs -> n + 1
                                  | otherwise           -> n
            target   = errs !! newIndex
        setCurrentError (Just target)
        selectRef (Just target)
-- | Move the current error to the previous one in the list (clamping
-- at the first entry) and jump to it.  No-op when there are no errors.
previousError :: IDEAction
previousError = do
    errs      <- readIDE errorRefs
    mbCurrent <- readIDE currentError
    unless (null errs) $ do
        let newIndex = case mbCurrent >>= (`elemIndex` errs) of
                           Nothing -> length errs - 1
                           Just n | n > 0     -> n - 1
                                  | otherwise -> 0
            target   = errs !! newIndex
        setCurrentError (Just target)
        selectRef (Just target)
-- | Move the current breakpoint to the next one in the list (clamping
-- at the last entry) and jump to it.  No-op without breakpoints.
nextBreakpoint :: IDEAction
nextBreakpoint = do
    breaks    <- readIDE breakpointRefs
    mbCurrent <- readIDE currentBreak
    unless (null breaks) $ do
        let newIndex = case mbCurrent >>= (`elemIndex` breaks) of
                           Nothing -> 0
                           Just n | n + 1 < length breaks -> n + 1
                                  | otherwise             -> n
            target   = breaks !! newIndex
        setCurrentBreak (Just target)
        selectRef (Just target)
-- | Move the current breakpoint to the previous one in the list
-- (clamping at the first entry) and jump to it.
previousBreakpoint :: IDEAction
previousBreakpoint = do
    breaks    <- readIDE breakpointRefs
    mbCurrent <- readIDE currentBreak
    unless (null breaks) $ do
        let newIndex = case mbCurrent >>= (`elemIndex` breaks) of
                           Nothing -> length breaks - 1
                           Just n | n > 0     -> n - 1
                                  | otherwise -> 0
            target   = breaks !! newIndex
        setCurrentBreak (Just target)
        selectRef (Just target)
-- | Move the current debug context to the next one in the list
-- (clamping at the last entry) and jump to it.
nextContext :: IDEAction
nextContext = do
    contexts  <- readIDE contextRefs
    mbCurrent <- readIDE currentContext
    unless (null contexts) $ do
        let newIndex = case mbCurrent >>= (`elemIndex` contexts) of
                           Nothing -> 0
                           Just n | n + 1 < length contexts -> n + 1
                                  | otherwise               -> n
            target   = contexts !! newIndex
        setCurrentContext (Just target)
        selectRef (Just target)
-- | Move the current debug context to the previous one in the list
-- (clamping at the first entry) and jump to it.
previousContext :: IDEAction
previousContext = do
    contexts  <- readIDE contextRefs
    mbCurrent <- readIDE currentContext
    unless (null contexts) $ do
        let newIndex = case mbCurrent >>= (`elemIndex` contexts) of
                           Nothing -> length contexts - 1
                           Just n | n > 0     -> n - 1
                                  | otherwise -> 0
            target   = contexts !! newIndex
        setCurrentContext (Just target)
        selectRef (Just target)
-- | Jump to the most recent debug context.  No-op when there are no
-- contexts.  The original read of @currentContext@ was unused and has
-- been removed.
lastContext :: IDEAction
lastContext = do
    contexts <- readIDE contextRefs
    unless (null contexts) $ do
        let new = last contexts
        setCurrentContext (Just new)
        selectRef (Just new)
#if MIN_VERSION_ghc(7,0,1)
fixColumn c = max 0 (c - 1)
#else
fixColumn = id
#endif
-- | Parse a source-file path from compiler output (everything up to the
-- next ':'), stripping the "dist/build/tmp-<n>/" prefix that
-- @cabal haddock@ prepends.
srcPathParser :: CharParser () FilePath
srcPathParser = try (do
        symbol "dist/build/tmp-" -- Support for cabal haddock
        many digit
        char '/'
        many (noneOf ":"))
    <|> many (noneOf ":")
-- | Parse a GHC source span in any of the three formats GHC emits:
-- @file:(l1,c1)-(l2,c2)@, @file:l:c1-c2@ or @file:l:c@.  Columns are
-- adjusted by 'fixColumn' to account for GHC >= 7.0 one-based columns.
srcSpanParser :: CharParser () SrcSpan
srcSpanParser = try (do
        filePath <- srcPathParser
        char ':'
        char '('
        beginLine <- int
        char ','
        beginCol <- int
        char ')'
        char '-'
        char '('
        endLine <- int
        char ','
        endCol <- int
        char ')'
        return $ SrcSpan filePath beginLine (fixColumn beginCol) endLine (fixColumn endCol))
    <|> try (do
        filePath <- srcPathParser
        char ':'
        line <- int
        char ':'
        beginCol <- int
        char '-'
        endCol <- int
        return $ SrcSpan filePath line (fixColumn beginCol) line (fixColumn endCol))
    <|> try (do
        filePath <- srcPathParser
        char ':'
        line <- int
        char ':'
        col <- int
        return $ SrcSpan filePath line (fixColumn col) line (fixColumn col))
    <?> "srcSpanParser"
-- | Parse a doctest failure line of the form
-- "### Failure in <file>:<line>: <text>", returning a span covering
-- the failing expression plus the failure message.
docTestParser :: CharParser () (SrcSpan, Text)
docTestParser = try (do
        symbol "###"
        whiteSpace
        symbol "Failure"
        whiteSpace
        symbol "in"
        whiteSpace
        file <- many (noneOf ":")
        char ':'
        line <- int
        char ':'
        whiteSpace
        text <- T.pack <$> many anyChar
        return ((SrcSpan file line 7 line (T.length text - 7)), "Failure in " <> text))
    <?> "docTestParser"
-- | Classification of a single line of compiler output.
data BuildError = BuildLine                          -- ^ "[n of m]" progress line
                | EmptyLine
                | ErrorLine SrcSpan LogRefType Text  -- ^ start of an error\/warning with a span
                | WarningLine Text
                | OtherLine Text                     -- ^ continuation or unrecognised text
-- | Classify one line of compiler output (see 'BuildError'):
-- progress lines, located errors\/warnings, bare warnings, empty and
-- other lines.
buildLineParser :: CharParser () BuildError
buildLineParser = try (do
        char '['
        int
        symbol "of"
        int
        char ']'
        many (anyChar)
        return BuildLine)
    <|> try (do
        whiteSpace
        span <- srcSpanParser
        char ':'
        whiteSpace
        refType <- try (do
                symbol "Warning:"
                return WarningRef)
            <|> return ErrorRef
        text <- T.pack <$> many anyChar
        return (ErrorLine span refType text))
    <|> try (do
        whiteSpace
        eof
        return EmptyLine)
    <|> try (do
        whiteSpace
        symbol "Warning:"
        text <- T.pack <$> many anyChar
        return (WarningLine ("Warning:" <> text)))
    <|> try (do
        text <- T.pack <$> many anyChar
        eof
        return (OtherLine text))
    <?> "buildLineParser"
-- | A breakpoint as reported by GHCi: its number and its source span.
data BreakpointDescription = BreakpointDescription Int SrcSpan
-- | Parse a line of GHCi ":show breaks" output:
-- "[<n>] <module> <span>".  Fix: the parser label was copy-pasted from
-- 'buildLineParser'; it now correctly reports "breaksLineParser" in
-- parse-error messages.
breaksLineParser :: CharParser () BreakpointDescription
breaksLineParser = try (do
        char '['
        n <- int
        char ']'
        whiteSpace
        many (noneOf " ")
        whiteSpace
        span <- srcSpanParser
        return (BreakpointDescription n span))
    <?> "breaksLineParser"
-- | Parse GHCi's confirmation line
-- "Breakpoint <n> activated at <span>".
setBreakpointLineParser :: CharParser () BreakpointDescription
setBreakpointLineParser = try (do
        symbol "Breakpoint"
        whiteSpace
        n <- int
        whiteSpace
        symbol "activated"
        whiteSpace
        symbol "at"
        whiteSpace
        span <- srcSpanParser
        return (BreakpointDescription n span))
    <?> "setBreakpointLineParser"
-- Token parsers built from the default (empty) language definition,
-- shared by the line parsers in this module.
lexer = P.makeTokenParser emptyDef
lexeme = P.lexeme lexer
whiteSpace = P.whiteSpace lexer
hexadecimal = P.hexadecimal lexer
symbol = P.symbol lexer
identifier = P.identifier lexer
colon = P.colon lexer
-- Parse an integer literal, narrowing from 'Integer'.
int = fmap fromInteger $ P.integer lexer
-- | 'defaultLineLogger'' lifted into the IDE monad.
defaultLineLogger :: IDELog -> LogLaunch -> ToolOutput -> IDEM Int
defaultLineLogger log logLaunch = liftIO . defaultLineLogger' log logLaunch
-- | Append one piece of tool output to the log, tagging it by kind
-- (input\/output\/error), and return the log line number.  Prompts and
-- exits are rendered as 41-character separator frames ('-' for
-- success, '=' for failure, with the exit code embedded).
defaultLineLogger' :: IDELog -> LogLaunch -> ToolOutput -> IO Int
defaultLineLogger' log logLaunch out = do
    case out of
        ToolInput line -> appendLog' (line <> "\n") InputTag
        ToolOutput line -> appendLog' (line <> "\n") LogTag
        ToolError line -> appendLog' (line <> "\n") ErrorTag
        ToolPrompt line -> do
            unless (T.null line) $ appendLog' (line <> "\n") LogTag >> return ()
            appendLog' (T.pack (concat (take 20 (repeat "- "))) <> "-\n") FrameTag
        ToolExit ExitSuccess -> appendLog' (T.pack (take 41 (repeat '-')) <> "\n") FrameTag
        ToolExit (ExitFailure 1) -> appendLog' (T.pack (take 41 (repeat '=')) <> "\n") FrameTag
        ToolExit (ExitFailure n) -> appendLog' (T.pack (take 41 ("========== " ++ show n <> " " ++ repeat '=')) <> "\n") FrameTag
    where
        appendLog' = Log.appendLog log logLaunch
-- | 'paneLineLogger'' lifted into the IDE monad.
paneLineLogger :: IDELog -> LogLaunch -> ToolOutput -> IDEM (Maybe Text)
paneLineLogger log logLaunch = liftIO . paneLineLogger' log logLaunch
-- | Like 'defaultLineLogger'', but additionally returns the text of
-- plain output lines ('Just' only for 'ToolOutput') so callers can
-- collect them for the output pane.
paneLineLogger' :: IDELog -> LogLaunch -> ToolOutput -> IO (Maybe Text)
paneLineLogger' log logLaunch out = do
    case out of
        ToolInput line -> appendLog' (line <> "\n") InputTag >> return Nothing
        ToolOutput line -> appendLog' (line <> "\n") LogTag >> return (Just line)
        ToolError line -> appendLog' (line <> "\n") ErrorTag >> return Nothing
        ToolPrompt line -> do
            unless (T.null line) $ appendLog' (line <> "\n") LogTag >> return ()
            appendLog' (T.pack (concat (take 20 (repeat "- "))) <> "-\n") FrameTag
            return Nothing
        ToolExit ExitSuccess -> appendLog' (T.pack (take 41 (repeat '-')) <> "\n") FrameTag >> return Nothing
        ToolExit (ExitFailure 1) -> appendLog' (T.pack (take 41 (repeat '=')) <> "\n") FrameTag >> return Nothing
        ToolExit (ExitFailure n) -> appendLog' (T.pack (take 41 ("========== " ++ show n ++ " " ++ repeat '=')) <> "\n") FrameTag >> return Nothing
    where
        appendLog' = Log.appendLog log logLaunch
-- | Conduit sink that feeds every piece of tool output through the
-- given line logger (synchronised with the GUI thread), resets the
-- status bar when the stream ends and returns the logger results.
logOutputLines :: LogLaunch -- ^ logLaunch
               -> (IDELog -> LogLaunch -> ToolOutput -> IDEM a)
               -> C.Sink ToolOutput IDEM [a]
logOutputLines logLaunch lineLogger = do
    log :: Log.IDELog <- lift $ postSyncIDE Log.getLog
    results <- (CL.mapM $ postSyncIDE . lineLogger log logLaunch) =$ CL.consume
    lift $ triggerEventIDE (StatusbarChanged [CompartmentState "", CompartmentBuild False])
    return results
-- | 'logOutputLines' with the results discarded.
logOutputLines_ :: LogLaunch
                -> (IDELog -> LogLaunch -> ToolOutput -> IDEM a)
                -> C.Sink ToolOutput IDEM ()
logOutputLines_ logLaunch lineLogger = do
    _ <- logOutputLines logLaunch lineLogger
    return ()
-- | 'logOutputLines_' on the default log launch.
logOutputLines_Default :: (IDELog -> LogLaunch -> ToolOutput -> IDEM a)
                       -> C.Sink ToolOutput IDEM ()
logOutputLines_Default lineLogger = do
    launch <- lift getDefaultLogLaunch
    logOutputLines_ launch lineLogger
-- | Log all tool output to the given launch using the default formatter.
logOutput :: LogLaunch
          -> C.Sink ToolOutput IDEM ()
logOutput logLaunch = do
    _ <- logOutputLines logLaunch defaultLineLogger
    return ()
-- | 'logOutput' on the default log launch.
logOutputDefault :: C.Sink ToolOutput IDEM ()
logOutputDefault = lift getDefaultLogLaunch >>= logOutput
-- | Log tool output to the default launch and mirror the accumulated
-- plain-output lines (kept across calls in @buffer@) into the WebKit
-- output pane — unless an explicit URI is currently shown there.
logOutputPane :: Text -> IORef [Text] -> C.Sink ToolOutput IDEM ()
logOutputPane command buffer = do
    defaultLogLaunch <- lift $ getDefaultLogLaunch
    result <- catMaybes <$> logOutputLines defaultLogLaunch paneLineLogger
    when (not $ null result) $ do
        new <- liftIO . atomicModifyIORef buffer $ \x -> let new = x ++ result in (new, new)
        mbURI <- lift $ readIDE autoURI
        unless (isJust mbURI) . lift . postSyncIDE . setOutput command $ T.unlines new
-- | Sink that consumes build output, logs every line with an
-- appropriate tag, collects errors\/warnings (including multi-line
-- continuations and doctest failures) into 'LogRef's, publishes them
-- via 'setErrorList' and optionally jumps to the first error.
-- The state threaded through the fold is
-- (log pane, inside-an-error?, inside-a-doctest-failure?, refs so far).
logOutputForBuild :: IDEPackage
                  -> Bool
                  -> Bool
                  -> C.Sink ToolOutput IDEM [LogRef]
logOutputForBuild package backgroundBuild jumpToWarnings = do
    -- NOTE(review): leftover debug trace; consider removing.
    liftIO $ putStrLn $ "logOutputForBuild"
    log <- lift getLog
    logLaunch <- lift $ Log.getDefaultLogLaunch
    (_, _, _, errs) <- CL.foldM (readAndShow logLaunch) (log, False, False, [])
    ideR <- lift ask
    liftIO $ postGUISync $ reflectIDE (do
        setErrorList $ reverse errs
        triggerEventIDE (Sensitivity [(SensitivityError,not (null errs))])
        let errorNum = length (filter isError errs)
        let warnNum = length errs - errorNum
        triggerEventIDE (StatusbarChanged [CompartmentState
            (T.pack $ show errorNum ++ " Errors, " ++ show warnNum ++ " Warnings"), CompartmentBuild False])
        unless (backgroundBuild || (not jumpToWarnings && errorNum == 0)) nextError
        return errs) ideR
    where
        -- Fold step: classify one piece of output and update the state.
        readAndShow :: LogLaunch -> (IDELog, Bool, Bool, [LogRef]) -> ToolOutput -> IDEM (IDELog, Bool, Bool, [LogRef])
        readAndShow logLaunch (log, inError, inDocTest, errs) output = do
            ideR <- ask
            liftIO $ postGUISync $ case output of
                ToolError line -> do
                    let parsed = parse buildLineParser "" $ T.unpack line
                    let nonErrorPrefixes = ["Linking ", "ar:", "ld:", "ld warning:"]
                    -- Progress and linker lines are informational, not errors.
                    tag <- case parsed of
                        Right BuildLine -> return InfoTag
                        Right (OtherLine text) | "Linking " `T.isPrefixOf` text -> do
                            -- when backgroundBuild $ lift interruptProcess
                            reflectIDE (do
                                setErrorList $ reverse errs
                                ) ideR
                            return InfoTag
                        Right (OtherLine text) | any (`T.isPrefixOf` text) nonErrorPrefixes -> do
                            return InfoTag
                        _ -> return ErrorTag
                    lineNr <- Log.appendLog log logLaunch (line <> "\n") tag
                    case (parsed, errs) of
                        (Left e,_) -> do
                            sysMessage Normal . T.pack $ show e
                            return (log, False, False, errs)
                        (Right ne@(ErrorLine span refType str),_) ->
                            return (log, True, False, ((LogRef span package str (lineNr,lineNr) refType):errs))
                        -- Continuation lines are appended to the most recent ref
                        -- while we are still inside an error report.
                        (Right (OtherLine str1),(LogRef span rootPath str (l1,l2) refType):tl) ->
                            if inError
                                then return (log, True, False, ((LogRef span
                                                                rootPath
                                                                (if T.null str
                                                                    then line
                                                                    else str <> "\n" <> line)
                                                                (l1,lineNr) refType) : tl))
                                else return (log, False, False, errs)
                        (Right (WarningLine str1),(LogRef span rootPath str (l1,l2) isError):tl) ->
                            if inError
                                then return (log, True, False, ((LogRef span
                                                                rootPath
                                                                (if T.null str
                                                                    then line
                                                                    else str <> "\n" <> line)
                                                                (l1,lineNr) WarningRef) : tl))
                                else return (log, False, False, errs)
                        otherwise -> return (log, False, False, errs)
                ToolOutput line -> do
                    -- stdout may carry doctest failures; everything else is plain log.
                    case (parse docTestParser "" $ T.unpack line, inDocTest, errs) of
                        (Right (span, exp), _, _) -> do
                            logLn <- Log.appendLog log logLaunch (line <> "\n") ErrorTag
                            return (log, inError, True, LogRef span
                                                        package
                                                        exp
                                                        (logLn,logLn) ErrorRef : errs)
                        (_, True, (LogRef span rootPath str (l1,l2) refType):tl) -> do
                            logLn <- Log.appendLog log logLaunch (line <> "\n") ErrorTag
                            return (log, inError, inDocTest, LogRef span
                                                             rootPath
                                                             (str <> "\n" <> line)
                                                             (l1,logLn) ErrorRef : tl)
                        _ -> do
                            Log.appendLog log logLaunch (line <> "\n") LogTag
                            return (log, inError, False, errs)
                ToolInput line -> do
                    Log.appendLog log logLaunch (line <> "\n") InputTag
                    return (log, inError, inDocTest, errs)
                ToolPrompt line -> do
                    unless (T.null line) $ Log.appendLog log logLaunch (line <> "\n") LogTag >> return ()
                    let errorNum = length (filter isError errs)
                    let warnNum = length errs - errorNum
                    case errs of
                        [] -> defaultLineLogger' log logLaunch output
                        _ -> Log.appendLog log logLaunch (T.pack $ "- - - " ++ show errorNum ++ " errors - "
                                ++ show warnNum ++ " warnings - - -\n") FrameTag
                    return (log, inError, inDocTest, errs)
                ToolExit _ -> do
                    let errorNum = length (filter isError errs)
                    let warnNum = length errs - errorNum
                    case errs of
                        [] -> defaultLineLogger' log logLaunch output
                        _ -> Log.appendLog log logLaunch (T.pack $ "----- " ++ show errorNum ++ " errors -- "
                                ++ show warnNum ++ " warnings -----\n") FrameTag
                    return (log, inError, inDocTest, errs)
--logOutputLines :: Text -- ^ logLaunch
-- -> (LogLaunch -> ToolOutput -> IDEM a)
-- -> [ToolOutput]
-- -> IDEM [a]
-- | Sink for GHCi ":show breaks" output: every line that parses as a
-- breakpoint description ('breaksLineParser') becomes a 'LogRef'; the
-- collected set replaces the current breakpoint list.
logOutputForBreakpoints :: IDEPackage
                        -> LogLaunch -- ^ loglaunch
                        -> C.Sink ToolOutput IDEM ()
logOutputForBreakpoints package logLaunch = do
    breaks <- logOutputLines logLaunch (\log logLaunch out -> do
        case out of
            ToolOutput line -> do
                logLineNumber <- liftIO $ Log.appendLog log logLaunch (line <> "\n") LogTag
                case parse breaksLineParser "" $ T.unpack line of
                    Right (BreakpointDescription n span) ->
                        return $ Just $ LogRef span package line (logLineNumber, logLineNumber) BreakpointRef
                    _ -> return Nothing
            _ -> do
                defaultLineLogger log logLaunch out
                return Nothing)
    lift $ setBreakpointList $ catMaybes breaks
-- | Sink for GHCi ":break" confirmations: each
-- "Breakpoint n activated at ..." line becomes a 'LogRef' which is
-- ADDED to the existing reference list (contrast 'logOutputForBreakpoints',
-- which replaces it).
logOutputForSetBreakpoint :: IDEPackage
                          -> LogLaunch -- ^ loglaunch
                          -> C.Sink ToolOutput IDEM ()
logOutputForSetBreakpoint package logLaunch = do
    breaks <- logOutputLines logLaunch (\log logLaunch out -> do
        case out of
            ToolOutput line -> do
                logLineNumber <- liftIO $ Log.appendLog log logLaunch (line <> "\n") LogTag
                case parse setBreakpointLineParser "" $ T.unpack line of
                    Right (BreakpointDescription n span) ->
                        return $ Just $ LogRef span package line (logLineNumber, logLineNumber) BreakpointRef
                    _ -> return Nothing
            _ -> do
                defaultLineLogger log logLaunch out
                return Nothing)
    lift $ addLogRefs $ catMaybes breaks
-- | 'logOutputForSetBreakpoint' on the default log launch.
logOutputForSetBreakpointDefault :: IDEPackage
                                 -> C.Sink ToolOutput IDEM ()
logOutputForSetBreakpointDefault package =
    lift getDefaultLogLaunch >>= logOutputForSetBreakpoint package
-- | Generic sink for debugger context output: @getContexts@ extracts
-- any source spans from a line; the last span of the last matching
-- line becomes the new current context (via 'lastContext').
logOutputForContext :: IDEPackage
                    -> LogLaunch -- ^ loglaunch
                    -> (Text -> [SrcSpan])
                    -> C.Sink ToolOutput IDEM ()
logOutputForContext package loglaunch getContexts = do
    refs <- fmap catMaybes $ logOutputLines loglaunch (\log logLaunch out -> do
        case out of
            ToolOutput line -> do
                logLineNumber <- liftIO $ Log.appendLog log logLaunch (line <> "\n") LogTag
                let contexts = getContexts line
                if null contexts
                    then return Nothing
                    else return $ Just $ LogRef (last contexts) package line (logLineNumber, logLineNumber) ContextRef
            _ -> do
                defaultLineLogger log logLaunch out
                return Nothing)
    lift $ unless (null refs) $ do
        addLogRefs [last refs]
        lastContext
-- | Track the debugger's live position from \"Stopped at <span>\" output.
logOutputForLiveContext :: IDEPackage
                        -> LogLaunch -- ^ loglaunch
                        -> C.Sink ToolOutput IDEM ()
logOutputForLiveContext package logLaunch = logOutputForContext package logLaunch (getContexts . T.unpack)
  where
    -- Recurses over every suffix of the line, so "Stopped at " is found
    -- anywhere in the line (not just at the start), and multiple
    -- occurrences all contribute spans.
    getContexts [] = []
    getContexts line@(x:xs) = case stripPrefix "Stopped at " line of
                                Just rest -> case parse srcSpanParser "" rest of
                                                Right desc -> desc : getContexts xs
                                                _ -> getContexts xs
                                _ -> getContexts xs
-- | 'logOutputForLiveContext' specialised to the default log launch.
logOutputForLiveContextDefault :: IDEPackage
                               -> C.Sink ToolOutput IDEM ()
logOutputForLiveContextDefault package =
    lift getDefaultLogLaunch >>= logOutputForLiveContext package
-- | Track history navigation from \"Logged breakpoint at <span>\" output.
-- Unlike the live variant, the prefix is only matched at the start of the
-- line and at most one span is produced per line.
logOutputForHistoricContext :: IDEPackage
                            -> LogLaunch -- ^ loglaunch
                            -> C.Sink ToolOutput IDEM ()
logOutputForHistoricContext package logLaunch = logOutputForContext package logLaunch getContexts
  where
    getContexts line = case T.stripPrefix "Logged breakpoint at " line of
                           Just rest -> case parse srcSpanParser "" $ T.unpack rest of
                                            Right desc -> [desc]
                                            _ -> []
                           _ -> []
-- | 'logOutputForHistoricContext' specialised to the default log launch.
logOutputForHistoricContextDefault :: IDEPackage
                                   -> C.Sink ToolOutput IDEM ()
logOutputForHistoricContextDefault package =
    lift getDefaultLogLaunch >>= logOutputForHistoricContext package
| 573/leksah | src/IDE/LogRef.hs | gpl-2.0 | 27,110 | 138 | 24 | 9,698 | 7,417 | 3,689 | 3,728 | 607 | 20 |
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Boot.Internal
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- Internal use for Yi.Boot
module Yi.Boot.Internal where
import Config.Dyre.Relaunch (relaunchWithBinaryState)
import Control.Monad.Base (liftBase)
import Control.Monad.State (get)
import Yi.Types (withEditor)
import Yi.Keymap (YiM, withUI)
import Yi.UI.Common (end)
-- | "reloads" the configuration
--
-- Serializes the editor state and relaunches Yi using the serialized
-- state. The launch of Yi will result in recompilation of the user's
-- custom Yi. This, in effect, "reloads" the configuration.
-- | Grab the current editor state, shut the UI down (without quitting the
-- process), then relaunch the Yi binary carrying the serialised state.
reload :: YiM ()
reload =
    withEditor get >>= \editor ->
        withUI (`end` False) >>
            liftBase (relaunchWithBinaryState (Just editor) Nothing)
| atsukotakahashi/wi | src/library/Yi/Boot/Internal.hs | gpl-2.0 | 855 | 0 | 10 | 140 | 143 | 88 | 55 | 13 | 1 |
module Test.QuickFuzz.Gen.Image
( module Test.QuickFuzz.Gen.Image.SVG,
module Test.QuickFuzz.Gen.Image.Gif,
module Test.QuickFuzz.Gen.Image.Png,
module Test.QuickFuzz.Gen.Image.Tiff
) where
import Test.QuickFuzz.Gen.Image.SVG
import Test.QuickFuzz.Gen.Image.Gif
import Test.QuickFuzz.Gen.Image.Png
import Test.QuickFuzz.Gen.Image.Tiff
| elopez/QuickFuzz | src/Test/QuickFuzz/Gen/Image.hs | gpl-3.0 | 343 | 0 | 5 | 29 | 78 | 59 | 19 | 9 | 0 |
{-
factorial n = if n>1
then n * factorial (n-1)
else 1
-}
-- Cuando una función no tiene parametros es una definición o un nombre
nombreDefinicion = "Hola mundo, soy una definicion"
{-
Hay diferentes formas de construir funciones, una de ellas es a través de
patrones como en el ejemplo 1. La otra es a través de guards como en el ejemplo
2. Durante este último ejemplo hay un par de vertientes como es el uso de where
y let (ejemplos 3 y 4), con sus respectivas ventajas y desventajas.
-}
--Ejemplo 1
-- Ejemplo 1: factorial with guards.
-- The base case now covers n <= 0, so a negative argument returns 1
-- instead of recursing forever (the old 'factorial 0 = 1' pattern never
-- matched negative inputs).
factorial :: Integer -> Integer
factorial n
  | n <= 0    = 1
  | otherwise = n * factorial (n-1)
--Ejemplo 2
-- Ejemplo 2: factorial via guards.
-- The catch-all guard is spelled 'otherwise' (which is simply 'True')
-- as is idiomatic; it also makes n <= 1 return 1, so negatives are safe.
factorial1 :: Integer -> Integer
factorial1 a
  | a>1 = a * factorial1 (a-1)
  | otherwise = 1
{-
Para estos ejemplos usare otro programa diferente al ejemplo 2.
-}
-- Original
-- Classify a body-mass index into the tutorial's four categories.
imc :: Double -> String
imc indice =
  if indice <= 18.5
    then "Desnutrido"
    else if indice <= 25.0
      then "Perfecto"
      else if indice <= 30.0
        then "Gordito"
        else "Eres demasiado pesado"
--Con peso y altura en lugar del indice
-- Classify by weight and height instead of a precomputed index.
-- NOTE: the index 'peso / (altura^2)' is deliberately recomputed in every
-- guard — this is the "before" version that the tutorial refactors with a
-- 'where' clause in imc2 below.
imc1 :: Float -> Float -> String
imc1 altura peso
  | peso / (altura^2) <= 18.5 = "Desnutrido"
  | peso / (altura^2) <= 25.0 = "Perfecto"
  | peso / (altura^2) <= 30.0 = "Gordito"
  | otherwise = "Eres demasiado pesado"
--Ejemplo 3
-- Ejemplo 3: same computation as imc1, but the index and the three
-- thresholds are each named once in a 'where' clause, which is visible
-- from all guards.
imc2 :: Float -> Float -> String
imc2 altura peso
  | indice <= ligero = "Desnutrido"
  | indice <= normal = "Perfecto"
  | indice <= pesado = "Gordito"
  | otherwise = "Eres demasiado pesado"
  where indice = peso / (altura^2)
        ligero = 18.5
        normal = 25.0
        pesado = 30.0
--Ejemplo 4
{-
La aparente diferencia entre where y let es que let te permite escribir la
equivalencia antes y el where lo hace después, sin embargo, let lo puedes usar
en un monton de lugares, aunque no en los guards.
-}
listaConLet = (let a = 100; b = 200; c = 300 in a*b*c, let foo="Hey "; bar = "there!" in foo ++ bar)
--También podemos usar let en las list comprehension
-- Compute the BMI of every (weight, height) pair and keep only the
-- values of 25.0 or above.
calcularIMC :: (RealFloat a) => [(a,a)] -> [a]
calcularIMC = filter (>= 25.0) . map (\(w, h) -> w / (h ^ 2))
| rysard/cursos | haskell/forGreatGood/prueba1.hs | gpl-3.0 | 2,051 | 8 | 12 | 469 | 529 | 276 | 253 | 33 | 1 |
module Test where
import Test.HUnit
import TextProcessors
import TextStatistics
-- Shared fixture: six tokens; "to" and "be" each occur twice.
testtokens = ["to","be","or","not", "to", "be"]
-- 'ngrams 3' should yield every contiguous trigram, in order.
ngramstest =
    TestCase $ assertEqual "Trigrams"
        [["to","be","or"],["be","or","not"],["or","not","to"],["not","to","be"]]
        (ngrams 3 testtokens)
-- A word occurring twice must be counted twice.
freqlisttest1 =
    TestCase $ assertEqual "Frequency List, count of word 'be'"
        2
        (count (freqlist testtokens) "be")
-- A word occurring once must be counted once.
freqlisttest2 =
    TestCase $ assertEqual "Frequency List, count of word 'or'"
        1
        (count (freqlist testtokens) "or")
-- A word absent from the corpus counts as zero (no crash, no Maybe).
freqlisttest3 =
    TestCase $ assertEqual "Frequency List, count of absent word 'blah'"
        0
        (count (freqlist testtokens) "blah")
-- The total count must equal the number of tokens, duplicates included.
freqlisttest4 =
    TestCase $ assertEqual "Frequency List, total count"
        (length testtokens)
        (totalcount (freqlist testtokens))
-- Run all of the above as a single HUnit suite.
main = runTestTT $ TestList [ngramstest, freqlisttest1, freqlisttest2, freqlisttest3, freqlisttest4]
| proycon/HasCL | Test.hs | gpl-3.0 | 1,111 | 0 | 10 | 363 | 261 | 147 | 114 | 26 | 1 |
-- | See the question here:
--
-- https://stackoverflow.com/questions/47453657/extracting-an-id-with-ghc-generics
--
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Identifiable where
import Data.Proxy
import GHC.Generics
-- | Identifier name; a plain 'String' alias.
type Name = String
-- | The shared numeric identity that every *Id type below carries.
newtype Id = Id { _id :: Int } deriving (Generic, Eq, Show)
data VarId = VarId Name Id SortId deriving (Generic, Eq, Show)
data FuncId = FuncId Name Id [SortId] SortId deriving (Generic, Eq, Show)
data SortId = SortId Name Id deriving (Generic, Eq, Show)
-- ... and 20 more of these things
-- | Sum of all identifier kinds; each constructor wraps one *Id type.
data Identifier = IdVar VarId
                | IdFunc FuncId
                | IdSort SortId
                deriving (Generic, Eq, Show)
-- ... and 20 more of these things
-- Apparently this won't work using @default@ (somehow the type @e@ in the
-- signature is not equated to the type @e@ in the implementation).
-- | Generic 'Id' extraction: 'HasId' computes the path to the @Id@ field
-- of @e@'s generic representation at the type level; 'mkGetId' then walks
-- that path at the value level.
getId' :: forall e . (Generic e, Identifiable (HasId (Rep e)) (Rep e)) => e -> Id
getId' x = mkGetId (Proxy :: Proxy (HasId (Rep e))) (from x)
-- | A path into the generic representation tree.
data Crumbs = Here | GoLeft Crumbs | GoRight Crumbs
-- | Type-level search result: a path to the 'Id' field, or failure.
data Res = Found Crumbs | NotFound
-- | Combine the results of searching two subtrees.  When both subtrees
-- succeed the left one wins (first equation matches first).
type family Choose (a :: Res) (b :: Res) :: Res where
  Choose (Found a) b = Found (GoLeft a)
  Choose a (Found b) = Found (GoRight b)
  Choose NotFound NotFound = NotFound
-- The kind * -> * is needed to be able to work with the representations used
-- at Generics.
-- | Locate the field of type 'Id' inside a generic representation:
-- metadata nodes are skipped, 'K1 i Id' is the target, sums and products
-- search both branches via 'Choose'.
type family HasId (e :: * -> *) :: Res where
  HasId (M1 i c t) = HasId t
  HasId (K1 i Id) = Found Here
  HasId (l :+: r) = Choose (HasId l) (HasId r)
  HasId (l :*: r) = Choose (HasId l) (HasId r)
  HasId a = NotFound
-- | Extract the 'Id' from a generic representation @f@, guided by the
-- type-level path @res@ computed by 'HasId'.
--
-- NOTE(review): the sum instances below only match one constructor each
-- (e.g. 'L1' for 'GoLeft'); an 'R1' value paired with a 'GoLeft' path is a
-- runtime pattern-match failure — confirm every constructor of the source
-- type carries an 'Id' in the searched position.
class Identifiable (res :: Res) f where
  mkGetId :: Proxy res -> f e -> Id
-- Base case: we are standing on the 'Id' field itself.
instance Identifiable (Found Here) (K1 i Id) where
  mkGetId _ (K1 x) = x
instance Identifiable (Found c) a => Identifiable (Found (GoLeft c)) (a :+: b) where
  -- ScopedTypeVariables needed for having the @c@ in the implementation
  -- refer to the same @c@ in the instance constraints.
  mkGetId _ (L1 a) = mkGetId (Proxy :: Proxy (Found c)) a
instance Identifiable (Found c) b => Identifiable (Found (GoRight c)) (a :+: b) where
  mkGetId _ (R1 b) = mkGetId (Proxy :: Proxy (Found c)) b
instance Identifiable (Found c) a => Identifiable (Found (GoLeft c)) (a :*: b) where
  mkGetId _ (a :*: b) = mkGetId (Proxy :: Proxy (Found c)) a
instance Identifiable (Found c) b => Identifiable (Found (GoRight c)) (a :*: b) where
  mkGetId _ (a :*: b) = mkGetId (Proxy :: Proxy (Found c)) b
-- Metadata wrappers ('M1') are transparent: keep the same path.
instance Identifiable (Found c) a => Identifiable (Found c) (M1 i d a) where
  mkGetId _ (M1 x) = mkGetId (Proxy :: Proxy (Found c)) x
-- | Problem:
--
-- This works:
--
-- λ> getId' (SortId "hello" (Id 10))
-- > Id {_id = 10}
--
-- Bot this won't:
--
-- λ> getId' (IdSort (SortId "hello" (Id 10)))
--
--
-- > No instance for (Identifiable 'NotFound ...)
| capitanbatata/sandbox | typelevel-computations/src/Identifiable.hs | gpl-3.0 | 3,262 | 0 | 11 | 783 | 1,013 | 546 | 467 | 51 | 1 |
module Lamdu.Sugar.Convert.Inject
( convert
) where
import Control.Monad.Once (OnceT)
import qualified Lamdu.Calc.Term as V
import qualified Lamdu.Calc.Type as T
import qualified Lamdu.Expr.IRef as ExprIRef
import Lamdu.Sugar.Convert.Expression.Actions (addActions)
import qualified Lamdu.Sugar.Convert.Input as Input
import Lamdu.Sugar.Convert.Monad (ConvertM(..))
import qualified Lamdu.Sugar.Convert.Monad as ConvertM
import qualified Lamdu.Sugar.Convert.Tag as ConvertTag
import Lamdu.Sugar.Internal
import qualified Lamdu.Sugar.Internal.EntityId as EntityId
import Lamdu.Sugar.Types
import Revision.Deltum.Transaction (Transaction)
import Lamdu.Prelude
type T = Transaction
-- | Sugar an inject leaf (@'V.LInject' tag@).
--
-- The tag is converted via 'ConvertTag.ref'.  Choosing a different tag
-- writes a fresh @LInject@ leaf over the stored value and then re-runs the
-- type-protected setter.  The converted tag is passed through @c@ to build
-- the sugared 'Term', which is finally decorated with 'addActions'.
convert ::
    Monad m =>
    (TagRef InternalName (OnceT (T m)) (T m) -> Term v InternalName (OnceT (T m)) (T m) # Annotated (ConvertPayload m)) ->
    T.Tag ->
    Input.Payload m # V.Term ->
    ConvertM m (ExpressionU v m)
convert c tag exprPl =
    do
        protectedSetToVal <- ConvertM.typeProtectedSetToVal
        -- Re-checks the stored value's type after valI is rewritten.
        let typeProtect = protectedSetToVal (exprPl ^. Input.stored) valI
        -- Overwrite the inject's tag in place, then re-run type protection.
        let setTag newTag =
                do
                    V.LInject newTag & V.BLeaf & ExprIRef.writeValI valI
                    void typeProtect
        let resultInfo () = ConvertTag.TagResultInfo <$> EntityId.ofTag entityId <*> setTag
        ConvertTag.ref tag Nothing mempty (pure ()) resultInfo >>= ConvertM . lift
            <&> c
            >>= addActions (Ann exprPl (V.BLeaf (V.LInject tag)))
    where
        entityId = exprPl ^. Input.entityId
        valI = exprPl ^. Input.stored . ExprIRef.iref
| lamdu/lamdu | src/Lamdu/Sugar/Convert/Inject.hs | gpl-3.0 | 1,660 | 9 | 18 | 405 | 489 | 269 | 220 | -1 | -1 |
{-# OPTIONS_GHC -XBangPatterns #-}
module Assembler.SymbolTable
( generateSymbolTable
, generateSymbolTableIns
, initialTable
, replaceSymbol
, replaceSymbols
) where
import Assembler.Instruction
import Data.Either
import Data.List as List
import Data.Map as Map
import Debug.Trace
type SymbolTable = Map.Map Symbol Value
{- first pass: go through program counting instruction number without labels
- and insert all labels into SymbolTable.
- second pass: go through program again and replace all symbols with the
- following strategy: if this symbol is in the SymbolTable then
- replace it with the value in the SymbolTable. Otherwise add a
- new entry into the SymbolTable and increment the count of our
- next memory address
-}
{- first pass just look for Label Instructions and add them to the map -}
-- | First pass: walk every instruction, threading (table, current line)
-- through 'generateSymbolTableIns'.  Any 'Left' aborts the fold; on
-- success only the finished table is returned.
generateSymbolTable :: [Instruction ValueOrSymbol]
                    -> Either String SymbolTable
generateSymbolTable instructions =
  fst <$> List.foldl' step (Right (initialTable, 0)) instructions
  where
    -- Strict left fold: 'foldl' would pile up a chain of (>>=) thunks
    -- proportional to the program length before anything is forced.
    step acc ins = acc >>= \(table, line) -> generateSymbolTableIns ins table line
{- generate symbol table for a single instruction/line -}
{- generate symbol table for a single instruction/line -}
-- Label pseudo-instructions bind their symbol to the *current* line number
-- (validated by round-tripping through 'value') and do not advance the
-- line counter; blank lines don't advance it either; every real
-- instruction advances it by one.  The bang patterns keep the table and
-- counter evaluated as the fold above proceeds.
generateSymbolTableIns :: Instruction ValueOrSymbol
                       -> SymbolTable
                       -> Int
                       -> Either String (SymbolTable, Int)
generateSymbolTableIns (LS symbol') !table !line = case value $ show line of
  Right v -> Right (Map.insert symbol' v table, line)
  Left e -> Left e
generateSymbolTableIns EmptyLine !table !line = Right (table, line)
generateSymbolTableIns _ !table !line = Right (table, line+1)
-- | Predefined Hack assembler symbols: the virtual-machine pointers,
-- registers R0-R15, SCREEN and KBD.
--
-- The helpers previously used the partial @head $ rights [...]@, which
-- would die with an uninformative \"head: empty list\" if a builtin ever
-- failed to parse; they now report the underlying error message instead.
initialTable :: SymbolTable
initialTable =
  let v v' = either err id (value (show v'))
      s s' = either err id (symbol s')
      err e = error ("Assembler.SymbolTable.initialTable: invalid builtin: " ++ e)
  in Map.fromList [ (s "SP", v 0)
                  , (s "LCL", v 1)
                  , (s "ARG", v 2)
                  , (s "THIS", v 3)
                  , (s "THAT", v 4)
                  , (s "R0", v 0)
                  , (s "R1", v 1)
                  , (s "R2", v 2)
                  , (s "R3", v 3)
                  , (s "R4", v 4)
                  , (s "R5", v 5)
                  , (s "R6", v 6)
                  , (s "R7", v 7)
                  , (s "R8", v 8)
                  , (s "R9", v 9)
                  , (s "R10", v 10)
                  , (s "R11", v 11)
                  , (s "R12", v 12)
                  , (s "R13", v 13)
                  , (s "R14", v 14)
                  , (s "R15", v 15)
                  , (s "SCREEN", v 16384)
                  , (s "KBD", v 24576)]
-- Second pass, single instruction.  @count@ is the next free RAM address
-- for fresh variables (seeded with 16 by 'replaceSymbols').  A symbolic
-- A-instruction is resolved from the table if present; otherwise the
-- symbol is allocated at @count@ and the counter advances.  All other
-- instruction forms pass through unchanged.
replaceSymbol :: Instruction ValueOrSymbol
              -> SymbolTable
              -> Int
              -> Either String (Instruction Value, SymbolTable, Int)
replaceSymbol (AS (Left v)) table count = Right (AS v, table, count)
replaceSymbol (AS (Right symbol')) table count
  | Map.member symbol' table = Right (AS $ table Map.! symbol', table, count)
  | otherwise = case value $ show count of
      Left e -> Left e
      Right v -> Right (AS v, Map.insert symbol' v table, count+1)
replaceSymbol (CS a c j) table count = Right (CS a c j, table, count)
replaceSymbol (LS symbol') table count = Right (LS symbol', table, count)
replaceSymbol EmptyLine table count = Right (EmptyLine, table, count)
-- Second pass over the whole program: resolve every symbol, allocating
-- unknown ones starting at RAM address 16 (the first free address after
-- R0-R15).  The accumulator list is built in reverse (cons per step) and
-- flipped at the end; a right fold can't be used because the address
-- counter must flow left-to-right.  Note that @table@ and @count@ in @g@
-- shadow the outer bindings with the *updated* values from 'replaceSymbol'.
replaceSymbols :: SymbolTable
               -> [Instruction ValueOrSymbol]
               -> Either String [Instruction Value]
replaceSymbols table instructions =
  let f :: Instruction ValueOrSymbol
        -> ([Instruction Value], SymbolTable, Int)
        -> Either String ([Instruction Value], SymbolTable, Int)
      f i (prevInstructions, table, count) =
        let g :: (Instruction Value, SymbolTable, Int)
              -> ([Instruction Value], SymbolTable, Int)
            g (ins, table, count) = (ins:prevInstructions, table, count)
        in (replaceSymbol i table count) >>= (pure . g)
      -- Note instructions are reversed here but we can't use foldr
      -- because of count
      maybeAnswer = List.foldl (\e i -> e >>= (f i))
                               (Right ([], table, 16)) instructions
      getInstructions (e,_,_) = reverse e
  in maybeAnswer >>= (pure . getInstructions)
| rifung/nand2tetris | projects/compiler/src/Assembler/SymbolTable.hs | gpl-3.0 | 4,422 | 0 | 16 | 1,542 | 1,341 | 703 | 638 | -1 | -1 |
import Control.Applicative
import Data.Monoid
-- import our assertion function
import Assert
-- We can use the following type to simulate our own list
-- We can use the following type to simulate our own list:
-- 'Empty' is nil, 'Value x xs' is cons.
data List a = Empty | Value a (List a) deriving (Show, Eq)
-- Make the list a Functor: map over every element.
instance Functor List where
    fmap _ Empty = Empty
    fmap f (Value x xs) = Value (f x) (fmap f xs)
-- Write a function which appends one list on to another.
-- The first clause is an O(1) shortcut when the right operand is empty.
combineLists:: List a -> List a -> List a
combineLists as Empty = as
combineLists Empty bs = bs
combineLists (Value a as') bs = Value a (combineLists as' bs)
-- Make our list a Monoid: append with Empty as the identity.
-- NOTE(review): on GHC >= 8.4 'Monoid' requires a 'Semigroup' instance,
-- so this only compiles on older compilers as written — confirm target GHC.
instance Monoid (List a) where
    mempty = Empty
    mappend = combineLists
-- Make our list an Applicative: every function applied to every value,
-- matching the standard list Applicative's behaviour.
instance Applicative List where
    pure a = Value a Empty
    Empty <*> _ = Empty
    _ <*> Empty = Empty
    (Value f fs) <*> as = (f <$> as) <> (fs <*> as)
-- Make sure that the List obeys the laws for Applicative and Monoid
-- Monoid laws
-- Checks associativity and both identity laws for the Monoid instance.
testListObeysMonoidLaws :: IO()
testListObeysMonoidLaws =
  let
    xs = Value 1 (Value 2 (Value 3 (Value 4 Empty)))
    ys = Value 5 (Value 6 (Value 7 (Value 8 Empty)))
    zs = Value 9 (Value 10 (Value 11 (Value 12 Empty)))
  in do
    putStrLn "List should obey Monoid laws"
    assertEqual "(x <> y) <> z = x <> (y <> z)" ((xs <> ys) <> zs) (xs <> (ys <> zs))
    assertEqual "mempty <> x = x" xs (mempty <> xs)
    assertEqual "x <> mempty = x" xs (xs <> mempty)
-- Applicative laws: identity, homomorphism, interchange, composition,
-- plus the Functor/Applicative compatibility law as a bonus.
testListObeysApplicativeLaws :: IO()
testListObeysApplicativeLaws =
  let
    value = 1
    func = (+1)
    fs = Value (+1) (Value (+2) Empty)
    xs = Value 1 (Value 2 (Value 3 Empty))
  in do
    putStrLn "List should obey Applicative laws"
    assertEqual "pure id <*> v = v" xs (pure id <*> xs)
    assertEqual "pure f <*> pure x = pure (f x)" (pure (func value) :: List Int)
                                                 (pure func <*> pure value :: List Int)
    assertEqual "u <*> pure y = pure ($ y) <*> u" (fs <*> pure value :: List Int)
                                                  (pure ($ value) <*> fs :: List Int)
    assertEqual "pure (.) <*> u <*> v <*> w = u <*> (v <*> w)"
                (pure (.) <*> fs <*> fs <*> xs) (fs <*> (fs <*> xs))
    -- Bonus!
    assertEqual "fmap f x = pure f <*> x" (fmap func xs) (pure func <*> xs)
-- Create some lists of numbers of different lengths such as:
threeValues = Value 1 $ Value 2 $ Value 3 Empty
fourValues = Value 4 $ Value 5 $ Value 6 $ Value 7 Empty
stringList = Value "foo" $ Value "bar" $ Value "baz" Empty
-- Use <$> on the lists with a single-parameter function, such as:
plusTwo = (+2)
threeValuesPlusTwo = plusTwo <$> threeValues
fourValuesPlusTwo = plusTwo <$> fourValues
-- Use <$> and <*> on the lists with a binary function:
-- every left element is combined with every right element.
mul = (*)
threeValuesMultiplied = mul <$> threeValues <*> threeValues
fourValuesMultiplied = mul <$> fourValues <*> fourValues
stringListCombinations = (<>) <$> stringList <*> stringList
-- Create some lists of binary functions
binFuncs = Value (+) $ Value (*) $ Value (mod) Empty
-- Use <*> on the binary functions list and the number lists
threeValuesOnBinFuncs = binFuncs <*> threeValues <*> threeValues
| etrepat/learn-you-a-haskell-exercises | 11-functors-applicative-functors-and-monoids.hs | gpl-3.0 | 3,045 | 0 | 15 | 692 | 970 | 497 | 473 | 59 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetHTTPSProxies.SetSSLPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sets the SSL policy for TargetHttpsProxy. The SSL policy specifies the
-- server-side support for SSL features. This affects connections between
-- clients and the HTTPS proxy load balancer. They do not affect the
-- connection between the load balancer and the backends.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetHttpsProxies.setSslPolicy@.
module Network.Google.Resource.Compute.TargetHTTPSProxies.SetSSLPolicy
(
-- * REST Resource
TargetHTTPSProxiesSetSSLPolicyResource
-- * Creating a Request
, targetHTTPSProxiesSetSSLPolicy
, TargetHTTPSProxiesSetSSLPolicy
-- * Request Lenses
, thpsspRequestId
, thpsspProject
, thpsspPayload
, thpsspTargetHTTPSProxy
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetHttpsProxies.setSslPolicy@ method which the
-- 'TargetHTTPSProxiesSetSSLPolicy' request conforms to.
-- Servant-style encoding of:
-- POST compute/v1/projects/{project}/global/targetHttpsProxies/
--      {targetHttpsProxy}/setSslPolicy?requestId=...
-- (This module is auto-generated; prefer regenerating over hand edits.)
type TargetHTTPSProxiesSetSSLPolicyResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "global" :>
               "targetHttpsProxies" :>
                 Capture "targetHttpsProxy" Text :>
                   "setSslPolicy" :>
                     QueryParam "requestId" Text :>
                       QueryParam "alt" AltJSON :>
                         ReqBody '[JSON] SSLPolicyReference :>
                           Post '[JSON] Operation
-- | Sets the SSL policy for TargetHttpsProxy. The SSL policy specifies the
-- server-side support for SSL features. This affects connections between
-- clients and the HTTPS proxy load balancer. They do not affect the
-- connection between the load balancer and the backends.
--
-- /See:/ 'targetHTTPSProxiesSetSSLPolicy' smart constructor.
data TargetHTTPSProxiesSetSSLPolicy =
  TargetHTTPSProxiesSetSSLPolicy'
    { _thpsspRequestId :: !(Maybe Text) -- ^ optional; see 'thpsspRequestId'
    , _thpsspProject :: !Text -- ^ required project ID
    , _thpsspPayload :: !SSLPolicyReference -- ^ request body
    , _thpsspTargetHTTPSProxy :: !Text -- ^ required proxy name
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetHTTPSProxiesSetSSLPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'thpsspRequestId'
--
-- * 'thpsspProject'
--
-- * 'thpsspPayload'
--
-- * 'thpsspTargetHTTPSProxy'
targetHTTPSProxiesSetSSLPolicy
    :: Text -- ^ 'thpsspProject'
    -> SSLPolicyReference -- ^ 'thpsspPayload'
    -> Text -- ^ 'thpsspTargetHTTPSProxy'
    -> TargetHTTPSProxiesSetSSLPolicy
targetHTTPSProxiesSetSSLPolicy pThpsspProject_ pThpsspPayload_ pThpsspTargetHTTPSProxy_ =
  TargetHTTPSProxiesSetSSLPolicy'
    { _thpsspRequestId = Nothing -- set via 'thpsspRequestId' if needed
    , _thpsspProject = pThpsspProject_
    , _thpsspPayload = pThpsspPayload_
    , _thpsspTargetHTTPSProxy = pThpsspTargetHTTPSProxy_
    }
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
-- Generated van Laarhoven lenses over the record fields above.
thpsspRequestId :: Lens' TargetHTTPSProxiesSetSSLPolicy (Maybe Text)
thpsspRequestId
  = lens _thpsspRequestId
      (\ s a -> s{_thpsspRequestId = a})
-- | Project ID for this request.
thpsspProject :: Lens' TargetHTTPSProxiesSetSSLPolicy Text
thpsspProject
  = lens _thpsspProject
      (\ s a -> s{_thpsspProject = a})
-- | Multipart request metadata.
thpsspPayload :: Lens' TargetHTTPSProxiesSetSSLPolicy SSLPolicyReference
thpsspPayload
  = lens _thpsspPayload
      (\ s a -> s{_thpsspPayload = a})
-- | Name of the TargetHttpsProxy resource whose SSL policy is to be set. The
-- name must be 1-63 characters long, and comply with RFC1035.
thpsspTargetHTTPSProxy :: Lens' TargetHTTPSProxiesSetSSLPolicy Text
thpsspTargetHTTPSProxy
  = lens _thpsspTargetHTTPSProxy
      (\ s a -> s{_thpsspTargetHTTPSProxy = a})
-- The response type is 'Operation'; argument order in 'go' must mirror
-- the captures/query params of the resource type above.
instance GoogleRequest TargetHTTPSProxiesSetSSLPolicy
         where
        type Rs TargetHTTPSProxiesSetSSLPolicy = Operation
        type Scopes TargetHTTPSProxiesSetSSLPolicy =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute"]
        requestClient TargetHTTPSProxiesSetSSLPolicy'{..}
          = go _thpsspProject _thpsspTargetHTTPSProxy
              _thpsspRequestId
              (Just AltJSON)
              _thpsspPayload
              computeService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy TargetHTTPSProxiesSetSSLPolicyResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/TargetHTTPSProxies/SetSSLPolicy.hs | mpl-2.0 | 5,945 | 0 | 18 | 1,280 | 568 | 342 | 226 | 94 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.HTTPSHealthChecks.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of HttpsHealthCheck resources available to the
-- specified project.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.httpsHealthChecks.list@.
module Network.Google.Resource.Compute.HTTPSHealthChecks.List
(
-- * REST Resource
HTTPSHealthChecksListResource
-- * Creating a Request
, httpsHealthChecksList
, HTTPSHealthChecksList
-- * Request Lenses
, hhclReturnPartialSuccess
, hhclOrderBy
, hhclProject
, hhclFilter
, hhclPageToken
, hhclMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.httpsHealthChecks.list@ method which the
-- 'HTTPSHealthChecksList' request conforms to.
-- Servant-style encoding of:
-- GET compute/v1/projects/{project}/global/httpsHealthChecks
--     ?returnPartialSuccess&orderBy&filter&pageToken&maxResults
-- (This module is auto-generated; prefer regenerating over hand edits.)
type HTTPSHealthChecksListResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "global" :>
               "httpsHealthChecks" :>
                 QueryParam "returnPartialSuccess" Bool :>
                   QueryParam "orderBy" Text :>
                     QueryParam "filter" Text :>
                       QueryParam "pageToken" Text :>
                         QueryParam "maxResults" (Textual Word32) :>
                           QueryParam "alt" AltJSON :>
                             Get '[JSON] HTTPSHealthCheckList
-- | Retrieves the list of HttpsHealthCheck resources available to the
-- specified project.
--
-- /See:/ 'httpsHealthChecksList' smart constructor.
data HTTPSHealthChecksList =
  HTTPSHealthChecksList'
    { _hhclReturnPartialSuccess :: !(Maybe Bool) -- ^ optional
    , _hhclOrderBy :: !(Maybe Text) -- ^ optional
    , _hhclProject :: !Text -- ^ required project ID
    , _hhclFilter :: !(Maybe Text) -- ^ optional
    , _hhclPageToken :: !(Maybe Text) -- ^ optional
    , _hhclMaxResults :: !(Textual Word32) -- ^ defaults to 500 in the smart constructor
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'HTTPSHealthChecksList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'hhclReturnPartialSuccess'
--
-- * 'hhclOrderBy'
--
-- * 'hhclProject'
--
-- * 'hhclFilter'
--
-- * 'hhclPageToken'
--
-- * 'hhclMaxResults'
httpsHealthChecksList
    :: Text -- ^ 'hhclProject'
    -> HTTPSHealthChecksList
httpsHealthChecksList pHhclProject_ =
  HTTPSHealthChecksList'
    { _hhclReturnPartialSuccess = Nothing
    , _hhclOrderBy = Nothing
    , _hhclProject = pHhclProject_
    , _hhclFilter = Nothing
    , _hhclPageToken = Nothing
    , _hhclMaxResults = 500 -- API default/maximum per 'hhclMaxResults' docs
    }
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
-- Lens over '_hhclReturnPartialSuccess' (unset by the smart constructor).
hhclReturnPartialSuccess :: Lens' HTTPSHealthChecksList (Maybe Bool)
hhclReturnPartialSuccess
  = lens _hhclReturnPartialSuccess
      (\ s a -> s{_hhclReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
-- Lens over '_hhclOrderBy' (see the haddock above for accepted values).
hhclOrderBy :: Lens' HTTPSHealthChecksList (Maybe Text)
hhclOrderBy
  = lens _hhclOrderBy (\ s a -> s{_hhclOrderBy = a})
-- | Project ID for this request.
hhclProject :: Lens' HTTPSHealthChecksList Text
hhclProject
  = lens _hhclProject (\ s a -> s{_hhclProject = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
-- Lens over '_hhclFilter' (filter-expression syntax documented above).
hhclFilter :: Lens' HTTPSHealthChecksList (Maybe Text)
hhclFilter
  = lens _hhclFilter (\ s a -> s{_hhclFilter = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
-- Lens over '_hhclPageToken'; feed back the response's nextPageToken here.
hhclPageToken :: Lens' HTTPSHealthChecksList (Maybe Text)
hhclPageToken
  = lens _hhclPageToken
      (\ s a -> s{_hhclPageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
-- '_Coerce' unwraps the 'Textual' newtype, so callers see a plain 'Word32'.
hhclMaxResults :: Lens' HTTPSHealthChecksList Word32
hhclMaxResults
  = lens _hhclMaxResults
      (\ s a -> s{_hhclMaxResults = a})
      . _Coerce
-- The response type is 'HTTPSHealthCheckList'; argument order in 'go'
-- must mirror the capture/query params of the resource type above.
instance GoogleRequest HTTPSHealthChecksList where
        type Rs HTTPSHealthChecksList = HTTPSHealthCheckList
        type Scopes HTTPSHealthChecksList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute",
               "https://www.googleapis.com/auth/compute.readonly"]
        requestClient HTTPSHealthChecksList'{..}
          = go _hhclProject _hhclReturnPartialSuccess
              _hhclOrderBy
              _hhclFilter
              _hhclPageToken
              (Just _hhclMaxResults)
              (Just AltJSON)
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy HTTPSHealthChecksListResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/HTTPSHealthChecks/List.hs | mpl-2.0 | 7,348 | 0 | 19 | 1,570 | 758 | 454 | 304 | 111 | 1 |
{-# LANGUAGE ApplicativeDo #-}
module Annotations where
import Control.Applicative
import Control.Monad
import Data.Char
import Parsers
import LexicalStructure
import {-# SOURCE #-} Types
import {-# SOURCE #-} Rules
-- | One or more annotations (single or list form), concatenated into one
-- output string.
annotationsP :: Parser String
annotationsP = concat <$> some (tokenLP (annotationP <|> annotationListP))
--
-- | convert "Override" into "override"
-- "Deprecated" into "deprecated"
-- | Parse a single @\@Annotation@, optionally prefixed by a use-site
-- target (e.g. @\@field:Foo@).  The first character of the annotation name
-- is lower-cased by 'mapped' ("Override" -> "override").
annotationP :: Parser String
annotationP = do
  reservedLP "@"
  s <- option0 [] $ do
    a <- annotationUseSiteTargetP
    reservedLP ":"
    return $ a ++ ":"
  u <- unescapedAnnotationP
  return $ '@' : s ++ mapped u
  where mapped [ ] = []
        mapped (h : t) = toLower h : t
--
-- | Parse an annotation list form: @\@[Foo Bar ...]@.
annotationListP :: Parser String
annotationListP = do
  reservedP "@" -- NOTE(review): annotationP uses reservedLP here; confirm the non-lexeme variant is intended
  s <- option0 [] $ do
    a <- annotationUseSiteTargetP
    reservedLP ":"
    return [] -- NOTE(review): the parsed use-site target 'a' is discarded, so s is always "" — confirm
  reservedLP "["
  u <- some unescapedAnnotationP
  reservedLP "]"
  return $ '@' : s ++ join u
--
-- | Parse a Kotlin annotation use-site target keyword.
-- Fixed: the target is spelled "receiver" in Kotlin; the old "reciever"
-- could never match valid source.
annotationUseSiteTargetP :: Parser String
annotationUseSiteTargetP = reservedWordsLP words
  where
    words = [ "field"
            , "file"
            , "property"
            , "get"
            , "set"
            , "receiver"
            , "param"
            , "setparam"
            , "delegate"
            ]
--
-- | Parse an annotation body: a (possibly dotted) name followed by
-- optional type arguments and optional value arguments, concatenated.
unescapedAnnotationP :: Parser String
unescapedAnnotationP = do
  ns <- reservedLP "." \|/ tokenLP simpleNameP -- \|/ presumably separates name parts by "." — TODO confirm combinator semantics
  ta <- option0 [] typeArgumentsP
  va <- option0 [] valueArgumentsP
  return $ join ns ++ ta ++ va
--
| ice1000/Kt2Dart | src/Annotations.hs | agpl-3.0 | 1,510 | 0 | 12 | 408 | 425 | 209 | 216 | 52 | 2 |
module NGramCrackers.Quant.Stats
( ttrSet
, pMI
, meanSentLength
, sdSentLength
, varSentLength
, sentsPerParagraph
, meanSentsPerParagraph
, sdSentsPerParagraph
, varSentsPerParagraph
) where
import qualified Data.List as L
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Set as S
import NGramCrackers.Ops.Text
import NGramCrackers.Utilities.List
import NGramCrackers.Parsers.Paragraph
import NGramCrackers.Quant.Dispersion
import NGramCrackers.Quant.Counts
{-| Type-token ratio of a token list: the number of unique types (via
    'S.Set'), the total number of tokens, and their ratio.
    (A byte-identical, unexported duplicate @ttrSet'@ was removed as
    dead code.) -}
ttrSet :: [T.Text] -> (Double, Double, Double)
ttrSet tokens = (typesTot, tokenTot, ratio)
    where typesTot = (fromIntegral . S.size . wordSet) tokens
          tokenTot = (fromIntegral . length) tokens
          ratio    = typesTot / tokenTot
{- bigramMIProfile :: [[[T.Text]]] -> M.Map T.Text Double
bigramMIProfile doc = pMI bgFreq pW1 pW2 total where
bgFreq =
bmap = bigramMap doc
wmap = wcMap doc
wset = (map wordSet . concatMap concat) doc
bigram = elemAt 0 wset
-}
{-| Pointwise mutual information score, after Church and Hanks: the log
    of the observed bigram count over its expected count under
    independence of the two words. -}
pMI :: Int -> Double -> Double -> Int -> Double
pMI bgFreq pW1 pW2 total =
    log (observed / (pW1 * pW2 * grandTotal))
  where
    observed   = fromIntegral bgFreq
    grandTotal = fromIntegral total
{-| Mean sentence length, in tokens, over the sentences of one parsed
    paragraph. -}
meanSentLength :: [[T.Text]] -> Double
meanSentLength paragraph =
    fromIntegral (sum (map length paragraph)) / fromIntegral (length paragraph)
{-| Standard deviation of sentence length within one parsed paragraph. -}
sdSentLength :: [[T.Text]] -> Double
sdSentLength paragraph = standardDev (map (fromIntegral . length) paragraph)

{-| Variance of sentence length within one parsed paragraph. -}
varSentLength :: [[T.Text]] -> Double
varSentLength paragraph = variance (map (fromIntegral . length) paragraph)

{-| Number of sentences in one parsed paragraph. -}
sentsPerParagraph :: [[T.Text]] -> Double
sentsPerParagraph paragraph = fromIntegral (length paragraph)

{-| Mean number of sentences per paragraph over a parsed document. -}
meanSentsPerParagraph :: [[[T.Text]]] -> Double
meanSentsPerParagraph = mean . map sentsPerParagraph

{-| Standard deviation of sentences per paragraph over a document. -}
sdSentsPerParagraph :: [[[T.Text]]] -> Double
sdSentsPerParagraph = standardDev . map sentsPerParagraph

{-| Variance of sentences per paragraph over a parsed document. -}
varSentsPerParagraph :: [[[T.Text]]] -> Double
varSentsPerParagraph = variance . map sentsPerParagraph
| R-Morgan/NGramCrackers | testsuite/NGramCrackers/Quant/Stats.hs | agpl-3.0 | 3,468 | 0 | 11 | 946 | 639 | 369 | 270 | 51 | 1 |
module LSystem.GeneralDataTypes (
Name,
readsName,
readsNames,
nameFromString
) where
import Data.List
import Data.Char
import LSystem.ShowReadUtils
----------------------------------------------------------------------------------------------------
-- | Name of parameter or variable. First char of name have to be alphabetic, rest can contain any
-- alphanumeric chars.
-- | Name of a parameter or variable: the first character must be
-- alphabetic, the rest may be any alphanumeric characters.
newtype Name = Name String
    deriving (Eq)

instance Show Name where
    showsPrec _ (Name s) = (s++)

instance Read Name where
    readsPrec _ = readsName

-- | Show a comma-separated list of names.
showsNames :: [Name] -> ShowS
showsNames = showsCommaList

-- | Read one name, keeping only lexemes that form a valid 'Name'.
readsName :: ReadS Name
readsName str = [(Name name, rest) | (name, rest) <- myLex str, isNameValid (Name name)]

-- | Read a comma-separated list of names.
readsNames :: ReadS [Name]
readsNames = readsCommaList readsName

-- | Smart constructor: validate a string as a 'Name'.
nameFromString :: String -> Maybe Name
nameFromString str = if isNameValid $ Name str then Just $ Name str else Nothing

-- | A name is valid when it is non-empty, starts with an alphabetic
-- character and continues with alphanumerics.  Rewritten with a head
-- pattern and 'all' instead of the partial 'head' plus a boolean 'foldl'
-- accumulator (isAlpha implies isAlphaNum, so behaviour is unchanged).
isNameValid :: Name -> Bool
isNameValid (Name []) = False
isNameValid (Name (c:cs)) = isAlpha c && all isAlphaNum cs
| NightElfik/L-systems-in-Haskell | src/LSystem/GeneralDataTypes.hs | unlicense | 1,118 | 0 | 9 | 218 | 311 | 166 | 145 | 26 | 2 |
module Geometry.Cube(
area,
volume
) where
import qualified Geometry.Cuboid as Cuboid
-- | Surface area of a cube with the given edge length, computed as the
-- area of an equal-sided cuboid.
area :: Float -> Float
area edge = Cuboid.area edge edge edge

-- | Volume of a cube with the given edge length.
volume :: Float -> Float
volume edge = Cuboid.volume edge edge edge
| paulbarbu/haskell-ground | Geometry/Cube.hs | apache-2.0 | 227 | 0 | 6 | 46 | 74 | 41 | 33 | 8 | 1 |
module Main (main) where
import Bucketeer.Timers (startBucketManager)
import Bucketeer.Types
import Bucketeer.WebServer (BucketeerWeb(..))
import Bucketeer.Util
import qualified Bucketeer.Testing.Persistence as P (specs)
import qualified Bucketeer.Testing.Util as U (specs)
import qualified Bucketeer.Testing.Manager as M (specs)
import qualified Bucketeer.Testing.Types as T (specs)
import qualified Bucketeer.Testing.WebServer as WS (runSpecs)
import qualified Bucketeer.Testing.WebServer.Util as WU (specs)
import Control.Monad (when)
import Data.IORef (newIORef)
import Database.Redis (connect,
defaultConnectInfo)
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
import qualified Test.Hspec as HS
import qualified Test.Hspec.Monadic as HSM
import System.Exit (ExitCode(..),
exitWith)
-- | Test driver: connect to a local Redis instance, run the Hspec-based
-- specs in one batch (exiting non-zero on failure), then run the
-- web-server specs separately against the same connection.
main :: IO ()
main = do conn <- connect defaultConnectInfo
          mspecs <- M.specs
          runSpecs [HS.hspecB $ P.specs conn ++ U.specs ++ WU.specs ++ T.specs ++ mspecs]
          WS.runSpecs conn
---- Helpers
-- | Run a batch of spec actions (each yielding True on success) and exit
-- with a non-zero status when any of them fails.
--
-- The previous version computed @all id . map not@, i.e. it exited
-- non-zero only when /every/ spec failed, silently ignoring a single
-- failing spec among passing ones.
runSpecs :: [IO Bool]
         -> IO ()
runSpecs specs = do results <- sequence specs
                    when (any not results) $ exitWith $ ExitFailure 1
-- | Map a success flag to a process exit code (True = success,
-- False = failure with status 1).
toExitCode :: Bool
           -> ExitCode
toExitCode ok = if ok then ExitSuccess else ExitFailure 1
| MichaelXavier/Bucketeer | Bucketeer/Testing/Main.hs | bsd-2-clause | 1,436 | 0 | 15 | 331 | 408 | 235 | 173 | 35 | 1 |
module Main where
import Control.Monad
import Control.Monad.IO.Class
import FileIO (TransMethod (Naive))
import RuntimeProcessManager (withRuntimeProcess)
import System.Console.Haskeline (runInputT, defaultSettings)
import System.Directory (doesFileExist)
import System.IO
import qualified Data.Map as Map
-- REPL-specific modules
import BackEnd
import Loop
import qualified Environment as Env
import qualified History as Hist
-- | REPL entry point: spawn the "FileServer" runtime process and run the
-- interactive Haskeline loop over its stdin/stdout pipes, starting with
-- an empty environment/history and the Naive translation method.
-- NOTE(review): the trailing 'False' is a flag to 'withRuntimeProcess';
-- its meaning is defined in RuntimeProcessManager — confirm there.
main :: IO ()
main = do
  withRuntimeProcess "FileServer" LineBuffering
    (\(inP,outP) ->
      do liftIO printHelp
         runInputT defaultSettings
           (Loop.loop (inP, outP) (0, compileN, [Naive])
             Map.empty Env.empty Hist.empty Hist.empty 0 False False False False 0)) False
-- | Block until the named file exists.
-- NOTE(review): this polls 'doesFileExist' in a tight recursive loop
-- with no delay, pegging a core while waiting — consider inserting a
-- 'threadDelay' between retries.  TODO confirm intended semantics.
fileExist :: String -> IO ()
fileExist name = do
  exist <- doesFileExist name
  unless exist $ fileExist name
-- | Read a file name from stdin and write that file's contents to stdout.
printFile :: IO ()
printFile = getLine >>= readFile >>= putStr
| bixuanzju/fcore | repl/Main.hs | bsd-2-clause | 1,091 | 0 | 16 | 329 | 286 | 158 | 128 | 30 | 1 |
-- | The module provides an abstraction over transition maps from
-- alphabet symbols to node identifiers.
module Data.DAWG.Gen.Trans
( Trans (..)
) where
import Data.DAWG.Gen.Types
-- | Abstraction over transition maps from alphabet symbols to
-- node identifiers.
-- | Abstraction over transition maps from alphabet symbols to node
-- identifiers; instances provide an ordered association from 'Sym' to 'ID'.
class Trans t where
    -- | The empty transition map.
    empty :: t
    -- | Look up a symbol in the map.
    lookup :: Sym -> t -> Maybe ID
    -- | Find the positional index of the symbol, if present.
    index :: Sym -> t -> Maybe Int
    -- | Select a (symbol, ID) pair by index of its position in the map.
    byIndex :: Int -> t -> Maybe (Sym, ID)
    -- | Insert an element into the transition map.
    insert :: Sym -> ID -> t -> t
    -- | Construct a transition map from a list.
    fromList :: [(Sym, ID)] -> t
    -- | Translate a transition map into a list.
    toList :: t -> [(Sym, ID)]
| kawu/dawg-ord | src/Data/DAWG/Gen/Trans.hs | bsd-2-clause | 866 | 0 | 10 | 246 | 156 | 94 | 62 | 11 | 0 |
-- |
-- Module : GRN.Parse
-- Copyright : (c) 2011 Jason Knight
-- License : BSD3
--
-- Maintainer : jason@jasonknight.us
-- Stability : experimental
-- Portability : portable
--
-- Parses a pathway file *.pw to produce the ParseData construction that can
-- then be processed further. See the example pathway files in pws folder
-- for an example of the syntax.
--
module GRN.Parse where
import GRN.Types
import Text.Parsec
import Text.Parsec.String
import Text.ParserCombinators.Parsec.Char
import Text.Parsec.Token
import Numeric
import Control.Applicative ((<$),empty,(<*))
import Control.Monad
import Data.Maybe
import Data.List
import qualified Data.Map as M
-- | One meaningful line of a pathway (*.pw) file.
data ParseLine = ParseDependency (Gene, [Gene])
               | ParsePathway Pathway
               | ParseKnockout (Gene, Bool)
               | ParseMeasurement (Gene, Double)
                 deriving (Show,Eq)
-- | Folding functions that layer parsed lines onto the gene map.
-- Dependencies are handled separately in 'parsePW', hence three layers
-- here versus four parsers in 'parseLayers'.
dataLayers :: [ParseLine -> ParseData -> ParseData]
dataLayers = [addPath, addKnock, addMeasure]
-- | The line-level parsers tried (in order) by 'parseLine'.
parseLayers :: [Parser (Maybe ParseLine)]
parseLayers = [deps, paths, knocks, measurements]
-- FIXME TODO: Does not handle the control section at the bottom
-- well. In fact, everything below a control section is lost...
-- mysteriously...
-- | Parse one line: a comment yields Nothing, otherwise the first
-- matching layer parser wins.
parseLine :: Parser (Maybe ParseLine)
parseLine = do
    ss
    try (comment >> return Nothing)
        <|> choice (map try parseLayers)
-- Lexical helpers: whitespace, skip-to-next-line, '#' comments, and
-- gene identifiers (any run of chars outside the delimiter set).
ss = skipMany space
tillNext = skipMany (noneOf "\n") >> ss
comment = ss >> char '#' >> tillNext
geneID = (many1 $ noneOf "#-&<>=: \n") <?> "GeneID"
-- | Parse a signed float by handing the remaining input to 'readFloat'
-- and pushing the unconsumed suffix back with 'setInput'.
p_float :: CharParser () Double
p_float = do s<- getInput
             case readSigned readFloat s of
               [(n,s')] -> n <$ setInput s'
               _ -> empty
-- | Parse a gene dependency declaration of the form @Gene(dep1,dep2,...)@.
-- (The local binding is renamed so it no longer shadows this function.)
deps :: Parser (Maybe ParseLine)
deps = do
  g <- many1 alphaNum
  ss
  char '('
  ss
  ds <- sepBy (many1 alphaNum) (char ',')
  ss
  char ')'
  tillNext
  return (Just (ParseDependency (g, ds)))
-- | Parse a pathway line such as @A && B -0,1-> C@: predecessor genes,
-- a pre/post activation digit pair, and successor genes.
paths :: Parser (Maybe ParseLine)
paths = do
  let ampSep = try (ss >> string "&&" >> ss)
  gpre <- sepBy1 geneID ampSep
  ss
  char '-'
  pre <- digit
  char ','
  post <- digit
  string "->"
  ss
  gpost <- sepBy1 geneID ampSep
  tillNext
  -- As before, any digit other than '0' counts as True.
  let i2b = (/= '0')
  return $ Just (ParsePathway $ Pathway gpre (i2b pre) (i2b post) gpost)
-- | Parse a knockout line such as @Gene = 0@.  The digit '0' means the
-- gene is knocked out to False; any other digit means True.
knocks :: Parser (Maybe ParseLine)
knocks = do
  gene <- many1 alphaNum
  ss
  char '='
  ss
  kstate <- digit
  tillNext
  return $ Just $ ParseKnockout (gene, kstate /= '0')
-- | Parse a measurement line such as @Gene =measured= 0.75@.
measurements :: Parser (Maybe ParseLine)
measurements = do
  gene <- many1 alphaNum
  ss
  string "=measured="
  ss
  level <- p_float
  tillNext
  return (Just (ParseMeasurement (gene, level)))
-- | Fold a parsed knockout into the gene map; other lines pass through.
addKnock :: ParseLine -> ParseData -> ParseData
addKnock (ParseKnockout (gene, kbool)) gmap =
    M.adjust (\gi -> gi { knockout = Just kbool }) gene gmap
addKnock _ gmap = gmap
-- | Fold a parsed measurement into the gene map; other lines pass through.
addMeasure :: ParseLine -> ParseData -> ParseData
addMeasure (ParseMeasurement (gene, level)) gmap =
    M.adjust (\gi -> gi { measurement = Just level }) gene gmap
addMeasure _ gmap = gmap
-- | Fold a parsed pathway into the gene map: the pathway is attached to
-- each of its target genes in turn (recursing over the target list),
-- stored with the target list narrowed to that single gene.  New
-- pathways are consed onto the front of the gene's existing list.
addPath :: ParseLine -> ParseData -> ParseData
addPath (ParsePathway (Pathway a b1 b2 (p:ps))) initMap = addPath (ParsePathway $ Pathway a b1 b2 ps) postMap
        where postMap = M.adjust (\x -> x{pathways=pw:(pathways x)}) p initMap
              pw = Pathway a b1 b2 [p]
addPath _ initMap = initMap
-- | Parse an entire pathway file into its meaningful lines, discarding
-- comments and surrounding whitespace.
parseText :: Parser [ParseLine]
parseText =
    ss >> many parseLine >>= \plines -> ss >> return (catMaybes plines)
-- | Parse a complete pathway file: build the initial gene map from the
-- dependency declarations, then fold each data layer ('dataLayers')
-- over every parsed line.  Calls 'error' on a parse failure.
parsePW :: String -> ParseData
parsePW input = foldr (\f dict -> foldr f dict plines) initMap dataLayers
    where
        initMap = M.fromList $ map create depsList
        create (g,d) = (g, GeneInfo g Nothing Nothing d [])
        depsList = [ x | ParseDependency x <- plines]
        plines = case parse parseText "Input text" input of
                Left err -> error ("Parsing error: " ++ (show err))
                Right ret -> ret
-- | Read the given pathway file and parse it into a 'ParseData' map.
fileNamePW :: FilePath -> IO ParseData
fileNamePW filename = fmap parsePW (readFile filename)
-- | Parse the @<control>@ section of a pathway file: skip everything up
-- to the opening tag, then read the @<targets>@ block (gene = level
-- pairs) and the @<controls>@ block (gene names).  Both result lists
-- are sorted before being stored.
parseControlText :: Parser ParseControl
parseControlText = do
    ss
    manyTill anyChar (try (string "<control>"))
    ss
    targets <- between (string "<targets>" >> ss) (string "</targets>" >> ss) $ many1 $ do
        g <- geneID
        ss
        char '='
        m <- p_float
        ss
        return (g,m)
    controls <- between (string "<controls>" >> ss) (string "</controls>" >> ss) $ many1 (geneID <* ss)
    string "</control>"
    ss
    return ParseControl { pctargets = sort $ targets, pccontrols = sort $ controls }
-- | Parse the control section of a pathway file, failing hard (via
-- 'error') when the input does not parse.
parseControl :: String -> ParseControl
parseControl input =
    case parse parseControlText "Input Control text" input of
        Left err -> error ("Parsing control error: " ++ (show err))
        Right ret -> ret
| binarybana/grn-pathways | GRN/Parse.hs | bsd-2-clause | 5,247 | 0 | 14 | 1,573 | 1,625 | 822 | 803 | 129 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Application.HXournal.Coroutine.Highlighter
-- Copyright : (c) 2011, 2012 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <ianwookim@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-----------------------------------------------------------------------------
module Application.HXournal.Coroutine.Highlighter where
import Application.HXournal.Device
import Application.HXournal.Type.Coroutine
import Application.HXournal.Type.Canvas
import Application.HXournal.Coroutine.Pen
import Control.Monad.Trans
-- |
-- | Begin a highlighter stroke on the given canvas: log the event and
-- delegate to the shared pen-stroke handler.
highlighterStart :: CanvasId -> PointerCoord -> MainCoroutine ()
highlighterStart cid pcoord =
  liftIO (putStrLn "highlighter started") >> penStart cid pcoord
| wavewave/hxournal | lib/Application/HXournal/Coroutine/Highlighter.hs | bsd-2-clause | 816 | 0 | 8 | 101 | 99 | 62 | 37 | 10 | 1 |
{-# LANGUAGE QuasiQuotes #-}
-- Compare with tests/pos/StrictPair1.hs
import LiquidHaskell
import Language.Haskell.Liquid.Prelude (liquidAssert)
-- | Measure: project the second component of a pair for use in
-- refinement predicates.
[lq| measure tsnd :: (a, b) -> b
    tsnd (x, y) = y
  |]
-- | A nested pair whose outer second component is bounded above by the
-- inner pair's second component (expressed via the 'tsnd' measure).
[lq| type Foo a = ((a, Int), Int)<{\z v -> v <= (tsnd z)}> |]
[lq| poo :: (Foo a) -> () |]
-- | Runtime check of the 'Foo' invariant: m must not exceed n.
poo :: ((a, Int), Int) -> ()
poo ((x, n), m) = liquidAssert (m <= n) ()
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/StrictPair0.hs | bsd-3-clause | 384 | 0 | 7 | 91 | 96 | 60 | 36 | 8 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Utility functions used in the construction of the "Views".
-}
module Views.Helpers
( listOfLinks
, pTitle
, pUrl
, pId
, langUrl
, navLinks
, inlineCss
) where
import Control.Monad (forM_)
import Data.Monoid (mconcat)
import Data.List (intersperse)
import Data.Time.Clock (UTCTime)
import Snap.Types()
import Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes as A
import Types
------------------------------------------------------------------------------
-- | Render a UL with class 'cls', one LI per element, each element
-- turned into a link by 'render'.
listOfLinks :: AttributeValue -> (a -> Html) -> [a] -> Html
listOfLinks cls render items =
    ul ! class_ cls $ mapM_ (li . render) items
------------------------------------------------------------------------------
-- | A paste's title rendered as Html.
pTitle :: Paste -> Html
pTitle paste = toHtml (pasteTitle paste)
------------------------------------------------------------------------------
-- | The canonical URL for the given paste.
pUrl :: Paste -> AttributeValue
pUrl paste = toValue ("/paste/" ++ pId paste)
------------------------------------------------------------------------------
-- | The paste's numeric id rendered as a String.
pId :: Paste -> String
pId paste = show (pasteId paste)
------------------------------------------------------------------------------
-- | The URL for the page listing pastes in the given language.
langUrl :: String -> AttributeValue
langUrl lang = toValue ("/language/" ++ lang)
------------------------------------------------------------------------------
-- | Navigation links, separated by vertical bars.
navLinks :: Html
navLinks = mconcat (intersperse " | " entries)
  where entries = [ a ! href "/" $ "All pastes"
                  , a ! href "/languages" $ "All languages"
                  , a ! href "/new" $ "Add paste"
                  ]
------------------------------------------------------------------------------
-- | Wrap the given CSS text in an inline style tag.
inlineCss :: String -> Maybe Html
inlineCss css = Just (H.style ! type_ "text/css" $ toHtml css)
-- Render timestamps via their Show instance.
instance ToHtml UTCTime where
    toHtml t = toHtml (show t)
| benarmston/hpastie | src/Views/Helpers.hs | bsd-3-clause | 2,205 | 0 | 10 | 418 | 394 | 229 | 165 | 37 | 1 |
{-# LANGUAGE BangPatterns #-}
module LRUCache where
import Control.Applicative ((<$>))
import Data.Hashable (Hashable, hash)
import qualified Data.HashPSQ as HashPSQ
import Data.IORef (IORef, newIORef, atomicModifyIORef')
import Data.Int (Int64)
import Data.Maybe (isNothing)
import qualified Data.Vector as V
import Prelude hiding (lookup)
-- | Logical timestamps used as priorities: larger = more recently used.
type Priority = Int64
-- | An immutable LRU cache: a bounded priority-search queue where each
-- entry's priority is the logical time of its last use.
data Cache k v = Cache
    { cCapacity :: !Int -- ^ The maximum number of elements in the queue
    , cSize :: !Int -- ^ The current number of elements in the queue
    , cTick :: !Priority -- ^ The next logical time
    , cQueue :: !(HashPSQ.HashPSQ k Priority v) -- ^ Entries prioritized by last use
    } deriving (Eq, Show)
-- | An empty cache holding at most @capacity@ elements; calls 'error'
-- when the capacity is not positive.
empty :: Int -> Cache k v
empty capacity =
    if capacity < 1
        then error "Cache.empty: capacity < 1"
        else Cache
            { cCapacity = capacity
            , cSize = 0
            , cTick = 0
            , cQueue = HashPSQ.empty
            }
-- | Restore the cache invariants after an update:
-- if the logical clock would overflow, drop everything and start fresh
-- (only possible after 2^63 operations); if the cache is over capacity,
-- evict the least-recently-used entry (the queue minimum).
trim :: (Hashable k, Ord k) => Cache k v -> Cache k v
trim c
    | cTick c == maxBound = empty (cCapacity c)
    | cSize c > cCapacity c = c
        { cSize = cSize c - 1
        , cQueue = HashPSQ.deleteMin (cQueue c)
        }
    | otherwise = c
-- | Insert a key/value pair at the current logical time (overwriting any
-- existing entry for the key), then trim to capacity.  The size grows
-- only when the key was not already present.
insert :: (Hashable k, Ord k) => k -> v -> Cache k v -> Cache k v
insert key val c = trim $!
    let (mbOldVal, queue) = HashPSQ.insertView key (cTick c) val (cQueue c)
    in c
        { cSize = if isNothing mbOldVal then cSize c + 1 else cSize c
        , cTick = cTick c + 1
        , cQueue = queue
        }
-- | Look up a key; on a hit, bump the entry's priority to the current
-- logical time (marking it most recently used) and return the value
-- together with the updated cache.  Misses leave the cache untouched.
lookup
    :: (Hashable k, Ord k) => k -> Cache k v -> Maybe (v, Cache k v)
lookup k c = case HashPSQ.alter lookupAndBump k (cQueue c) of
    (Nothing, _) -> Nothing
    (Just x, q) ->
        let !c' = trim $ c {cTick = cTick c + 1, cQueue = q}
        in Just (x, c')
  where
    -- Miss: no change.  Hit: rewrite the entry's priority to 'cTick'.
    lookupAndBump Nothing = (Nothing, Nothing)
    lookupAndBump (Just (_, x)) = (Just x, Just ((cTick c), x))
-- | A mutable handle around an immutable LRU cache.
newtype Handle k v = Handle (IORef (Cache k v))

-- | Allocate a fresh handle over an empty cache of the given capacity.
newHandle :: Int -> IO (Handle k v)
newHandle capacity = fmap Handle (newIORef (empty capacity))
-- | Atomically look up @k@ in the cache behind the handle, bumping its
-- priority on a hit; returns Nothing on a miss.
-- (The former @x <- m; return x@ wrapper around 'atomicModifyIORef''
-- was redundant and has been removed.)
iolookup :: (Hashable k, Ord k) => Handle k v -> k -> IO(Maybe (v))
iolookup (Handle ref) k =
  atomicModifyIORef' ref $ \c -> case lookup k c of
    Nothing -> (c, Nothing)
    Just (v, c') -> (c', Just v)
-- | Atomically insert @k -> v@ into the cache behind the handle,
-- evicting the least-recently-used entry when over capacity.
ioinsert :: (Hashable k, Ord k) => Handle k v -> k -> v -> IO()
ioinsert (Handle ref) k v =
  atomicModifyIORef' ref (\c -> (insert k v c, ()))
| Garygunn94/DFS | .stack-work/intero/intero3920v71.hs | bsd-3-clause | 2,554 | 0 | 16 | 811 | 1,001 | 529 | 472 | 68 | 3 |
module Data.Minecraft.Release17
( module Data.Minecraft.Release17.Protocol
, module Data.Minecraft.Release17.Version
) where
import Data.Minecraft.Release17.Protocol
import Data.Minecraft.Release17.Version
| oldmanmike/hs-minecraft-protocol | src/Data/Minecraft/Release17.hs | bsd-3-clause | 213 | 0 | 5 | 21 | 39 | 28 | 11 | 5 | 0 |
{-# LANGUAGE ImpredicativeTypes, ConstraintKinds, GADTs, MagicHash, RecursiveDo, BangPatterns, UndecidableInstances, FlexibleInstances, ScopedTypeVariables, FlexibleContexts, KindSignatures, MultiParamTypeClasses #-}
-- | Adapton-style (http://dl.acm.org/citation.cfm?id=2594324) incremental computations.
-- Adapton currently requires the monad to support IO, for efficiency but mostly because our implementation relies heavily on @Unique@ identifiers and @Weak@ references.
-- This exposes more than would be desirable to end-users, but it does not seem easy to overcome this limitation.
module Control.Monad.Incremental.Internal.Adapton.Algorithm where
import Control.Monad.Incremental
import Control.Monad.Incremental.Internal.Adapton.Layers
import Control.Monad.Incremental.Internal.Adapton.Types
import Control.Monad.Incremental.Internal.Adapton.Memo
import Control.Monad
import qualified Data.Strict.Maybe as Strict
import Debug
import Data.Unique
import System.Mem.Weak.Exts
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import qualified System.Mem.MemoTable as MemoTable
import Data.IORef.Exts
import Data.IORef
import Data.Strict.Tuple (Pair(..))
import qualified Data.Strict.List as SList
import Data.Proxy
import Data.WithClass.MData
import Prelude hiding (mod,const,read)
import System.Mem.WeakMap (WeakMap(..))
import qualified System.Mem.WeakMap as WeakMap
-- * Strict modifiables
-- Thunk interface for strict modifiables at the inner (tracked) layer.
instance (Layer Inside Adapton) => Thunk M Inside Adapton where
	new = modInnerM
	{-# INLINE new #-}
	newc = refInnerM
	{-# INLINE newc #-}
	read = getInnerM
	{-# INLINE read #-}
-- Thunk interface for strict modifiables at the outer (untracked) layer.
instance (Layer Outside Adapton) => Thunk M Outside Adapton where
	new = modOuterM
	{-# INLINE new #-}
	newc = refOuterM
	{-# INLINE newc #-}
	read = getOuterM
	{-# INLINE read #-}
-- Input (reference) interface at the inner layer: reads record
-- dependencies; writes go through 'setM'.
instance (Layer Inside Adapton) => Input M Inside Adapton where
	ref = refInnerM
	{-# INLINE ref #-}
	get = getInnerM
	{-# INLINE get #-}
	set = setM
	{-# INLINE set #-}
	getOutside = getOuterM
	{-# INLINE getOutside #-}
	refOutside = refOuterM
	{-# INLINE refOutside #-}
	modOutside = \c -> outside c >>= refOutside
	{-# INLINE modOutside #-}
-- Input (reference) interface at the outer layer: no dependency tracking.
instance (Layer Outside Adapton) => Input M Outside Adapton where
	ref = refOuterM
	{-# INLINE ref #-}
	get = getOuterM
	{-# INLINE get #-}
	set = setM
	{-# INLINE set #-}
	refOutside = refOuterM
	{-# INLINE refOutside #-}
	modOutside = \c -> c >>= refOutside
	{-# INLINE modOutside #-}
-- | Run an inner computation and store its result in a fresh reference.
modInnerM :: (AdaptonImpl inc,IncK inc a,Layer Inside inc) => Inside inc a -> Inside inc (M Inside inc a)
modInnerM m = m >>= refInnerM
-- | Run an outer computation and store its result in a fresh reference.
modOuterM :: (AdaptonImpl inc,IncK inc a,Layer Outside inc) => Outside inc a -> Outside inc (M Outside inc a)
modOuterM m = m >>= refOuterM
-- | Allocate a strict reference at the outer layer (no creator recorded).
refOuterM :: (IncK inc a,Layer l inc,Layer Outside inc) => a -> Outside inc (M l inc a)
refOuterM v = unsafeIOToInc $ do
	idU <- newUnique
	dta <- newIORef v
	dependentsU <- WeakMap.new
	-- since the ref will never be reused, we don't need to worry about it's creator
	return $ M (dta,(NodeMeta (idU,dependentsU,error "nodirty",return (),Nothing)))
-- | Allocate a strict reference at the inner layer, recording the
-- creating thunk so reuse can be invalidated correctly.
refInnerM :: (AdaptonImpl inc,IncK inc a,Layer l inc,Layer Inside inc) => a -> Inside inc (M l inc a)
refInnerM v = unsafeIOToInc $ do
	idU <- newUnique
	dta <- newIORef v
	dependentsU <- WeakMap.new
	-- add a reference dependency (they are transitive up to the top-level calling thunk)
	creator <- mkRefCreator idU
	return $ M (dta,(NodeMeta (idU,dependentsU,error "nodirty",return (),creator)))
-- forces a lazy modifiable (encoded as a plain thunk)
-- the layer is just for uniformity, but it does not matter for @M@
-- | Read a strict reference at the inner layer and register a
-- dependency (with a re-check action) on behalf of the calling thunk.
{-# INLINE getInnerM #-}
getInnerM :: (AdaptonImpl inc,Eq a,IncK inc a,Layer Inside inc) => M Inside inc a -> Inside inc a
getInnerM = \t -> {-debug ("getInnerM " ++ show (hashUnique $ idNM $ metaM t)) $ -} unsafeIOToInc $ do
	value <- readIORef (dataM t)
	addDependency (metaM t) (unsafeIOToInc $ checkM t $! value) -- updates dependencies of callers
	return value
-- forces a lazy modifiable (encoded as a plain thunk)
-- the layer is just for uniformity, but it does not matter for @M@
-- if running at the outer layer, we don't need to add dependencies since thunks are always re-evaluated
{-# INLINE getOuterM #-}
getOuterM :: (Layer l inc) => M l inc a -> Outside inc a
getOuterM = \t -> unsafeIOToInc $ readIORef (dataM t)
-- force that does not return the value nor adds dependencies, but instead checks whether the value has not changed
{-# INLINE checkM #-}
checkM :: (Eq a,IncK inc a,Layer Inside inc) => M Inside inc a -> a -> IO Bool
checkM = \t oldv -> do
	v <- readIORef' (dataM t)
	return (oldv == v)
-- | Overwrite a strict reference from the outer layer; a write of an
-- equal value is a no-op, otherwise dependents are dirtied.
setM :: (Eq a,IncK inc a,Layer Outside inc,Layer l inc) => M l inc a -> a -> Outside inc ()
setM t v' = debug ("changed " ++ show (hashUnique $ idNM $ metaM t)) $ unsafeIOToInc $ do
	v <- readIORef' (dataM t)
	unless (v == v') $ do
		writeIORef' (dataM t) v'
		dirty (metaM t) -- dirties only dependents; dirty also parent dependencies (to avoid reuse for thunks that created this reference)
-- * Lazy modifiables
-- Thunk interface for lazy modifiables at the inner layer.
instance (Layer Inside Adapton) => Thunk L Inside Adapton where
	new = modL
	{-# INLINE new #-}
	newc = refL
	{-# INLINE newc #-}
	read = getInnerL
	{-# INLINE read #-}
-- Thunk interface for lazy modifiables at the outer layer.
instance (Layer Outside Adapton) => Thunk L Outside Adapton where
	new = modL
	{-# INLINE new #-}
	newc = refL
	{-# INLINE newc #-}
	read = getOuterL
	{-# INLINE read #-}
-- Input interface at the inner layer, including lazy overwrite/modify.
instance (Layer Inside Adapton) => Input L Inside Adapton where
	ref = refL
	{-# INLINE ref #-}
	mod = modL
	{-# INLINE mod #-}
	get = getInnerL
	{-# INLINE get #-}
	set = setL
	{-# INLINE set #-}
	overwrite = overwriteL
	{-# INLINE overwrite #-}
	modify = modifyL
	{-# INLINE modify #-}
	getOutside = inside . getNoDependentsInnerL
	{-# INLINE getOutside #-}
-- Input interface at the outer layer.
instance (Layer Outside Adapton) => Input L Outside Adapton where
	ref = refL
	{-# INLINE ref #-}
	mod = modL
	{-# INLINE mod #-}
	get = getOuterL
	{-# INLINE get #-}
	set = setL
	{-# INLINE set #-}
	overwrite = overwriteL
	{-# INLINE overwrite #-}
	modify = modifyL
	{-# INLINE modify #-}
-- | Allocate a lazy modifiable already holding a constant value.
refL :: (AdaptonImpl inc,Layer l inc) => a -> l inc (L l inc a)
refL v = unsafeIOToInc $ do
	idU <- newUnique
	dta <- newIORef (LConst v)
	dependentsU <- WeakMap.new
	creator <- mkRefCreator idU
	return $ L (dta,(NodeMeta (idU,dependentsU,error "nodirty",return (),creator)))
-- | Allocate a lazy modifiable holding an unevaluated computation.
modL :: (AdaptonImpl inc,Layer l inc) => l inc a -> l inc (L l inc a)
modL m = unsafeIOToInc $ do
	idU <- newUnique
	dta <- newIORef (LThunk m)
	dependentsU <- WeakMap.new
	creator <- mkRefCreator idU
	return $ L (dta,(NodeMeta (idU,dependentsU,error "nodirty",return (),creator)))
-- forces a lazy modifiable (encoded as a plain thunk)
-- | Force a lazy modifiable at the inner layer and register a
-- dependency (with a re-check action) on behalf of the calling thunk.
{-# INLINE getInnerL #-}
getInnerL :: (AdaptonImpl inc,Eq a,IncK inc a,Layer Inside inc) => L Inside inc a -> Inside inc a
getInnerL = \t -> {-debug ("getInnerL " ++ show (hashUnique $ idNM $ metaL t)) $ -} do
	value <- getNoDependentsInnerL t
	unsafeIOToInc $ addDependency (metaL t) (unsafeIOToInc $ checkL t $! value) -- updates dependencies of callers
	return value
-- forces a lazy modifiable (encoded as a plain thunk)
-- | Deliberately unsupported: forcing a lazy modifiable at the outer
-- layer raises 'error' at use.
{-# INLINE getOuterL #-}
getOuterL :: (AdaptonImpl inc,IncK inc a,Layer Outside inc) => L Outside inc a -> Outside inc a
getOuterL = error "getting a lazy modifiable outside" --getNoDependentsOuterL
-- force that does not return the value nor adds dependencies, but instead checks whether the value has not changed
-- An unevaluated thunk is conservatively treated as changed.
{-# INLINE checkL #-}
checkL :: (AdaptonImpl inc,Eq a,IncK inc a,Layer Inside inc) => L Inside inc a -> a -> IO Bool
checkL = \t oldv -> do
	d <- readIORef (dataL t)
	case d of
		LThunk _ -> return False
		LConst value -> return (oldv == value)
-- | Force at the inner layer without recording a dependency.
{-# INLINE getNoDependentsInnerL #-}
getNoDependentsInnerL :: (AdaptonImpl inc,Layer Inside inc) => L Inside inc a -> Inside inc a
getNoDependentsInnerL = \t -> {-debug ("getNoDependentsInnerL " ++ show (hashUnique $ idNM $ metaL t)) $ -} do
	d <- unsafeIOToInc $ readIORef (dataL t)
	case d of
		LThunk force -> evaluateInnerL t force --unevaluated thunk
		LConst value -> return value -- constant value
-- | Force at the outer layer without recording a dependency.
{-# INLINE getNoDependentsOuterL #-}
getNoDependentsOuterL :: (AdaptonImpl inc,Eq a,Layer Outside inc) => L Outside inc a -> Outside inc a
getNoDependentsOuterL = \t -> do
	d <- unsafeIOToInc $ readIORef (dataL t)
	case d of
		LThunk force -> evaluateOuterL t force --unevaluated thunk
		LConst value -> return value -- constant value
-- does not add the modifiable to the stack, as we do with thunks
-- Evaluates the pending computation, caches the result as a constant.
{-# INLINE evaluateInnerL #-}
evaluateInnerL :: (AdaptonImpl inc,Layer Inside inc) => L Inside inc a -> Inside inc a -> Inside inc a
evaluateInnerL t force = debug ("re-evaluatingInnerL " ++ show (hashUnique $ idNM $ metaL t)) $ do
	unsafeIOToInc $ pushStack (metaL t :!: Strict.Nothing)
	value <- force
	unsafeIOToInc $ writeIORef (dataL t) $! LConst value
	popStack'
	return value
-- does not add the modifiable to the stack, as we do with thunks
-- does not store the result in the modifiable
{-# INLINE evaluateOuterL #-}
evaluateOuterL :: (AdaptonImpl inc,Eq a,Layer Outside inc) => L Outside inc a -> Outside inc a -> Outside inc a
evaluateOuterL t force = debug ("re-evaluatingOuterL " ++ show (hashUnique $ idNM $ metaL t)) $ do
	unsafeIOToInc $ pushStack (metaL t :!: Strict.Nothing)
	value <- force
	popStack'
	return value
-- | Destructively set a lazy modifiable to a constant value, dirtying
-- its dependents.  A pending thunk is discarded; writing a value equal
-- to the stored constant is a no-op (no spurious dirtying).
-- (Rewritten with 'unless' in place of @if c then return () else m@,
-- and the unused thunk binding replaced by a wildcard.)
setL :: (Eq a,IncK inc a,Layer Outside inc,Layer l inc) => L l inc a -> a -> Outside inc ()
setL l v' = debug ("changed " ++ show (hashUnique $ idNM $ metaL l)) $ unsafeIOToInc $ do
	d <- readIORef (dataL l)
	let value_changed = do
		writeIORef (dataL l) $! LConst v'
		dirty (metaL l) -- dirties only dependents
	case d of
		LThunk _ -> value_changed
		LConst v -> unless (v == v') value_changed
-- changes the value lazily, so it cannot perform equality
-- | Replace the contents with a new unevaluated computation and dirty
-- dependents.  NOTE(review): both case branches produce @LThunk m@;
-- the case seems to exist only to scrutinize @d@ — confirm intent.
overwriteL :: (Layer l inc) => L l inc a -> l inc a -> Outside inc ()
overwriteL t m = unsafeIOToInc $ do
	d <- readIORef (dataL t)
	writeIORef (dataL t) $! case d of
		LThunk _ -> LThunk m
		LConst _ -> LThunk m
	dirty (metaL t) -- dirties only dependents
-- appends a change to the chain of pending changes
-- | Compose a further (lazy) transformation onto the stored value or
-- pending computation, then dirty dependents.
modifyL :: (Layer l inc) => L l inc a -> (a -> l inc a) -> Outside inc ()
modifyL t f = unsafeIOToInc $ do
	d <- readIORef (dataL t)
	writeIORef (dataL t) $! case d of
		LThunk force -> LThunk $ force >>= f -- if it has never been evaluated, it remains so
		LConst value -> LThunk $ f value
	dirty (metaL t) -- dirties only dependents
-- | Tests if a lazy modifiable has been evaluated
isUnevaluatedL :: (Layer l inc) => L l1 inc a -> l inc Bool
isUnevaluatedL t = do
	d <- unsafeIOToInc $ readIORef (dataL t)
	case d of
		LThunk force -> return True --unevaluated thunk
		LConst value -> return False -- constant value
-- * Thunks
-- Thunk interface for computation thunks at the inner layer.
instance (Layer Inside Adapton) => Thunk U Inside Adapton where
	new = thunkU
	{-# INLINE new #-}
	newc = constU
	{-# INLINE newc #-}
	read = forceInnerU
	{-# INLINE read #-}
-- Thunk interface for computation thunks at the outer layer.
instance (Layer Outside Adapton) => Thunk U Outside Adapton where
	new = thunkU
	{-# INLINE new #-}
	newc = constU
	{-# INLINE newc #-}
	read = forceOuterU
	{-# INLINE read #-}
-- no memoization at the outer layer
instance (Layer Outside Adapton) => Output U Outside Adapton where
	thunk = thunkU
	{-# INLINE thunk #-}
	const = constU
	{-# INLINE const #-}
	force = forceOuterU
	{-# INLINE force #-}
	forceOutside = forceOuterU
	{-# INLINE forceOutside #-}
-- Inner-layer output interface: adds the memoization combinators.
instance (Layer Inside Adapton) => Output U Inside Adapton where
	thunk = thunkU
	{-# INLINE thunk #-}
	const = constU
	{-# INLINE const #-}
	force = forceInnerU
	{-# INLINE force #-}
	forceOutside = world . forceNoDependentsU
	{-# INLINE forceOutside #-}
	memo = memoU
	{-# INLINE memo #-}
	memoAs = memoUAs
	{-# INLINE memoAs #-}
	gmemoQ = gmemoQU
	{-# INLINE gmemoQ #-}
	gmemoQAs = gmemoQUAs
	{-# INLINE gmemoQAs #-}
-- | Creates a new thunk
-- Allocates a fresh identifier, an IORef holding the unevaluated
-- computation, and an (initially empty) weak map of dependents.
thunkU :: (Layer l inc,Layer l1 inc) => l1 inc a -> l inc (U l1 inc a)
thunkU c = unsafeIOToInc $ do
	idU <- newUnique
	dta <- newIORef (Thunk c)
	dependentsU <- WeakMap.new
	-- wdta <- mkWeakRefKey dta dta Nothing -- we use a weak pointer to avoid keeping the thunk alive due to its metadata
	debug ("thunkU "++show idU) $ return $ U (dta,(NodeMeta (idU,dependentsU,dirtyValue' dta,forgetUData' dta,Nothing)))
-- | Creates a thunk holding a constant value.  Constants can never be
-- dirtied, so the dependents/dirty fields are deliberately left as errors:
-- they must never be consulted for a @Const@ node.
constU :: (Layer l inc,Layer l1 inc) => a -> l inc (U l1 inc a)
constU v = unsafeIOToInc $ do
	idU <- newUnique
	dta <- newIORef (Const v)
	-- wdta <- mkWeakRefKey dta dta Nothing -- we use a weak pointer to avoid keeping the thunk alive due to its metadata
	--debug ("constU "++show idU) $
	return $ U (dta,(NodeMeta (idU,error "no dependents",error "no dirty",forgetUData' dta,Nothing)))
-- | Force a computation at the outer layer, without change propagation
-- and without recording any dependencies on the caller.
forceOuterU :: (AdaptonImpl inc,IncK inc a,Layer Outside inc) => U Outside inc a -> Outside inc a
forceOuterU t = do
	contents <- unsafeIOToInc $ readIORef (dataU t)
	case contents of
		Const v -> return v
		Thunk compute -> compute
		_ -> error "forceOuterU stores no IC data"
-- | Force a computation with change propagation
-- NOTE: if we allow @U@ thunks to be modified, then this constant optimization is unsound!
forceInnerU :: (AdaptonImpl inc,Eq a,IncK inc a,Layer Inside inc) => U Inside inc a -> Inside inc a
forceInnerU = \t -> {-debug ("forceInnerU " ++ show (hashUnique $ idU t)) $ -} do
	value <- forceNoDependentsU t
	has <- hasDependenciesU t -- for the case when a thunk is actually a constant computation (what arises frequently in generic code...), we don't need to record dependencies
	if has
		then unsafeIOToInc $ addDependency (metaU t) (checkU t $! value) -- updates dependencies of callers
		else unsafeIOToInc $ writeIORef (dataU t) $! Const value -- make it an actual constant
	return value
-- tests if a thunk has no dependencies
-- | Precondition: the thunk must already have been evaluated; calling this
-- on an unevaluated @Thunk@ is an error.
hasDependenciesU :: Layer Inside inc => U Inside inc a -> Inside inc Bool
hasDependenciesU t = do
	d <- unsafeIOToInc $ readIORef (dataU t)
	case d of
		Value _ value force dependencies -> liftM (not . null) $ unsafeIOToInc $ readIORef dependencies
		Thunk force -> error "cannot test dependencies of unevaluated thunk"
		Const value -> return False -- constant value
-- | Returns the last computed value of a thunk without repairing it,
-- i.e. the cached value may be dirty.  Fails for unevaluated thunks.
oldvalueU :: (Layer l inc,Layer l1 inc) => U l1 inc a -> l inc a
oldvalueU thunk = do
	contents <- unsafeIOToInc $ readIORef (dataU thunk)
	case contents of
		Const v -> return v
		Value _ v _ _ -> return v -- possibly stale cached value
		Thunk _ -> error "no old value available"
-- force that does not record any dependency on other thunks, e.g., (internally) when only displaying the contents or when only repairing
-- The dirty flag is an unboxed bit: 0# = clean, 1# = dirty.
{-# INLINE forceNoDependentsU #-}
forceNoDependentsU :: (AdaptonImpl inc,IncK inc a,Layer Inside inc) => U Inside inc a -> Inside inc a
forceNoDependentsU = \t -> {-debug ("forceNoDependentsU "++show (idNM $ metaU t)) $ -} do
	d <- unsafeIOToInc $ readIORef (dataU t)
	case d of
		Value 0# value force dependencies -> return value -- is not dirty
		Value 1# value force dependencies -> repairInnerU t value force dependencies -- is dirty
		Thunk force -> unsafeIOToInc (newIORef []) >>= evaluateInnerU t force --unevaluated thunk
		Const value -> return value -- constant value
-- force that does not return the value nor adds dependencies, but instead checks whether the value has not changed
-- | Used as the "check" side of a dependency edge: returns 'True' when the
-- thunk's (possibly repaired) value equals the value the caller last saw.
checkU :: (AdaptonImpl inc,Eq a,IncK inc a,Layer Inside inc) => U Inside inc a -> a -> Inside inc Bool
checkU t oldv = do
	d <- unsafeIOToInc $ readIORef (dataU t)
	case d of
		Value 0# value force dependencies -> return (oldv==value)
		Value 1# value force dependencies -> liftM (oldv ==) (repairInnerU t value force dependencies)
		Thunk _ -> return False -- if the thunk has never been computed
		Const value -> return False -- given that @U@ thunks cannot be mutated, the value for constants cannot change
-- used to avoid forcing the current node if none of the dependencies changes
-- | Walks the node's dependencies in original evaluation order; only if
-- some dependency actually changed value is the node re-evaluated,
-- otherwise its cached value is kept and merely un-dirtied.
repairInnerU :: (AdaptonImpl inc,Layer Inside inc) => U Inside inc a -> a -> Inside inc a -> IORef (Dependencies inc) -> Inside inc a
repairInnerU t value force dependencies = {-# SCC repairInnerU #-} debug ("repairing thunk "++ show (hashUnique $ idNM $ metaU t)) $
		unsafeIOToInc (readIORef dependencies) >>= foldr repair' norepair' . reverse --we need to reverse the dependency list to respect evaluation order
	where
	{-# INLINE norepair' #-}
	norepair' = unsafeIOToInc (writeDirtyValue (dataU t) 0#) >> return value -- if no dependency is dirty, simply return its value
	{-# INLINE repair' #-}
	repair' (Dependency (srcMetaW,dirtyW,checkW,tgtMetaW),_) m = do
		isDirty <- unsafeIOToInc $ readIORef dirtyW
		if isDirty
			then do
				unsafeIOToInc $ writeIORef' dirtyW False -- undirty the dependency
				ok <- checkW -- checks if the dependency does not need to be re-evaluated (dependent not dirty or the new value is the same as the old one)
				if ok
					then debug ("dependency has not changed "++show (idNM $ srcMetaW) ++" "++show (idNM $ tgtMetaW)) m
					else debug ("dependency has changed"++show (idNM $ srcMetaW) ++" "++show (idNM $ tgtMetaW)) $ unsafeIOToInc (clearDependencies dependencies >> newIORef []) >>= evaluateInnerU t force -- we create a new dependencies reference to free all the old data that depends on the it
			else m
-- recomputes a node
-- does not clear the dependencies on its own
-- | Re-runs the stored computation with this node pushed on the thunk
-- stack, so that any thunks it forces register as its dependencies; the
-- fresh value is stored clean (0#).
{-# INLINE evaluateInnerU #-}
evaluateInnerU :: (AdaptonImpl inc,Typeable inc,Layer Inside inc) => U Inside inc a -> Inside inc a -> IORef (Dependencies inc) -> Inside inc a
evaluateInnerU t force dependencies = {-# SCC evaluateInnerU #-} debug ("re-evaluatingInnerU " ++ show (hashUnique $ idU t)) $ do
	unsafeIOToInc $ pushStack (metaU t :!: Strict.Just dependencies)
	value <- force
	unsafeIOToInc $ writeIORef (dataU t) $! Value 0# value force dependencies
	popStack'
	return value
-- | Classifies a thunk's status:
-- 'Nothing' = unevaluated, @'Just' True@ = evaluated but dirty,
-- @'Just' False@ = evaluated and up to date.
isDirtyUnevaluatedU :: (Layer l inc) => U l1 inc a -> l inc (Maybe Bool)
isDirtyUnevaluatedU thunk = do
	contents <- unsafeIOToInc $ readIORef (dataU thunk)
	return $ case contents of
		Thunk _ -> Nothing -- never evaluated
		Const _ -> Just False -- constants are never dirty
		Value 1# _ _ _ -> Just True -- cached value is dirty
		Value 0# _ _ _ -> Just False -- cached value is clean
-- | Tests whether a thunk has never been forced.
isUnevaluatedU :: (Layer l inc) => U l1 inc a -> l inc Bool
isUnevaluatedU thunk = do
	contents <- unsafeIOToInc $ readIORef (dataU thunk)
	case contents of
		Thunk _ -> return True -- still an unevaluated thunk
		_ -> return False -- evaluated value or constant
-- | Explicit memoization for recursive functions, fixpoint
-- A fixed-point operation returning thunks that in the process of tying the knot adds memoization.
memoU :: (Typeable inc,Typeable a,Typeable arg,IncK inc a,Layer Outside inc,Memo arg) => ((arg -> Inside inc (U Inside inc a)) -> arg -> Inside inc a) -> (arg -> Inside inc (U Inside inc a))
memoU f = let memo_func = memoNonRecU (thunkU . f memo_func) in memo_func
-- | Like 'memoU' but keyed under an explicit @name@, so separate call
-- sites can share (or isolate) a memo table.
memoUAs :: (Typeable inc,Memo name,Typeable a,Typeable arg,IncK inc a,Layer Outside inc,Memo arg) => name -> ((arg -> Inside inc (U Inside inc a)) -> arg -> Inside inc a) -> (arg -> Inside inc (U Inside inc a))
memoUAs name f = let memo_func = memoNonRecUAs name (thunkU . f memo_func) in memo_func
-- | Generic-query variant of 'memoU': memoizes a SYB-style generic query
-- by tying the knot through 'gmemoNonRecU'.
gmemoQU :: (Typeable inc,Typeable b,Typeable ctx,IncK inc b,Output U Inside inc) => Proxy ctx -> (GenericQMemoU ctx Inside inc b -> GenericQMemoU ctx Inside inc b) -> GenericQMemoU ctx Inside inc b
gmemoQU ctx (f :: (GenericQMemoU ctx Inside inc b -> GenericQMemoU ctx Inside inc b)) =
	let memo_func :: GenericQMemoU ctx Inside inc b
	    memo_func = gmemoNonRecU ctx (f memo_func)
	in memo_func
-- | Named variant of 'gmemoQU'.
gmemoQUAs :: (Typeable inc,Memo name,Typeable b,Typeable ctx,IncK inc b,Output U Inside inc) => Proxy ctx -> name -> (GenericQMemoU ctx Inside inc b -> GenericQMemoU ctx Inside inc b) -> GenericQMemoU ctx Inside inc b
gmemoQUAs ctx name (f :: (GenericQMemoU ctx Inside inc b -> GenericQMemoU ctx Inside inc b)) =
	let memo_func :: GenericQMemoU ctx Inside inc b
	    memo_func = gmemoNonRecUAs ctx name (f memo_func)
	in memo_func
-- * Auxiliary functions
{-# INLINE addDependency #-}
-- adds a bidirectional dependency on a thunk
-- | Links the callee to the thunk currently on top of the stack (the
-- caller): the forward edge is pushed on the caller's dependency list and
-- the reverse edge is a weak entry in the callee's dependents map, keyed
-- by the caller id and kept alive only by the caller's dependency list.
addDependency :: (AdaptonImpl inc,Layer Inside inc) => NodeMeta inc -> Inside inc Bool -> IO ()
addDependency calleemeta check = do
	!top <- topThunkStack
	case top of
		Just (callermeta :!: Strict.Just callerdependencies) -> debug ("added BX dependency: "++show (hashUnique $ idNM calleemeta) ++ " -> " ++ show (hashUnique $ idNM callermeta)) $ do
			dirtyW <- newIORef False
			let dependencyW = Dependency (calleemeta,dirtyW,check,callermeta)
			let weakset = dependentsNM calleemeta
			let callerid = idNM callermeta
			weak <- liftIO $ mkWeakRefKey callerdependencies dependencyW (Just $ WeakMap.deleteFinalized weakset callerid) -- the dependency lives as long as the dependencies reference lives, that in turn lives as long as the caller thunk itself lives
			modifyIORef' callerdependencies ((dependencyW,finalize weak):)
			liftIO $ WeakMap.insertWeak weakset callerid weak
		otherwise -> debug ("nostack "++show (hashUnique $ idNM calleemeta)) $ return () -- no caller: top-level force, nothing to record
-- | deletes all dependencies, by running their finalizers; this will also kill the "inverse" dependents
{-# INLINE clearDependencies #-}
clearDependencies :: IORef (Dependencies inc) -> IO ()
clearDependencies = \r -> readIORef r >>= mapM_ (liftIO . snd)
{-# INLINE writeDirtyValue #-}
-- | Overwrites only the dirty bit of an evaluated node.
-- NOTE: the lambda pattern is partial; callers must guarantee the node
-- currently holds a @Value@.
writeDirtyValue :: IORef (UData l inc a) -> UBool -> IO ()
writeDirtyValue = \dta dirty -> modifyIORef' dta (\(Value _ value force dependencies) -> Value dirty value force dependencies)
{-# INLINE dirtyValue #-}
-- | Weak-pointer wrapper around 'dirtyValue''; a no-op if the node's data
-- has already been garbage collected.
dirtyValue :: Weak (IORef (UData l inc a)) -> IO ()
dirtyValue = \wdta -> do
	mb <- liftIO $ deRefWeak wdta
	case mb of
		Just dta -> dirtyValue' dta
		Nothing -> return ()
-- | Marks an evaluated node as dirty (sets the flag to 1#).
-- Partial like 'writeDirtyValue': only valid on @Value@ nodes.
dirtyValue' :: IORef (UData l inc a) -> IO ()
dirtyValue' dta = modifyIORef' dta (\(Value _ value force dependencies) -> Value 1# value force dependencies)
-- consider the example: do { t <- thunk (ref e); r <- force t; v <- get r; set r v'; r <- force t; v <- get r }
--we need to forget the previous execution of the thunk (and therefore ignore the set over the inner reference) so that execution is consistent with re-evaluating from scratch
-- XXX: we could refine the type system to prevent the modification of references created inside thunks, but that seems overcomplicated just to cut an edge case.
-- dirtying purges dead weak pointers
{-# INLINE dirty #-}
-- | Entry point for change propagation: forget the creator's memoized data
-- (see example above) and transitively dirty everything downstream.
dirty :: NodeMeta inc -> IO ()
dirty = \umeta -> do
	dirtyCreator (creatorNM umeta) -- if we change a reference that was created inside some memoized thunk, we have to forget all the memoized data for the parent thunk
	dirtyRecursively (dependentsNM umeta)
-- does not remove dead dependencies
-- | Transitive dirtying over the dependents graph; already-dirty edges
-- stop the recursion, keeping the traversal linear per propagation round.
dirtyRecursively :: Dependents inc -> IO ()
dirtyRecursively deps = do
	WeakMap.mapM_ dirty' deps -- take the chance to purge eventually dead pointers from dependents to dependencies (since there is no ultimate guarantee that finalizers run)
	where
	{-# INLINE dirty' #-}
	dirty' :: (Unique,Dependent inc) -> IO ()
	dirty' = \(_,Dependency (srcMeta,dirty,check,tgtMeta)) -> do
		isDirty <- readIORef dirty
		unless isDirty $ do
			writeIORef dirty $! True -- dirty the dependency
			dirtyUM tgtMeta -- dirty the caller thunk
			dirtyRecursively (dependentsNM tgtMeta) -- dirty recursively
-- whenever a reference is changed, remove all the cached data of the parent thunk that created the reference, and dirty recursive thunks that depend on the parent thunk
dirtyCreator :: Maybe (Creator inc) -> IO ()
dirtyCreator Nothing = return ()
dirtyCreator (Just wcreator) = do
	mb <- liftIO $ deRefWeak wcreator
	case mb of
		Just creatorMeta -> do
			forgetUM creatorMeta
			dirtyRecursively (dependentsNM creatorMeta)
		Nothing -> return () -- creator already collected: nothing to forget
-- forgets all the old cached data in a thunk
-- | Weak-pointer wrapper around 'forgetUData''; no-op if collected.
forgetUData :: Weak (IORef (UData l inc a)) -> IO ()
forgetUData wdta = do
	mb <- liftIO $ deRefWeak wdta
	case mb of
		Just dta -> forgetUData' dta
		Nothing -> return ()
-- | Demotes an evaluated node back to an unevaluated @Thunk@, releasing
-- its dependency edges so stale data can be garbage collected.
forgetUData' :: IORef (UData l inc a) -> IO ()
forgetUData' dta = do
	d <- readIORef dta
	case d of
		Value dirty value force dependencies -> clearDependencies dependencies >> (writeIORef dta $! Thunk force)
		otherwise -> return () -- thunks and constants carry no cached state
{-# INLINE mkRefCreator #-}
-- if the parent is a reference, we don't need to remember it because no dirtying will be necessary
-- | Records (weakly) the thunk inside which a reference is being created,
-- so that later mutations of the reference can invalidate the creator's
-- memoized data; returns 'Nothing' when created at top level.
mkRefCreator :: AdaptonImpl inc => Unique -> IO (Maybe (Creator inc))
mkRefCreator = \idU -> liftIO $ do
	top <- topStack
	case top of
		Just (callermeta :!: Strict.Just callerdependencies) -> do
			weak <- mkWeakRefKey callerdependencies callermeta Nothing -- the parent reference should live as long as the creator's dependencies
			{-debug (show (hashUnique idU) ++ "refparent " ++ show (hashUnique $ idNM callermeta)) $ -}
			return $ Just weak
		otherwise -> {-debug (show (hashUnique idU) ++ "refparent NONE") $ -}return Nothing
-- | Generic traversal (SYB-over-monads) support for mutable references
-- @M@: folding reads the contents, unfolding allocates a fresh cell.
instance (IncK inc a,Layer l inc,Thunk M l inc,MData ctx (l inc) a
		, Sat (ctx (M l inc a)),DeepTypeable (M l inc a)
		) => MData ctx (l inc) (M l inc a) where
	gfoldl ctx k z t = z new >>= flip k (read t)
	gunfold ctx k z c = z new >>= k
	toConstr ctx m = dataTypeOf ctx m >>= (return . (flip indexConstr) 1)
	dataTypeOf ctx x = return ty
		where ty = mkDataType "Control.Monad.Adapton.M" [mkConstr ty "M" [] Prefix]
-- | Generic traversal support for lazy modifiables @L@.
instance (IncK inc a,Layer l inc,Thunk L l inc,MData ctx (l inc) a
		, Sat (ctx (L l inc a)),DeepTypeable (L l inc a)
		) => MData ctx (l inc) (L l inc a) where
	gfoldl ctx k z t = z new >>= flip k (read t)
	gunfold ctx k z c = z new >>= k
	toConstr ctx m = dataTypeOf ctx m >>= (return . (flip indexConstr) 1)
	dataTypeOf ctx x = return ty
		where ty = mkDataType "Control.Monad.Adapton.L" [mkConstr ty "L" [] Prefix]
-- | Generic traversal support for output thunks @U@.
instance (IncK inc a,Layer l inc,Thunk U l inc,MData ctx (l inc) a
		, Sat (ctx (U l inc a)),DeepTypeable (U l inc a)
		) => MData ctx (l inc) (U l inc a) where
	gfoldl ctx k z t = z new >>= flip k (read t)
	gunfold ctx k z c = z new >>= k
	toConstr ctx m = dataTypeOf ctx m >>= (return . (flip indexConstr) 1)
	dataTypeOf ctx x = return ty
		where ty = mkDataType "Control.Monad.Adapton.U" [mkConstr ty "U" [] Prefix]
| cornell-pl/HsAdapton | src/Control/Monad/Incremental/Internal/Adapton/Algorithm.hs | bsd-3-clause | 25,988 | 192 | 22 | 4,852 | 7,941 | 4,026 | 3,915 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE StandaloneDeriving , CPP#-}
{-# LANGUAGE DeriveGeneric, DeriveAnyClass #-}
module Codec.JVM.Pretty
(module Text.PrettyPrint.GenericPretty)
where
import Codec.JVM
import Codec.JVM.ASM.Code.Instr
import Codec.JVM.ConstPool
import Codec.JVM.Const
import Codec.JVM.Method
import Codec.JVM.Field
import Codec.JVM.Attr
import Codec.JVM.ASM.Code.CtrlFlow
import GHC.Generics
import Text.PrettyPrint.GenericPretty
import Text.PrettyPrint
import Codec.JVM.Pretty.GenericOut
-- CPP macro: one line expands to both a standalone Generic and a
-- generic-deriving Out (pretty-printing) instance for the given type.
#define DERIVE_INSTANCE(T) deriving instance Generic (T); deriving instance Out (T)
-- Non-parameterized JVM codec types get their Out instances via the macro.
DERIVE_INSTANCE(ClassFile)
DERIVE_INSTANCE(Version)
DERIVE_INSTANCE(IClassName)
DERIVE_INSTANCE(Const)
DERIVE_INSTANCE(MethodInfo)
DERIVE_INSTANCE(FieldInfo)
DERIVE_INSTANCE(AccessFlag)
DERIVE_INSTANCE(Attr)
DERIVE_INSTANCE(NameAndDesc)
DERIVE_INSTANCE(MethodRef)
DERIVE_INSTANCE(FieldRef)
DERIVE_INSTANCE(ConstVal)
DERIVE_INSTANCE(UName)
DERIVE_INSTANCE(Desc)
DERIVE_INSTANCE(Code)
DERIVE_INSTANCE(Offset)
DERIVE_INSTANCE(FieldType)
DERIVE_INSTANCE(PrimType)
DERIVE_INSTANCE(StackMapFrame)
DERIVE_INSTANCE(InnerClassMap)
DERIVE_INSTANCE(InnerClass)
DERIVE_INSTANCE(VerifType)
DERIVE_INSTANCE(MethodDef)
DERIVE_INSTANCE(MethodDesc)
#undef DERIVE_INSTANCE
-- Parameterized types cannot use the macro: their Out instances need an
-- @Out a@ constraint, so each pair is written out by hand.
deriving instance Generic (Signature a)
deriving instance Out a => Out (Signature a)
deriving instance Generic (MethodSignature a)
deriving instance Out a => Out (MethodSignature a)
deriving instance Generic (FieldSignature a)
deriving instance Out a => Out (FieldSignature a)
deriving instance Generic (ClassSignature a)
deriving instance Out a => Out (ClassSignature a)
deriving instance Generic (TypeVariableDeclaration a)
deriving instance Out a => Out (TypeVariableDeclaration a)
deriving instance Generic (ReferenceParameter a)
deriving instance Out a => Out (ReferenceParameter a)
deriving instance Generic (Parameter a)
deriving instance Out a => Out (Parameter a)
deriving instance Generic (TypeParameter a)
deriving instance Out a => Out (TypeParameter a)
deriving instance Generic (Bound a)
deriving instance Out a => Out (Bound a)
-- | 'Instr' gets no Generic instance, so pretty-print it via its 'Show'
-- representation, wrapped in parentheses.
instance Out Instr where
    docPrec _ instr = parens $ text $ show instr
    doc = docPrec 0
| HaskellZhangSong/codec-jvm-pretty | src/Codec/JVM/Pretty.hs | bsd-3-clause | 2,242 | 0 | 9 | 248 | 666 | 335 | 331 | -1 | -1 |
module MB.Gen.Index
( buildIndexPage
)
where
import Control.Monad
import Control.Monad.Trans
import System.Directory
import MB.Types
-- | Build the blog's index page by copying the rendered HTML of the most
-- recent post over the index file.  Fails when the blog has no posts.
buildIndexPage :: BlogM ()
buildIndexPage = do
  blog <- theBlog
  when (null $ blogPosts blog) $
    error "No blog posts; please create a first post in posts/."
  let newest = head $ blogPosts blog
      srcPath = ofsPostFinalHtml (outputFS blog) newest
      destPath = ofsIndexHtml (outputFS blog)
  liftIO $ copyFile srcPath destPath
| jtdaugherty/mathblog | src/MB/Gen/Index.hs | bsd-3-clause | 473 | 0 | 12 | 107 | 135 | 69 | 66 | 15 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Physics.FunPart.MCTest
( runTests
) where
import qualified Data.Vector as V
import Test.QuickCheck
import Data.Maybe (isNothing)
import System.Random (mkStdGen)
import Control.Monad.State (evalState)
import Physics.FunPart.Core
import Physics.FunPart.Approx
import Physics.FunPart.MC
import Physics.FunPart.Vec
-- | A vector of non-negative entries normalized to sum to one (built via
-- 'fromVector'); used as a discrete probability distribution in the tests.
newtype PositiveVector a = PositiveVector (V.Vector a)
    deriving (Show, Eq, Ord)
instance (Arbitrary a, Ord a, Num a, Fractional a) => Arbitrary (PositiveVector a) where
    -- Generate 1-20 values, take absolute values, normalize, and retry
    -- until at least one entry is strictly positive (all-zero input would
    -- normalize to NaNs and be rejected by 'anyPositive').
    arbitrary = let helper = do n <- choose (1, 20)
                                v <- vector n
                                return $ fromVector $ V.map abs $ V.fromList v
                 in helper `suchThat` anyPositive
-- | True when at least one entry of the wrapped vector is strictly positive.
anyPositive :: (Ord a, Num a) => PositiveVector a -> Bool
anyPositive (PositiveVector xs) = V.any (> 0) xs
-- | Normalize a vector so its entries sum to one and wrap it.
fromVector :: Fractional a => V.Vector a -> PositiveVector a
fromVector xs = PositiveVector (V.map (/ total) xs)
  where total = V.sum xs
-- | Sampling a normalized distribution at cumulative position 0 must hit
-- the first bin (index 1).
prop_getFirst :: PositiveVector Double -> Bool
prop_getFirst (PositiveVector xs) = sampleV xs 0.0 == Just 1
-- | Sampling past the total cumulative mass (1.1 > 1.0) must miss.
prop_getLast :: PositiveVector Double -> Bool
prop_getLast (PositiveVector xs) = isNothing (sampleV xs 1.1)
-- | An isotropically sampled vector of requested magnitude must have
-- (approximately) that magnitude, for any seed.
prop_normalisation :: Positive FPFloat -> Positive (Large Int) -> Bool
prop_normalisation norm seed = mag randomVec ~== norm'
    where norm' = getPositive norm
          seed' = getLarge $ getPositive seed
          randomVec = evalState (sampleIsoVec norm') $ mkStdGen seed'
-- | Exponentially distributed samples are never negative.
prop_positiveExp :: Positive FPFloat -> Positive (Large Int) -> Bool
prop_positiveExp lambda seed = expRandom >= 0.0
    where expRandom = evalState (sampleExp lambda') $ mkStdGen seed'
          lambda' = getPositive lambda
          seed' = getLarge $ getPositive seed
-- Template Haskell marker: forces all prop_* above to be in scope for
-- the $quickCheckAll splice below.
return []
-- | Run every prop_* property in this module; 'True' iff all pass.
runTests :: IO Bool
runTests = $quickCheckAll
| arekfu/funpart | test/Physics/FunPart/MCTest.hs | bsd-3-clause | 1,937 | 0 | 15 | 502 | 623 | 318 | 305 | 43 | 1 |
module Main where
import Control.Monad
import System.Environment
import YaLedger.Types
import YaLedger.Tree
import YaLedger.Pretty
import YaLedger.Parser
import YaLedger.Parser.CSV
-- | Parse CSV records against a chart of accounts and pretty-print them.
-- Expects two command-line arguments: the CSV parser config file and the
-- path of the CSV file to load.  The chart of accounts is read from the
-- fixed file @test.accounts@.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [config, path] -> do
      coa <- readCoA "test.accounts"
      records <- loadCSV config coa path
      -- print each parsed record on its own line
      forM_ records $ putStrLn . prettyPrint
    _ -> error "Usage: csv <config> <path>"
| portnov/yaledger | CSV.hs | bsd-3-clause | 352 | 0 | 10 | 61 | 104 | 55 | 49 | 14 | 1 |
-- |
module Window (UI(..), initGL, terminate) where
import Prelude hiding (init)
import Control.Applicative
import Control.Monad (when)
import Data.IORef
import Data.Maybe (isNothing)
import Data.Set (Set)
import qualified Data.Set as S
import Data.Time.Clock
import Graphics.UI.GLFW
import Linear
import System.Directory (getCurrentDirectory, setCurrentDirectory)
-- | Interface updates provided to the party responsible for
-- generating each frame.  A fresh 'UI' snapshot is produced by the action
-- returned from 'initGL', once per frame after event polling.
data UI = UI { timeStep :: Double
               -- ^ Time in seconds since last frame
             , keysPressed :: Set Key
               -- ^ All keys currently pressed
             , buttonsPressed :: Set MouseButton
               -- ^ All mouse buttons currently pressed
             , mousePos :: V2 Double
               -- ^ Current mouse position
             , windowSize :: V2 Int
               -- ^ Current window size
             }
-- GLFW callbacks: each records the latest input state into an IORef that
-- the per-frame 'UI' snapshot is read from.
-- Key presses add to the set, releases remove; repeat events are ignored
-- by the catch-all case.
keyCallback :: IORef (Set Key) -> KeyCallback
keyCallback keys _w k _ KeyState'Pressed _mods = modifyIORef' keys (S.insert k)
keyCallback keys _w k _ KeyState'Released _mods = modifyIORef' keys (S.delete k)
keyCallback _ _ _ _ _ _ = return ()
-- Mouse buttons mirror the key handling (press inserts, release deletes).
mbCallback :: IORef (Set MouseButton) -> MouseButtonCallback
mbCallback mbs _w b MouseButtonState'Pressed _ = modifyIORef' mbs (S.insert b)
mbCallback mbs _w b MouseButtonState'Released _ = modifyIORef' mbs (S.delete b)
-- Cursor position and window size simply overwrite the previous value.
mpCallback :: IORef (V2 Double) -> CursorPosCallback
mpCallback mp _w x y = writeIORef mp (V2 x y)
wsCallback :: IORef (V2 Int) -> WindowSizeCallback
wsCallback ws _w w h = writeIORef ws (V2 w h)
-- | Initialize GLFW and create an OpenGL 3.2 core-profile window with the
-- given title and size.  Returns an action that, when run each frame,
-- swaps buffers, polls events, and yields the current 'UI' snapshot
-- (including the time elapsed since the previous call).
initGL :: String -> Int -> Int -> IO (IO UI)
initGL windowTitle width height = do
  -- GLFW init can change the working directory on some platforms,
  -- so remember it and restore it afterwards.
  currDir <- getCurrentDirectory
  setErrorCallback $ Just simpleErrorCallback
  r <- init
  when (not r) (error "Error initializing GLFW!")
  -- Request a forward-compatible 3.2 core profile context.
  windowHint $ WindowHint'ClientAPI ClientAPI'OpenGL
  windowHint $ WindowHint'OpenGLForwardCompat True
  windowHint $ WindowHint'OpenGLProfile OpenGLProfile'Core
  windowHint $ WindowHint'ContextVersionMajor 3
  windowHint $ WindowHint'ContextVersionMinor 2
  m@(~(Just w)) <- createWindow width height windowTitle Nothing Nothing
  when (isNothing m) (error "Couldn't create window!")
  makeContextCurrent m
  -- Mutable cells that the GLFW callbacks keep up to date.
  kbState <- newIORef S.empty
  mbState <- newIORef S.empty
  mpState <- getCursorPos w >>= newIORef . uncurry V2
  wsState <- getWindowSize w >>= newIORef . uncurry V2
  lastTick <- getCurrentTime >>= newIORef
  setKeyCallback w (Just $ keyCallback kbState)
  setMouseButtonCallback w (Just $ mbCallback mbState)
  setCursorPosCallback w (Just $ mpCallback mpState)
  setWindowSizeCallback w (Just $ wsCallback wsState)
  setCurrentDirectory currDir
  -- The per-frame action: present, pump events, compute dt, snapshot state.
  return $ do swapBuffers w
              pollEvents
              t <- getCurrentTime
              dt <- realToFrac . diffUTCTime t <$> readIORef lastTick
              writeIORef lastTick t
              UI dt <$> readIORef kbState
                    <*> readIORef mbState
                    <*> readIORef mpState
                    <*> readIORef wsState
  where simpleErrorCallback e s = putStrLn $ unwords [show e, show s]
| rabipelais/record-gl | examples/2D/Window.hs | bsd-3-clause | 3,090 | 0 | 14 | 729 | 893 | 436 | 457 | 62 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Network.CGI
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/core/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (uses Control.Exception)
--
-- Haskell binding for CGI
--
-- Original Version by Erik Meijer <mailto:erik@cs.ruu.nl>
-- Further hacked on by Sven Panne <mailto:sven_panne@yahoo.com>
-- Further hacking by Andy Gill <mailto:andy@galconn.com>
--
-----------------------------------------------------------------------------
-- note: if using Windows, you might need to wrap 'withSocketsDo' round main.
module Network.CGI (
Html,
wrapper, -- :: ([(String,String)] -> IO Html) -> IO ()
pwrapper, -- :: PortID -> ([(String,String)] -> IO Html) -> IO ()
connectToCGIScript -- :: String -> PortID -> IO ()
) where
import Data.Char ( ord, chr, toUpper, isDigit, isAlphaNum, isHexDigit )
import System.Environment ( getEnv )
import Control.Monad(MonadPlus(..), guard)
import System.IO
import Text.Html
import Control.Exception as Exception
import Control.Concurrent
import Network
import Network.Socket as Socket
-- ---------------------------------------------------------------------------
-- Yet another combinator parser library
-- NOTE: This is all a little bit of a sledgehammer here for the simple task
-- at hand...
-- The parser monad
-- | A list-of-successes combinator parser: applying it to an input string
-- yields every (value, remaining input) parse.
-- NOTE(review): this predates the Functor-Applicative-Monad hierarchy; a
-- modern GHC would additionally require an Applicative instance.
newtype Parser a = Parser (String -> [(a,String)])
instance Functor Parser where
   -- map :: (a -> b) -> (Parser a -> Parser b)
   fmap f (Parser p) = Parser (\inp -> [(f v, out) | (v, out) <- p inp])
instance Monad Parser where
   -- return :: a -> Parser a
   return v = Parser (\inp -> [(v,inp)])
   -- >>= :: Parser a -> (a -> Parser b) -> Parser b
   (Parser p) >>= f = Parser (\inp -> concat [papply (f v) out
                              | (v,out) <- p inp])
instance MonadPlus Parser where
   -- zero :: Parser a
   mzero = Parser (\_ -> [])
   -- (++) :: Parser a -> Parser a -> Parser a  (collects both alternatives)
   (Parser p) `mplus` (Parser q) = Parser (\inp -> (p inp ++ q inp))
-- Other primitive parser combinators
-- | Consume a single character; fails (no parses) on empty input.
item :: Parser Char
item = Parser (\inp -> case inp of
                 [] -> []
                 (x:xs) -> [(x,xs)])
-- | Make a parser's result head lazily available; this laziness is what
-- lets the recursive 'many' terminate.
force :: Parser a -> Parser a
force (Parser p) = Parser (\inp -> let x = p inp in
                             (fst (head x), snd (head x)) : tail x)
-- | Keep only the first (deterministic) parse.
first :: Parser a -> Parser a
first (Parser p) = Parser (\inp -> case p inp of
                            [] -> []
                            (x:_) -> [x])
-- | Run a parser on an input string.
papply :: Parser a -> String -> [(a,String)]
papply (Parser p) inp = p inp
-- Derived combinators
-- | Deterministic choice: try the left parser, fall back to the right.
plusplusplus :: Parser a -> Parser a -> Parser a
p `plusplusplus` q = first (p `mplus` q)
-- | Consume one character satisfying the predicate.
sat :: (Char -> Bool) -> Parser Char
sat p = do {x <- item; guard (p x); return x}
-- | Zero or more repetitions (greedy, deterministic).
many :: Parser a -> Parser [a]
many p = force (many1 p `plusplusplus` return [])
-- | One or more repetitions.
many1 :: Parser a -> Parser [a]
many1 p = do {x <- p; xs <- many p; return (x:xs)}
-- | Zero or more @p@, separated by @sep@.
sepby :: Parser a -> Parser b -> Parser [a]
p `sepby` sep = (p `sepby1` sep) `plusplusplus` return []
-- | One or more @p@, separated by @sep@ (separators are discarded).
sepby1 :: Parser a -> Parser b -> Parser [a]
p `sepby1` sep = do x <- p
                    xs <- many (do {sep; p})
                    return(x:xs)
-- | Match an exact character.
char :: Char -> Parser Char
char x = sat (x==)
-- | Match any alphanumeric character.
alphanum :: Parser Char
alphanum = sat isAlphaNum
-- | Match an exact string, character by character.
string :: String -> Parser String
string [] = return ""
string (x:xs) = do char x
                   string xs
                   return (x:xs)
-- | Match a single hexadecimal digit.
hexdigit :: Parser Char
hexdigit = sat isHexDigit
-- ---------------------------------------------------------------------------
-- Decoding application/x-www-form-urlencoded data
-- An URL encoded value consist of a sequence of
-- zero or more name "=" value pairs separated by "&"
-- Env ::= [Name "=" Value {"&" Name "=" Value}]
-- Names and values are URL-encoded,
-- according to the following table
--
-- character | encoding
-- ----------|---------
-- ' ' | '+'
-- '<' | "%XX"
-- c | "%"hexval(ord c)
-- | Decode an @application/x-www-form-urlencoded@ query string into
-- name\/value pairs; an unparsable input yields the empty list.
urlDecode :: String -> [(String,String)]
urlDecode input =
  case papply env input of
    ((pairs,_):_) -> pairs
    [] -> []
-- | Parse the whole query: @name=value@ pairs separated by @&@.
env :: Parser [(String,String)]
env = (do n <- urlEncoded
          string "="
          v <- urlEncoded
          return (n,v)) `sepby` (string "&")
-- | Parse a single URL-encoded token, reversing the encoding table above:
-- '+' decodes to a space and @%XX@ to the character with that hex code.
urlEncoded :: Parser String
urlEncoded
  = many ( alphanum `mplus` extra `mplus` safe
    `mplus` do{ char '+' ; return ' '}
    `mplus` do{ char '%'
              ; d <- hexadecimal
              ; return $ chr (hex2int d)
              }
    )
-- | Characters that URL encoding leaves untouched ("extra" set).
extra :: Parser Char
extra = sat (\c -> c `elem` "!*'(),")
-- | Characters that URL encoding leaves untouched ("safe" set).
safe :: Parser Char
safe = sat (\c -> c `elem` "$-_.")
-- | Exactly two hexadecimal digits, as used after a @%@ escape.
hexadecimal :: Parser HexString
hexadecimal = do
  hi <- hexdigit
  lo <- hexdigit
  return [hi, lo]
type HexString = String

-- | Convert a string of hexadecimal digits (either case) to its numeric
-- value; the empty string yields 0.  Calls 'error' on a non-hex digit.
--
-- The accumulator is forced at every step ('seq') so long inputs cannot
-- build a chain of suspended multiplications, unlike the lazy left fold
-- this replaces.
hex2int :: HexString -> Int
hex2int = go 0
  where
    go acc [] = acc
    go acc (c:cs) = let acc' = acc * 16 + toInt (toUpper c)
                    in acc' `seq` go acc' cs
    -- digit value: '0'..'9' map to 0..9, 'A'..'F' to 10..15
    toInt d | isDigit d = ord d - ord '0'
    toInt d | isHexDigit d = (ord d - ord 'A') + 10
    toInt d = error ("hex2int: illegal hex digit " ++ [d])
-- A function to do URL encoding and proving its correctness might be a
-- nice exercise for the book.
--
-- We don't usually need it for CGI scripts though. The browser does the
-- encoding and the CGI script does the decoding.
-- ---------------------------------------------------------------------------
-- Hide the CGI protocol from the programmer
-- | Run a CGI program: collect CGI environment variables and the decoded
-- query string, hand them to the user function, and print the resulting
-- HTML page preceded by the Content-type header.
wrapper :: ([(String,String)] -> IO Html) -> IO ()
wrapper f = do qs <- getQueryString
               cgiVars <- getCgiVars
               a <- f (cgiVars ++ urlDecode qs)
               putStr "Content-type: text/html\n\n"
               putStr (renderHtml a)
-- | Read every variable in 'cgiVarNames' from the process environment
-- (missing variables come back as the empty string via 'myGetEnv').
getCgiVars :: IO [(String,String)]
getCgiVars = do vals <- mapM myGetEnv cgiVarNames
                return (zip cgiVarNames vals)
-- | The set of standard CGI/HTTP environment variables exposed to the
-- user function by 'wrapper'.
cgiVarNames :: [String]
cgiVarNames =
   [ "DOCUMENT_ROOT"
   , "AUTH_TYPE"
   , "GATEWAY_INTERFACE"
   , "SERVER_SOFTWARE"
   , "SERVER_NAME"
   , "REQUEST_METHOD"
   , "SERVER_ADMIN"
   , "SERVER_PORT"
   , "QUERY_STRING"
   , "CONTENT_LENGTH"
   , "CONTENT_TYPE"
   , "REMOTE_USER"
   , "REMOTE_IDENT"
   , "REMOTE_ADDR"
   , "REMOTE_HOST"
   , "TZ"
   , "PATH"
   , "PATH_INFO"
   , "PATH_TRANSLATED"
   , "SCRIPT_NAME"
   , "SCRIPT_FILENAME"
   , "HTTP_CONNECTION"
   , "HTTP_ACCEPT_LANGUAGE"
   , "HTTP_ACCEPT"
   , "HTTP_HOST"
   , "HTTP_UA_COLOR"
   , "HTTP_UA_CPU"
   , "HTTP_UA_OS"
   , "HTTP_UA_PIXELS"
   , "HTTP_USER_AGENT"
   ]
-- | Serve the CGI function over a socket: listen on the given port and,
-- for each connection, read one line as the (encoded) query string and
-- reply with the rendered HTML page.
pwrapper :: PortID -> ([(String,String)] -> IO Html) -> IO ()
pwrapper pid f =
      do { sock <- listenOn pid
         ; acceptConnections fn sock
         }
 where
  fn h = do { qs <- hGetLine h
            ; a <- f (urlDecode qs)
            ; hPutStr h "Content-type: text/html\n\n"
            ; hPutStr h (renderHtml a)
            }
-- Accept loop: each connection is handled on its own thread and the
-- handle is closed even if the handler throws.
-- NOTE(review): the SockAddrInet pattern is partial -- a connection over a
-- non-IPv4 address would crash this thread; confirm only inet sockets are
-- expected here.
acceptConnections fn sock = do
  (h, SockAddrInet port haddr) <- accept' sock
  forkIO (fn h `finally` (hClose h))
  acceptConnections fn sock
accept' :: Socket                 -- Listening Socket
          -> IO (Handle,SockAddr)   -- StdIO Handle for read/write
accept' sock = do
 (sock', addr) <- Socket.accept sock
 handle <- socketToHandle sock' ReadWriteMode
 return (handle,addr)
-- ---------------------------------------------------------------------------
-- Small boot function for creating dummy cgi scripts
-- Sample program:
--
-- main = connectToCGIScript "localhost" (PortNumber 3432)
--
-- | Client half of 'pwrapper': forward this process's query string to a
-- CGI daemon at @host:port@ and stream its reply to stdout.  A normal
-- end-of-file from the daemon terminates cleanly; other I/O errors are
-- re-thrown.
connectToCGIScript :: String -> PortID -> IO ()
connectToCGIScript host portId
     = do { str <- getQueryString
          ; h <- connectTo host portId
                 `Exception.catch`
                   (\ e -> abort "Can not connect to CGI damon." e)
          ; hPutStrLn h str
          ; (sendBack h `finally` (hClose h))
              `Prelude.catch` (\e -> if isEOFError e
                                     then return ()
                                     else ioError e)
          }
-- | Emit a minimal HTML error page (so the web server still gets a valid
-- CGI response) and then rethrow the exception.
abort :: String -> Exception -> IO a
abort msg e =
    do { putStrLn ("Content-type: text/html\n\n" ++
                   "<html><body>" ++ msg ++ "</body></html>")
       ; throw e
       }
-- | Copy lines from the daemon's handle to stdout until EOF.  The caller
-- ('connectToCGIScript') catches the EOF exception that ends this loop.
sendBack :: Handle -> IO ()
sendBack h = do { s <- hGetLine h
                ; putStrLn s
                ; sendBack h
                }
-- | Fetch the raw query string per the CGI protocol: for POST requests it
-- is CONTENT_LENGTH bytes of stdin, otherwise the QUERY_STRING variable.
getQueryString :: IO String
getQueryString = do
   method <- myGetEnv "REQUEST_METHOD"
   case method of
      "POST" -> do len <- myGetEnv "CONTENT_LENGTH"
                   inp <- getContents
                   return (take (read len) inp)
      _ -> myGetEnv "QUERY_STRING"
-- | 'getEnv' that returns the empty string instead of failing for an
-- unset variable.  NOTE(review): relies on the pre-extensible-exceptions
-- 'Prelude.catch'; modern code would use 'lookupEnv'.
myGetEnv :: String -> IO String
myGetEnv v = Prelude.catch (getEnv v) (const (return ""))
| OS2World/DEV-UTIL-HUGS | libraries/Network/CGI.hs | bsd-3-clause | 8,746 | 20 | 16 | 2,468 | 2,491 | 1,324 | 1,167 | 180 | 3 |
{-# LANGUAGE PolyKinds, DataKinds, TemplateHaskell, TypeFamilies,
GADTs, TypeOperators, RankNTypes, FlexibleContexts, UndecidableInstances,
FlexibleInstances, ScopedTypeVariables, MultiParamTypeClasses,
OverlappingInstances #-}
module Main where
import Test.Framework (defaultMain, testGroup, defaultMainWithArgs)
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Data.Singletons
import Oxymoron
-- No runtime tests: everything below is checked by the type checker.
main = defaultMain []
-- Type-level shader compatibility checks: a Program only type-checks when
-- the vertex shader's output varyings match the fragment shader's inputs.
testVertexShader0 :: Sing ('VertexShader '[] '[] '[])
testVertexShader0 = sing
testFragmentShader0 :: Sing ('FragmentShader '[] '[])
testFragmentShader0 = sing
testProgram0 = Program testVertexShader0 testFragmentShader0
-- Matching varyings (order-insensitive): this pair composes.
testVertexShader1 :: Sing ('VertexShader '[] '[] '[ 'Varying Color VFloat, 'Varying Position VInt] )
testVertexShader1 = sing
testFragmentShader1 :: Sing ('FragmentShader '[ 'Varying Position VInt, 'Varying Color VFloat ] '[])
testFragmentShader1 = sing
testProgram1 = Program testVertexShader1 testFragmentShader1
-- Mismatched varyings: the commented-out Program below must NOT compile.
testVertexShader2 :: Sing ('VertexShader '[] '[] '[ 'Varying Color VFloat ])
testVertexShader2 = sing
testFragmentShader2 :: Sing ('FragmentShader '[ 'Varying Position VInt, 'Varying Color VInt] '[])
testFragmentShader2 = sing
--testProgram2 = Program testVertexShader1 testFragmentShader2
-- Template Haskell: generate singleton types/values for the promoted
-- TestValue and TestKey kinds used by the AssocArray tests below.
singletons [d| data TestValue = Test1
                              | Test2
                              | Test3
                    deriving(Eq, Show)

               data TestKey = Key1
                            | Key2
                            | Key3
                    deriving(Eq, Show)
            |]
-- Type-level mesh/material checks; bodies are 'undefined' because only
-- the types matter.
testMesh0 :: Sing (ExMesh ('[] :: [Attribute]) ':
                                    ('[] :: [*]))
testMesh0 = undefined
--  Sing (''Mesh ('IndexArray 'IUnsignedByte 'TRIANGLES)
--                         ['Attribute "" 'VTByte 'C2])
testMesh1 :: Sing (ExMesh
                    ('Attribute ('Symbol ('[] :: [AChar] ))
                        'VTByte 'C2 ': ('[] :: [Attribute])) ': ('[] :: [*]))
testMesh1 = undefined
testMaterial0 :: Material ('[] :: [Attribute]) ('[] :: [Uniform]) ('[] :: [Varying])
testMaterial0 = undefined
-- A Renderable only type-checks when the mesh's attributes satisfy the
-- material's attribute requirements (mesh1 with material0 must not).
testRenderable0 = Renderable testMesh0 testMaterial0
--testRenderable1 = Renderable testMesh1 testMaterial0
-- | Only callable when the two type-level assoc arrays are equal under
-- the :==: (order-insensitive) equality.
test :: ((a :==: b) ~ True) => Sing (a :: AssocArray k v) -> Sing (b :: AssocArray k v) -> Int
test = undefined
testA :: Sing ('AssocArray '[ '( 'Key1, 'Test3 ),
                              '( 'Key2, 'Test2 )])
testA = undefined
testB :: Sing ('AssocArray '[ '( 'Key2, 'Test2),
                              '( 'Key1, 'Test3)])
testB = undefined
testC :: Sing ('AssocArray '[ '( 'Key2, 'Test3) ])
testC = undefined
-- Order is different but it still compiles
test1 = test testA testB
testKey :: Sing 'Key1
testKey = sing
testValue :: Sing ('Just 'Test1)
testValue = sing
-- | Type-level lookup: only callable when @key@ maps to @value@ in @array@.
findTester :: (Lookup key array ~ Just value)
           => Sing (key :: k)
           -> Sing (array :: AssocArray k v)
           -> Sing (value :: v)
findTester = undefined
-- Key1 maps to Test3 in testA; the result type is inferred accordingly.
findTest = findTester testKey testA
| jfischoff/oxymoron | tests/Main.hs | bsd-3-clause | 3,160 | 8 | 17 | 864 | 921 | 494 | 427 | 64 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
module Malgo.Syntax where
import Control.Lens (makeLenses, makePrisms, view, (^.), _2)
import Data.Foldable (foldl1)
import Data.Graph (flattenSCC, stronglyConnComp)
import qualified Data.HashSet as HashSet
import Koriel.Id
import Koriel.Pretty
import Malgo.Prelude
import Malgo.Syntax.Extension
import Malgo.TypeRep hiding (Type, TyApp, TyVar, TyCon, TyArr, TyTuple, TyRecord, freevars)
-- | Unboxed and boxed literal
--
-- The phantom index @x@ distinguishes boxed from unboxed literals; the
-- constructors and payloads are the same for both.
data Literal x = Int32 Int32 | Int64 Int64 | Float Float | Double Double | Char Char | String Text
  deriving stock (Show, Eq, Ord)

instance Pretty (Literal x) where
  -- Numeric literals carry a suffix marking their width: L for Int64,
  -- F for Float; Int32 and Double print bare.
  pPrint (Int32 i) = pPrint (toInteger i)
  pPrint (Int64 i) = pPrint (toInteger i) <> "L"
  pPrint (Float f) = pPrint f <> "F"
  pPrint (Double d) = pPrint d
  pPrint (Char c) = quotes (pPrint c)
  pPrint (String s) = doubleQuotes (pPrint s)

instance HasType (Literal x) where
  -- Each literal has the corresponding primitive type.
  typeOf Int32 {} = TyPrim Int32T
  typeOf Int64 {} = TyPrim Int64T
  typeOf Float {} = TyPrim FloatT
  typeOf Double {} = TyPrim DoubleT
  typeOf Char {} = TyPrim CharT
  typeOf String {} = TyPrim StringT
  -- A literal's type is fixed, so the traversal visits the type but
  -- always returns the literal unchanged.
  types f v = f (typeOf v) $> v

-- | Reinterpret a boxed literal as its unboxed counterpart.  Zero-cost:
-- only the phantom index changes.
toUnboxed :: Literal Boxed -> Literal Unboxed
toUnboxed = coerce
----------
-- Type --
----------

-- | Surface-syntax types, indexed by the compiler phase @x@.
data Type x
  = TyApp (XTyApp x) (Type x) [Type x] -- ^ type application
  | TyVar (XTyVar x) (XId x) -- ^ type variable
  | TyCon (XTyCon x) (XId x) -- ^ type constructor
  | TyArr (XTyArr x) (Type x) (Type x) -- ^ function type @t1 -> t2@
  | TyTuple (XTyTuple x) [Type x] -- ^ tuple type
  | TyRecord (XTyRecord x) [(XId x, Type x)] -- ^ record type @{l: t, ...}@
  | TyBlock (XTyBlock x) (Type x) -- ^ block type @{t}@
  | TyDArr (XTyDArr x) (Type x) (Type x) -- ^ double-arrow type @t1 => t2@

deriving stock instance (ForallTypeX Eq x, Eq (XId x)) => Eq (Type x)

deriving stock instance (ForallTypeX Show x, Show (XId x)) => Show (Type x)

instance (Pretty (XId x)) => Pretty (Type x) where
  -- Precedence scheme: application binds at 11 (arguments at 12),
  -- arrows at 10; parentheses appear only when the context is tighter.
  pPrintPrec l d (TyApp _ t ts) =
    maybeParens (d > 11) $ pPrint t <+> sep (map (pPrintPrec l 12) ts)
  pPrintPrec _ _ (TyVar _ i) = pPrint i
  pPrintPrec _ _ (TyCon _ i) = pPrint i
  -- Arrows print right-associatively: the domain at level 11, the
  -- codomain at level 10.
  pPrintPrec l d (TyArr _ t1 t2) =
    maybeParens (d > 10) $ pPrintPrec l 11 t1 <+> "->" <+> pPrintPrec l 10 t2
  pPrintPrec _ _ (TyTuple _ ts) = parens $ sep $ punctuate "," $ map pPrint ts
  pPrintPrec l _ (TyRecord _ kvs) = braces $ sep $ punctuate "," $ map (\(k, v) -> pPrintPrec l 0 k <> ":" <+> pPrintPrec l 0 v) kvs
  pPrintPrec _ _ (TyBlock _ t) = braces $ pPrint t
  pPrintPrec l d (TyDArr _ t1 t2) =
    maybeParens (d > 10) $ pPrintPrec l 11 t1 <+> "=>" <+> pPrintPrec l 10 t2
-- | Collect every type variable occurring anywhere in a 'Type'.
-- Constructors contribute nothing; all other nodes are folded over
-- structurally.
getTyVars :: (Eq (XId x), Hashable (XId x)) => Type x -> HashSet (XId x)
getTyVars = \case
  TyApp _ hd args -> getTyVars hd <> foldMap getTyVars args
  TyVar _ v -> one v
  TyCon _ _ -> mempty
  TyArr _ dom cod -> getTyVars dom <> getTyVars cod
  TyTuple _ elts -> foldMap getTyVars elts
  TyRecord _ fields -> foldMap (getTyVars . snd) fields
  TyBlock _ inner -> getTyVars inner
  TyDArr _ dom cod -> getTyVars dom <> getTyVars cod
----------------
-- Expression --
----------------

-- | Expressions, indexed by the compiler phase @x@.
data Exp x
  = Var (XVar x) (WithPrefix (XId x)) -- ^ variable (possibly module-prefixed)
  | Unboxed (XUnboxed x) (Literal Unboxed) -- ^ unboxed literal
  | Boxed (XBoxed x) (Literal Boxed) -- ^ boxed literal
  | Apply (XApply x) (Exp x) (Exp x) -- ^ function application
  | OpApp (XOpApp x) (XId x) (Exp x) (Exp x) -- ^ infix operator application
  | Fn (XFn x) (NonEmpty (Clause x)) -- ^ anonymous function of one or more clauses
  | Tuple (XTuple x) [Exp x] -- ^ tuple
  | Record (XRecord x) [(WithPrefix (XId x), Exp x)] -- ^ record literal
  | List (XList x) [Exp x] -- ^ list literal
  | RecordAccess (XRecordAccess x) (WithPrefix (XId x)) -- ^ field accessor @#label@
  | Ann (XAnn x) (Exp x) (Type x) -- ^ type-annotated expression
  | Seq (XSeq x) (NonEmpty (Stmt x)) -- ^ statement sequence
  | Parens (XParens x) (Exp x) -- ^ explicitly parenthesized expression

deriving stock instance (ForallExpX Eq x, ForallClauseX Eq x, ForallPatX Eq x, ForallStmtX Eq x, ForallTypeX Eq x, Eq (XId x)) => Eq (Exp x)

deriving stock instance (ForallExpX Show x, ForallClauseX Show x, ForallPatX Show x, ForallStmtX Show x, ForallTypeX Show x, Show (XId x)) => Show (Exp x)

instance (Pretty (XId x)) => Pretty (Exp x) where
  pPrintPrec _ _ (Var _ i) = pPrint i
  -- Unboxed literals are written with a trailing '#'.
  pPrintPrec _ _ (Unboxed _ lit) = pPrint lit <> "#"
  pPrintPrec _ _ (Boxed _ lit) = pPrint lit
  pPrintPrec l d (Apply _ e1 e2) =
    maybeParens (d > 10) $ sep [pPrintPrec l 10 e1, pPrintPrec l 11 e2]
  pPrintPrec l d (OpApp _ o e1 e2) =
    maybeParens (d > 10) $ sep [pPrintPrec l 11 e1, pPrintPrec l 10 o <+> pPrintPrec l 11 e2]
  -- Multiple clauses are separated by a hanging "|" inside the braces.
  pPrintPrec l _ (Fn _ cs) =
    braces $
      space
        <> foldl1
          (\a b -> sep [a, nest (-2) $ "|" <+> b])
          (toList $ fmap (pPrintPrec l 0) cs)
  pPrintPrec l _ (Tuple _ xs) = parens $ sep $ punctuate "," $ map (pPrintPrec l 0) xs
  pPrintPrec l _ (Record _ kvs) = braces $ sep $ punctuate "," $ map (\(k, v) -> pPrintPrec l 0 k <> ":" <+> pPrintPrec l 0 v) kvs
  pPrintPrec l _ (List _ xs) = brackets $ sep $ punctuate "," $ map (pPrintPrec l 0) xs
  pPrintPrec l _ (RecordAccess _ x) = "#" <> pPrintPrec l 0 x
  pPrintPrec _ _ (Ann _ e t) = parens $ pPrint e <+> ":" <+> pPrint t
  pPrintPrec _ _ (Seq _ ss) = parens $ sep $ punctuate ";" $ toList $ fmap pPrint ss
  pPrintPrec _ _ (Parens _ x) = parens $ pPrint x

instance
  (ForallExpX HasType x, ForallClauseX HasType x, ForallPatX HasType x) =>
  HasType (Exp x)
  where
  -- Every node's type is stored in its phase annotation (first field).
  typeOf (Var x _) = typeOf x
  typeOf (Unboxed x _) = typeOf x
  typeOf (Boxed x _) = typeOf x
  typeOf (Apply x _ _) = typeOf x
  typeOf (OpApp x _ _ _) = typeOf x
  typeOf (Fn x _) = typeOf x
  typeOf (Tuple x _) = typeOf x
  typeOf (Record x _) = typeOf x
  typeOf (List x _) = typeOf x
  typeOf (RecordAccess x _) = typeOf x
  typeOf (Ann x _ _) = typeOf x
  typeOf (Seq x _) = typeOf x
  typeOf (Parens x _) = typeOf x
  -- Traverse every type stored anywhere in the tree: the annotation of
  -- each node plus all sub-expressions.  Note: the 'Type' in 'Ann' is
  -- surface syntax and is left untouched.
  types f = \case
    Var x v -> Var <$> types f x <*> pure v
    Unboxed x u -> Unboxed <$> types f x <*> types f u
    Boxed x b -> Boxed <$> types f x <*> types f b
    Apply x e1 e2 -> Apply <$> types f x <*> types f e1 <*> types f e2
    OpApp x op e1 e2 -> OpApp <$> types f x <*> pure op <*> types f e1 <*> types f e2
    Fn x cs -> Fn <$> types f x <*> traverse (types f) cs
    Tuple x es -> Tuple <$> types f x <*> traverse (types f) es
    Record x kvs -> Record <$> types f x <*> traverse (\(k, v) -> (k,) <$> types f v) kvs
    List x es -> List <$> types f x <*> traverse (types f) es
    RecordAccess x l -> RecordAccess <$> types f x <*> pure l
    Ann x e t -> Ann <$> types f x <*> types f e <*> pure t
    Seq x ss -> Seq <$> types f x <*> traverse (types f) ss
    Parens x e -> Parens <$> types f x <*> types f e
-- | Free variables of an expression.
--
-- An 'OpApp' contributes its operator as a free variable; field
-- accessors and literals contribute nothing.  The prefix on a 'Var' is
-- dropped — only the bare identifier is collected.
freevars :: (Eq (XId x), Hashable (XId x)) => Exp x -> HashSet (XId x)
freevars = \case
  Var _ (WithPrefix v) -> one (v ^. value)
  Unboxed {} -> mempty
  Boxed {} -> mempty
  Apply _ fun arg -> freevars fun <> freevars arg
  OpApp _ op lhs rhs -> HashSet.insert op (freevars lhs <> freevars rhs)
  Fn _ clauses -> foldMap freevarsClause clauses
  Tuple _ elts -> foldMap freevars elts
  Record _ fields -> foldMap (freevars . snd) fields
  List _ elts -> foldMap freevars elts
  RecordAccess {} -> mempty
  Ann _ inner _ -> freevars inner
  Seq _ stmts -> freevarsStmts stmts
  Parens _ inner -> freevars inner
----------
-- Stmt --
----------

-- | A statement inside a 'Seq' expression.
data Stmt x
  = Let (XLet x) (XId x) (Exp x) -- ^ @let v = e@: binds v for the following statements
  | With (XWith x) (Maybe (XId x)) (Exp x) -- ^ @with e@ or @with v = e@
  | NoBind (XNoBind x) (Exp x) -- ^ a bare expression evaluated for effect/value

deriving stock instance (ForallClauseX Eq x, ForallPatX Eq x, ForallExpX Eq x, ForallStmtX Eq x, ForallTypeX Eq x, Eq (XId x)) => Eq (Stmt x)

deriving stock instance (ForallClauseX Show x, ForallPatX Show x, ForallExpX Show x, ForallStmtX Show x, ForallTypeX Show x, Show (XId x)) => Show (Stmt x)

instance Pretty (XId x) => Pretty (Stmt x) where
  pPrint (Let _ v e) = "let" <+> pPrint v <+> "=" <+> pPrint e
  pPrint (With _ Nothing e) = "with" <+> pPrint e
  pPrint (With _ (Just v) e) = "with" <+> pPrint v <+> "=" <+> pPrint e
  pPrint (NoBind _ e) = pPrint e

instance
  (ForallExpX HasType x, ForallClauseX HasType x, ForallPatX HasType x) =>
  HasType (Stmt x)
  where
  -- A statement's type is the type of its expression.
  typeOf (Let _ _ e) = typeOf e
  typeOf (With _ _ e) = typeOf e
  typeOf (NoBind _ e) = typeOf e
  types f = \case
    Let x v e -> Let x v <$> types f e
    With x v e -> With x v <$> types f e
    NoBind x e -> NoBind x <$> types f e
-- | Free variables of a statement sequence.  A name bound by @let@ (or a
-- named @with@) is removed from the free variables of the REMAINING
-- statements only — the binding is not in scope in its own right-hand side.
freevarsStmts :: (Eq (XId x), Hashable (XId x)) => NonEmpty (Stmt x) -> HashSet (XId x)
freevarsStmts (stmt :| rest) =
  case stmt of
    Let _ bound rhs -> freevars rhs <> HashSet.delete bound (freevarsStmts' rest)
    With _ Nothing rhs -> freevars rhs <> freevarsStmts' rest
    With _ (Just bound) rhs -> freevars rhs <> HashSet.delete bound (freevarsStmts' rest)
    NoBind _ rhs -> freevars rhs <> freevarsStmts' rest

-- | List variant of 'freevarsStmts'; the empty tail contributes nothing.
freevarsStmts' :: (Hashable (XId x), Eq (XId x)) => [Stmt x] -> HashSet (XId x)
freevarsStmts' = \case
  [] -> mempty
  s : ss -> freevarsStmts (s :| ss)
------------
-- Clause --
------------

-- | One clause of an anonymous function: a pattern list and a body.
data Clause x = Clause (XClause x) [Pat x] (Exp x)

deriving stock instance (ForallClauseX Eq x, ForallExpX Eq x, ForallPatX Eq x, ForallStmtX Eq x, ForallTypeX Eq x, Eq (XId x)) => Eq (Clause x)

deriving stock instance (ForallClauseX Show x, ForallExpX Show x, ForallPatX Show x, ForallStmtX Show x, ForallTypeX Show x, Show (XId x)) => Show (Clause x)

instance (ForallClauseX Eq x, ForallExpX Eq x, ForallPatX Eq x, Ord (XId x), ForallPatX Ord x, ForallStmtX Ord x, ForallTypeX Ord x) => Ord (Clause x) where
  -- Clauses are ordered by their pattern lists alone; the annotation and
  -- the body are deliberately ignored.
  (Clause _ ps1 _) `compare` (Clause _ ps2 _) = ps1 `compare` ps2

instance (Pretty (XId x)) => Pretty (Clause x) where
  -- A clause with no patterns prints as its bare body (no "->").
  pPrintPrec _ _ (Clause _ [] e) = pPrint e
  pPrintPrec l _ (Clause _ ps e) = sep [sep (map (pPrintPrec l 11) ps) <+> "->", pPrint e]

instance
  (ForallClauseX HasType x, ForallPatX HasType x, ForallExpX HasType x) =>
  HasType (Clause x)
  where
  typeOf (Clause x _ _) = typeOf x
  types f (Clause x ps e) = Clause <$> types f x <*> traverse (types f) ps <*> types f e

-- | Free variables of a clause: those of the body minus every variable
-- bound by its patterns.
freevarsClause :: (Eq (XId x), Hashable (XId x)) => Clause x -> HashSet (XId x)
freevarsClause (Clause _ pats e) = HashSet.difference (freevars e) (mconcat (map bindVars pats))
-------------
-- Pattern --
-------------

-- | Patterns, indexed by the compiler phase @x@.
data Pat x
  = VarP (XVarP x) (XId x) -- ^ variable pattern
  | ConP (XConP x) (XId x) [Pat x] -- ^ constructor pattern
  | TupleP (XTupleP x) [Pat x] -- ^ tuple pattern
  | RecordP (XRecordP x) [(WithPrefix (XId x), Pat x)] -- ^ record pattern
  | ListP (XListP x) [Pat x] -- ^ list pattern
  | UnboxedP (XUnboxedP x) (Literal Unboxed) -- ^ unboxed-literal pattern
  | BoxedP (XBoxedP x) (Literal Boxed) -- ^ boxed-literal pattern

deriving stock instance (ForallPatX Eq x, Eq (XId x)) => Eq (Pat x)

deriving stock instance (ForallPatX Show x, Show (XId x)) => Show (Pat x)

deriving stock instance (ForallPatX Ord x, Ord (XId x)) => Ord (Pat x)

instance (Pretty (XId x)) => Pretty (Pat x) where
  pPrintPrec _ _ (VarP _ i) = pPrint i
  -- A nullary constructor needs no parentheses at any precedence.
  pPrintPrec _ _ (ConP _ i []) = pPrint i
  pPrintPrec l d (ConP _ i ps) =
    maybeParens (d > 10) $ pPrint i <+> sep (map (pPrintPrec l 11) ps)
  pPrintPrec _ _ (TupleP _ ps) =
    parens $ sep $ punctuate "," $ map pPrint ps
  pPrintPrec l _ (RecordP _ kps) =
    braces $ sep $ punctuate "," $ map (\(k, p) -> pPrintPrec l 0 k <> ":" <+> pPrintPrec l 0 p) kps
  pPrintPrec _ _ (ListP _ ps) =
    brackets $ sep $ punctuate "," $ map pPrint ps
  pPrintPrec _ _ (UnboxedP _ u) = pPrint u
  pPrintPrec _ _ (BoxedP _ x) = pPrint x

instance
  ForallPatX HasType x =>
  HasType (Pat x)
  where
  -- A pattern's type lives in its phase annotation (first field).
  typeOf (VarP x _) = typeOf x
  typeOf (ConP x _ _) = typeOf x
  typeOf (TupleP x _) = typeOf x
  typeOf (RecordP x _) = typeOf x
  typeOf (ListP x _) = typeOf x
  typeOf (UnboxedP x _) = typeOf x
  typeOf (BoxedP x _) = typeOf x
  -- Traverse the annotation of each node and recurse into sub-patterns;
  -- identifiers are left untouched.
  types f = \case
    VarP x v -> VarP <$> types f x <*> pure v
    ConP x c ps -> ConP <$> types f x <*> pure c <*> traverse (types f) ps
    TupleP x ps -> TupleP <$> types f x <*> traverse (types f) ps
    RecordP x kps -> RecordP <$> types f x <*> traverse (bitraverse pure (types f)) kps
    ListP x ps -> ListP <$> types f x <*> traverse (types f) ps
    UnboxedP x u -> UnboxedP <$> types f x <*> types f u
    BoxedP x b -> BoxedP <$> types f x <*> types f b
-- | All variables a pattern binds.  Literal patterns bind nothing;
-- compound patterns are the union of their sub-patterns' binders.
bindVars :: (Eq (XId x), Hashable (XId x)) => Pat x -> HashSet (XId x)
bindVars = \case
  VarP _ v -> one v
  ConP _ _ subPats -> foldMap bindVars subPats
  TupleP _ subPats -> foldMap bindVars subPats
  RecordP _ fields -> foldMap (bindVars . snd) fields
  ListP _ subPats -> foldMap bindVars subPats
  UnboxedP {} -> mempty
  BoxedP {} -> mempty

makePrisms ''Pat
-----------------
-- Declaration --
-----------------

-- | A top-level declaration.
data Decl x
  = ScDef (XScDef x) (XId x) (Exp x) -- ^ top-level function definition
  | ScSig (XScSig x) (XId x) (Type x) -- ^ top-level type signature
  | DataDef (XDataDef x) (XId x) [XId x] [(XId x, [Type x])] -- ^ data declaration: name, parameters, constructors
  | TypeSynonym (XTypeSynonym x) (XId x) [XId x] (Type x) -- ^ type alias
  | Infix (XInfix x) Assoc Int (XId x) -- ^ operator fixity declaration
  | Foreign (XForeign x) (XId x) (Type x) -- ^ foreign import
  | Import (XImport x) ModuleName ImportList -- ^ module import
  | Class (XClass x) (XId x) [XId x] (Type x) -- ^ class declaration
  | Impl (XImpl x) (XId x) (Type x) (Exp x) -- ^ class implementation

deriving stock instance (ForallDeclX Eq x, Eq (XId x)) => Eq (Decl x)

deriving stock instance (ForallDeclX Show x, Show (XId x)) => Show (Decl x)

instance (Pretty (XId x)) => Pretty (Decl x) where
  pPrint (ScDef _ f e) = sep [pPrint f <+> "=", nest 2 $ pPrint e]
  pPrint (ScSig _ f t) = pPrint f <+> ":" <+> pPrint t
  pPrint (DataDef _ d xs cs) =
    sep
      [ "data" <+> pPrint d <+> sep (map pPrint xs) <+> "=",
        nest 2 $ foldl1 (\a b -> sep [a, "|" <+> b]) $ map pprConDef cs
      ]
    where
      pprConDef (con, ts) = pPrint con <+> sep (map (pPrintPrec prettyNormal 12) ts)
  pPrint (TypeSynonym _ t xs t') =
    sep
      [ "type" <+> pPrint t <+> sep (map pPrint xs) <+> "=",
        pPrint t'
      ]
  -- The associativity doc is glued directly onto "infix" (note <> not <+>),
  -- presumably rendering "infixl"/"infixr" — confirm against Assoc's Pretty.
  pPrint (Infix _ a o x) = "infix" <> pPrint a <+> pPrint o <+> pPrint x
  pPrint (Foreign _ x t) = "foreign import" <+> pPrint x <+> ":" <+> pPrint t
  pPrint (Import _ name All) = "module" <+> braces ".." <+> "=" <+> "import" <+> pPrint name
  pPrint (Import _ name (Selected xs)) = "module" <+> braces (sep $ punctuate "," $ map pPrint xs) <+> "=" <+> "import" <+> pPrint name
  pPrint (Import _ name (As name')) = "module" <+> pPrint name' <+> "=" <+> "import" <+> pPrint name
  pPrint (Class _ name params synType) = "class" <+> pPrint name <+> sep (map pPrint params) <+> "=" <+> pPrint synType
  pPrint (Impl _ name typ expr) = "impl" <+> pPrint name <+> ":" <+> pPrint typ <+> "=" <+> pPrint expr

makePrisms ''Decl
------------
-- Module --
------------

-- | A compilation unit: its name plus a phase-dependent definition body.
data Module x = Module {_moduleName :: ModuleName, _moduleDefinition :: XModule x}

deriving stock instance (ForallDeclX Eq x, Eq (XId x), Eq (XModule x)) => Eq (Module x)

deriving stock instance (ForallDeclX Show x, Show (XId x), Show (XModule x)) => Show (Module x)

instance (Pretty (XId x), Pretty (XModule x)) => Pretty (Module x) where
  pPrint (Module name defs) =
    "module" <+> pPrint name <+> "=" $+$ braces (pPrint defs)

-- | The raw, unordered declaration list straight out of the parser.
newtype ParsedDefinitions = ParsedDefinitions [Decl (Malgo 'Parse)]
  deriving stock (Eq, Show)

instance Pretty ParsedDefinitions where
  pPrint (ParsedDefinitions ds) = sep $ map (\x -> pPrint x <> ";") ds

-- The XModule type instances are defined here (rather than next to each
-- phase) to avoid circular module imports.
type instance XModule (Malgo 'Parse) = ParsedDefinitions

type instance XModule (Malgo 'Rename) = BindGroup (Malgo 'Rename)

type instance XModule (Malgo 'Infer) = BindGroup (Malgo 'Infer)

type instance XModule (Malgo 'Refine) = BindGroup (Malgo 'Refine)
----------------
-- Bind group --
----------------

-- | All top-level declarations of a module, bucketed by declaration kind.
data BindGroup x = BindGroup
  { -- | Function definitions, split into mutually recursive groups
    _scDefs :: [[ScDef x]],
    _scSigs :: [ScSig x],
    _dataDefs :: [DataDef x],
    _typeSynonyms :: [TypeSynonym x],
    _foreigns :: [Foreign x],
    _imports :: [Import x],
    _classes :: [Class x],
    _impls :: [Impl x]
  }

-- Flattened tuple forms of the corresponding 'Decl' constructors.
type ScDef x = (XScDef x, XId x, Exp x)

type ScSig x = (XScSig x, XId x, Type x)

type DataDef x = (XDataDef x, XId x, [XId x], [(XId x, [Type x])])

type TypeSynonym x = (XTypeSynonym x, XId x, [XId x], Type x)

type Foreign x = (XForeign x, XId x, Type x)

type Import x = (XImport x, ModuleName, ImportList)

type Class x = (XClass x, XId x, [XId x], Type x)

type Impl x = (XImpl x, XId x, Type x, Exp x)

makeLenses ''BindGroup

deriving stock instance (ForallDeclX Eq x, Eq (XId x)) => Eq (BindGroup x)

deriving stock instance (ForallDeclX Show x, Show (XId x)) => Show (BindGroup x)

instance (Pretty (XId x)) => Pretty (BindGroup x) where
  -- Print in a fixed order: data, foreign, class, impl, signatures,
  -- then the (grouped) function definitions, separated by ";".
  pPrint BindGroup {..} =
    sep $
      punctuate ";" $
        map prettyDataDef _dataDefs
          <> map prettyForeign _foreigns
          <> map prettyClass _classes
          <> map prettyImpl _impls
          <> map prettyScSig _scSigs
          <> concatMap (map prettyScDef) _scDefs
    where
      prettyDataDef (_, d, xs, cs) =
        sep
          [ "data" <+> pPrint d <+> sep (map pPrint xs) <+> "=",
            nest 2 $ foldl1 (\a b -> sep [a, "|" <+> b]) $ map pprConDef cs
          ]
      pprConDef (con, ts) = pPrint con <+> sep (map (pPrintPrec prettyNormal 12) ts)
      prettyForeign (_, x, t) = "foreign import" <+> pPrint x <+> ":" <+> pPrint t
      prettyScSig (_, f, t) = pPrint f <+> ":" <+> pPrint t
      prettyScDef (_, f, e) =
        sep [pPrint f <+> "=", pPrint e]
      prettyClass (_, name, params, synType) =
        "class" <+> pPrint name <+> sep (map pPrint params) <+> "=" <+> pPrint synType
      prettyImpl (_, name, synType, expr) =
        "impl" <+> pPrint name <+> ":" <+> pPrint synType <+> "=" <+> pPrint expr
-- | Partition a flat declaration list into a 'BindGroup', splitting the
-- top-level function definitions into strongly connected (mutually
-- recursive) groups via 'makeSCC'.
makeBindGroup :: (XId x ~ Id a, Eq a) => [Decl x] -> BindGroup x
makeBindGroup ds =
  BindGroup
    { _scDefs = splitScDef (makeSCC $ mapMaybe scDef ds) (mapMaybe scDef ds),
      _scSigs = mapMaybe scSig ds,
      _dataDefs = mapMaybe dataDef ds,
      _typeSynonyms = mapMaybe typeSynonym ds,
      _foreigns = mapMaybe foreignDef ds,
      _imports = mapMaybe importDef ds,
      _classes = mapMaybe classDef ds,
      _impls = mapMaybe implDef ds
    }
  where
    -- One extractor per 'Decl' constructor; each yields Nothing for all
    -- other constructors, so mapMaybe filters by kind.
    scDef (ScDef x f e) = Just (x, f, e)
    scDef _ = Nothing
    scSig (ScSig x f t) = Just (x, f, t)
    scSig _ = Nothing
    dataDef (DataDef x t ps cons) = Just (x, t, ps, cons)
    dataDef _ = Nothing
    typeSynonym (TypeSynonym x t ps t') = Just (x, t, ps, t')
    typeSynonym _ = Nothing
    foreignDef (Foreign x n t) = Just (x, n, t)
    foreignDef _ = Nothing
    importDef (Import x m ns) = Just (x, m, ns)
    importDef _ = Nothing
    classDef (Class x n ps ms) = Just (x, n, ps, ms)
    classDef _ = Nothing
    implDef (Impl x n t ms) = Just (x, n, t, ms)
    implDef _ = Nothing
    -- Map each SCC's names back to the full (ann, name, body) triples.
    splitScDef sccs ds = map (mapMaybe (\n -> find (\d -> n == d ^. _2) ds)) sccs
-- | Turn one definition into a dependency-graph node: the defined name,
-- its unique key, and the unique keys of the body's free variables.
-- The definition's own name is removed first, so direct self-recursion
-- does not produce an edge.
adjacents :: (Eq a1, XId x ~ Id a1) => (a, XId x, Exp x) -> (XId x, Int, [Int])
adjacents (_, name, body) = (name, view idUniq name, outEdges)
  where
    outEdges = map (view idUniq) (toList (HashSet.delete name (freevars body)))
-- | Group definitions into strongly connected components (mutually
-- recursive groups), returned in dependency order by 'stronglyConnComp'.
makeSCC :: (Eq a1, XId x ~ Id a1) => [(a, XId x, Exp x)] -> [[XId x]]
makeSCC ds = map flattenSCC $ stronglyConnComp adjacents'
  where
    -- Unique keys of all definitions in this group.
    vertices = map (view _2 . adjacents) ds
    -- Drop edges to names defined outside this group so the graph only
    -- relates the definitions at hand.
    adjacents' = map ((\(l, v, vs) -> (l, v, filter (`elem` vertices) vs)) . adjacents) ds
| takoeight0821/malgo | src/Malgo/Syntax.hs | bsd-3-clause | 18,821 | 0 | 17 | 4,444 | 9,422 | 4,737 | 4,685 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Haddock
-- Copyright : Isaac Jones 2003-2005
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This module deals with the @haddock@ and @hscolour@ commands. Sadly this is
-- a rather complicated module. It deals with two versions of haddock (0.x and
-- 2.x). It has to do pre-processing for haddock 0.x which involves
-- \'unlit\'ing and using @-DHADDOCK@ for any source code that uses @cpp@. It
-- uses information about installed packages (from @ghc-pkg@) to find the
-- locations of documentation for dependent packages, so it can create links.
--
-- The @hscolour@ support allows generating html versions of the original
-- source, with coloured syntax highlighting.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.Haddock (
haddock, hscolour
) where
-- local
import Distribution.Package
( PackageIdentifier, Package(..) )
import qualified Distribution.ModuleName as ModuleName
import Distribution.PackageDescription as PD
(PackageDescription(..), BuildInfo(..), hcOptions,
Library(..), hasLibs, withLib,
Executable(..), withExe)
import Distribution.Simple.Compiler
( Compiler(..), CompilerFlavor(..), compilerVersion
, extensionsToFlags )
import Distribution.Simple.Program
( ConfiguredProgram(..), requireProgram
, rawSystemProgram, rawSystemProgramStdoutConf, rawSystemProgramStdout
, hscolourProgram, haddockProgram, ghcProgram )
import Distribution.Simple.PreProcess (ppCpp', ppUnlit, preprocessSources,
PPSuffixHandler, runSimplePreProcessor)
import Distribution.Simple.Setup
import Distribution.Simple.Build (initialBuildSteps)
import Distribution.Simple.InstallDirs (InstallDirs(..), PathTemplate,
PathTemplateVariable(..),
toPathTemplate, fromPathTemplate,
substPathTemplate,
initialPathTemplateEnv)
import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) )
import Distribution.Simple.BuildPaths ( haddockPref, haddockName,
hscolourPref, autogenModulesDir,
cppHeaderName )
import Distribution.Simple.PackageIndex (dependencyClosure, allPackages)
import qualified Distribution.Simple.PackageIndex as PackageIndex
( lookupPackageId )
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
( InstalledPackageInfo_(..) )
import Distribution.Simple.Utils
( die, warn, notice, intercalate, setupMessage
, createDirectoryIfMissingVerbose, withTempFile, copyFileVerbose
, findFileWithExtension, findFile )
import Distribution.Text
( display, simpleParse )
import Distribution.Verbosity
import Language.Haskell.Extension
-- Base
import System.Directory(removeFile, doesFileExist,
removeDirectoryRecursive)
import Control.Monad ( when, unless )
import Data.Maybe ( isJust, fromJust, listToMaybe )
import Data.Char (isSpace)
import Data.List (nub)
import System.FilePath((</>), (<.>), splitFileName, splitExtension,
replaceExtension, normalise)
import System.IO (hClose, hPutStrLn)
import Distribution.Version
-- --------------------------------------------------------------------------
-- Haddock support

-- | Generate API documentation for the package with @haddock@.
--
-- Handles both haddock 0.x (sources are unlit/CPP-preprocessed into a
-- temporary directory first) and haddock 2.x (modules are handed to
-- haddock's internal GHC directly).  The first equation short-circuits
-- with a warning when there is nothing to document; when its guard
-- fails, control falls through to the second equation.
haddock :: PackageDescription -> LocalBuildInfo -> [PPSuffixHandler] -> HaddockFlags -> IO ()
haddock pkg_descr _ _ haddockFlags
  | not (hasLibs pkg_descr)
    && not (fromFlag $ haddockExecutables haddockFlags) =
      warn (fromFlag $ haddockVerbosity haddockFlags) $
           "No documentation was generated as this package does not contain "
        ++ "a library. Perhaps you want to use the --executables flag."
haddock pkg_descr lbi suffixes flags = do
    let distPref = fromFlag (haddockDistPref flags)
        doExes   = fromFlag (haddockExecutables flags)
        hsColour = fromFlag (haddockHscolour flags)
    -- Optionally run hscolour first so haddock can link to the
    -- highlighted sources (see linkToHscolour below).
    when hsColour $ hscolour pkg_descr lbi suffixes defaultHscolourFlags {
      hscolourCSS = haddockHscolourCss flags,
      hscolourExecutables = haddockExecutables flags,
      hscolourVerbosity = haddockVerbosity flags
    }

    (confHaddock, _) <- requireProgram verbosity haddockProgram
                          (orLaterVersion (Version [0,6] [])) (withPrograms lbi)

    -- tmpDir receives the preprocessed sources for haddock 0.x; it is
    -- removed again at the very end.
    let tmpDir = buildDir lbi </> "tmp"
    createDirectoryIfMissingVerbose verbosity True tmpDir
    createDirectoryIfMissingVerbose verbosity True $
      haddockPref distPref pkg_descr
    initialBuildSteps distPref pkg_descr lbi verbosity suffixes

    setupMessage verbosity "Running Haddock for" (packageId pkg_descr)

    let replaceLitExts = map ( (tmpDir </>) . (`replaceExtension` "hs") )
    let showPkg = display (packageId pkg_descr)
    let hoogle = fromFlag (haddockHoogle flags)
        outputFlag | hoogle    = "--hoogle"
                   | otherwise = "--html"
    -- NOTE(review): partial pattern — requireProgram above should
    -- guarantee a known version, but this still crashes if not.
    let Just version = programVersion confHaddock
    let have_src_hyperlink_flags = version >= Version [0,8] []
        isVersion2               = version >= Version [2,0] []
    when (hoogle && version > Version [2] []
                 && version < Version [2,2] []) $
      die $ "haddock 2.0 and 2.1 do not support the --hoogle flag."
    -- haddock 0.x needs __HADDOCK__ defined while preprocessing.
    let mockFlags
          | isVersion2 = []
          | otherwise  = ["-D__HADDOCK__"]
    let mockAll bi = mapM_ (mockPP mockFlags bi tmpDir)
    let comp = compiler lbi
    let cssFileFlag = case flagToMaybe $ haddockCss flags of
                        Nothing -> []
                        Just cssFile -> ["--css=" ++ cssFile]
    let verboseFlags = if verbosity >= deafening then ["--verbose"] else []
    when (hsColour && not have_src_hyperlink_flags) $
      die "haddock --hyperlink-source requires Haddock version 0.8 or later"
    let linkToHscolour = if hsColour
                           then ["--source-module=src/%{MODULE/./-}.html"
                                ,"--source-entity=src/%{MODULE/./-}.html#%{NAME}"]
                           else []
    let htmlTemplate = fmap toPathTemplate $
                         flagToMaybe (haddockHtmlLocation flags)
    packageFlags <- do
      (packageFlags, warnings) <- haddockPackageFlags lbi htmlTemplate
      maybe (return ()) (warn verbosity) warnings
      return packageFlags

    -- haddock 2.x embeds a GHC; its version must match the one this
    -- package was configured with.
    -- NOTE(review): fromJust is guarded by the isNothing-style check on
    -- the preceding line; 'isNothing' would read better.
    when isVersion2 $ do
      strHadGhcVers <- rawSystemProgramStdout verbosity confHaddock ["--ghc-version"]
      let mHadGhcVers :: Maybe Version
          mHadGhcVers = simpleParse strHadGhcVers
      when (mHadGhcVers == Nothing) $ die "Could not get GHC version from Haddock"
      when (fromJust mHadGhcVers /= compilerVersion comp) $
        die "Haddock's internal GHC version must match the configured GHC version"

    -- Trim trailing whitespace/newline from ghc --print-libdir output.
    ghcLibDir0 <- rawSystemProgramStdoutConf verbosity ghcProgram (withPrograms lbi) ["--print-libdir"]
    let ghcLibDir = reverse $ dropWhile isSpace $ reverse ghcLibDir0

    let packageName = if isVersion2
          then ["--optghc=-package-name", "--optghc=" ++ showPkg]
          else ["--package=" ++ showPkg]

    -- Extra GHC flags forwarded to haddock 2.x's internal GHC.
    let haddock2options bi preprocessDir = if isVersion2
          then ("-B" ++ ghcLibDir) : map ("--optghc=" ++) (ghcSimpleOptions lbi bi preprocessDir)
          else []

    -- Document the library, if any.
    withLib pkg_descr () $ \lib -> do
        let bi = libBuildInfo lib
            modules = PD.exposedModules lib ++ otherModules bi
        inFiles <- getLibSourceFiles lbi lib
        unless isVersion2 $ mockAll bi inFiles
        let template = showPkg ++ "-haddock-prolog.txt"
            prolog | null (PD.description pkg_descr) = synopsis pkg_descr
                   | otherwise                       = PD.description pkg_descr
            subtitle | null (synopsis pkg_descr) = ""
                     | otherwise                 = ": " ++ synopsis pkg_descr
            titleComment | fromFlag (haddockInternal flags) = " (internal documentation)"
                         | otherwise                        = ""
        withTempFile distPref template $ \prologFileName prologFileHandle -> do
          hPutStrLn prologFileHandle prolog
          hClose prologFileHandle
          -- haddock 2.x takes module names; 0.x takes preprocessed files.
          let targets
                | isVersion2 = map display modules
                | otherwise  = replaceLitExts inFiles
          let haddockFile = haddockPref distPref pkg_descr
                        </> haddockName pkg_descr
          -- FIX: replace w/ rawSystemProgramConf?
          let hideArgs | fromFlag (haddockInternal flags) = []
                       | otherwise = [ "--hide=" ++ display m
                                     | m <- otherModules bi ]
          let exportsFlags | fromFlag (haddockInternal flags) = ["--ignore-all-exports"]
                           | otherwise                        = []
          rawSystemProgram verbosity confHaddock
                  ([ outputFlag
                   , "--odir=" ++ haddockPref distPref pkg_descr
                   , "--title=" ++ showPkg ++ subtitle ++ titleComment
                   , "--dump-interface=" ++ haddockFile
                   , "--prologue=" ++ prologFileName ]
                  ++ packageName
                  ++ cssFileFlag
                  ++ linkToHscolour
                  ++ packageFlags
                  ++ verboseFlags
                  ++ hideArgs
                  ++ exportsFlags
                  ++ haddock2options bi (buildDir lbi)
                  ++ targets
                  )
          notice verbosity $ "Documentation created: "
                          ++ (haddockPref distPref pkg_descr </> "index.html")

    -- Document each executable when --executables was given.
    withExe pkg_descr $ \exe -> when doExes $ do
        let bi = buildInfo exe
            exeTargetDir = haddockPref distPref pkg_descr </> exeName exe
        createDirectoryIfMissingVerbose verbosity True exeTargetDir
        -- NOTE(review): partial pattern — crashes if an executable has
        -- no source files at all.
        inFiles@(srcMainPath:_) <- getExeSourceFiles lbi exe
        mockAll bi inFiles
        let template = showPkg ++ "-haddock-prolog.txt"
            prolog | null (PD.description pkg_descr) = synopsis pkg_descr
                   | otherwise                       = PD.description pkg_descr
            titleComment | fromFlag (haddockInternal flags) = " (internal documentation)"
                         | otherwise                        = ""
        withTempFile distPref template $ \prologFileName prologFileHandle -> do
          hPutStrLn prologFileHandle prolog
          hClose prologFileHandle
          let targets
                | isVersion2 = srcMainPath : map display (otherModules bi)
                | otherwise  = replaceLitExts inFiles
          let preprocessDir = buildDir lbi </> exeName exe </> exeName exe ++ "-tmp"
          let exportsFlags | fromFlag (haddockInternal flags) = ["--ignore-all-exports"]
                           | otherwise                        = []
          rawSystemProgram verbosity confHaddock
                  ([ outputFlag
                   , "--odir=" ++ exeTargetDir
                   , "--title=" ++ exeName exe ++ titleComment
                   , "--prologue=" ++ prologFileName ]
                  ++ linkToHscolour
                  ++ packageFlags
                  ++ verboseFlags
                  ++ exportsFlags
                  ++ haddock2options bi preprocessDir
                  ++ targets
                  )
          notice verbosity $ "Documentation created: "
                          ++ (exeTargetDir </> "index.html")

    removeDirectoryRecursive tmpDir

  where
    verbosity = fromFlag (haddockVerbosity flags)
    -- Preprocess one source file for haddock 0.x: unlit .lhs files,
    -- then run CPP (with -D__HADDOCK__) if the module needs it.
    mockPP inputArgs bi pref file
        = do let (filePref, fileName) = splitFileName file
             let targetDir  = pref </> filePref
             let targetFile = targetDir </> fileName
             let (targetFileNoext, targetFileExt) = splitExtension targetFile
             let cppOutput = targetFileNoext <.> "hspp"
             let hsFile = targetFileNoext <.> "hs"

             createDirectoryIfMissingVerbose verbosity True targetDir
             -- Run unlit first, then CPP
             if (targetFileExt == ".lhs")
               then runSimplePreProcessor ppUnlit file hsFile verbosity
               else copyFileVerbose verbosity file hsFile

             when (needsCpp bi) $ do
                 runSimplePreProcessor (ppCpp' inputArgs bi lbi)
                   hsFile cppOutput verbosity
                 removeFile hsFile
                 copyFileVerbose verbosity cppOutput hsFile
                 removeFile cppOutput

    -- A module needs CPP when the CPP extension is enabled.
    needsCpp :: BuildInfo -> Bool
    needsCpp bi = CPP `elem` extensions bi
-- | Compute @--read-interface@ flags so haddock can hyperlink to the
-- documentation of every transitive dependency.  Also returns an
-- optional warning listing dependencies whose docs are not installed.
haddockPackageFlags :: LocalBuildInfo
                    -> Maybe PathTemplate
                    -> IO ([String], Maybe String)
haddockPackageFlags lbi htmlTemplate = do
  let allPkgs = installedPkgs lbi
      directDeps = packageDeps lbi
  -- NOTE(review): 'Left' is treated as the success case here — in this
  -- API 'dependencyClosure' appears to return the closed index on the
  -- Left; confirm against the PackageIndex version in use.
  transitiveDeps <- case dependencyClosure allPkgs directDeps of
    Left x -> return x
    Right _ -> die "Can't find transitive deps for haddock"
  -- Pair every dependency with its (interface, html) location, using
  -- Nothing when the interface file is missing on disk.
  interfaces <- sequence
    [ case interfaceAndHtmlPath pkgid of
        Nothing -> return (pkgid, Nothing)
        Just (interface, html) -> do
          exists <- doesFileExist interface
          if exists
            then return (pkgid, Just (interface, html))
            else return (pkgid, Nothing)
    | pkgid <- map InstalledPackageInfo.package $ allPackages transitiveDeps ]
  let missing = [ pkgid | (pkgid, Nothing) <- interfaces ]
      warning = "The documentation for the following packages are not "
             ++ "installed. No links will be generated to these packages: "
             ++ intercalate ", " (map display missing)
      -- One --read-interface=HTML,IFACE flag per documented dependency
      -- (the HTML part and comma are omitted when unknown).
      flags = [ "--read-interface="
             ++ (if null html then "" else html ++ ",") ++ interface
            | (_, Just (interface, html)) <- interfaces ]
  return (flags, if null missing then Nothing else Just warning)
  where
    -- Locate a dependency's haddock interface file and HTML directory,
    -- substituting the user-supplied HTML path template when given.
    interfaceAndHtmlPath :: PackageIdentifier -> Maybe (FilePath, FilePath)
    interfaceAndHtmlPath pkgId = do
      pkg <- PackageIndex.lookupPackageId (installedPkgs lbi) pkgId
      interface <- listToMaybe (InstalledPackageInfo.haddockInterfaces pkg)
      html <- case htmlTemplate of
        Nothing -> listToMaybe (InstalledPackageInfo.haddockHTMLs pkg)
        Just htmlPathTemplate -> Just (expandTemplateVars htmlPathTemplate)
      return (interface, html)
      where expandTemplateVars = fromPathTemplate . substPathTemplate env
            env = (PrefixVar, prefix (installDirTemplates lbi))
                : initialPathTemplateEnv pkgId (compilerId (compiler lbi))
-- | GHC flags handed to haddock 2.x's internal GHC: package visibility,
-- source/include search paths, CPP options, and the enabled extensions.
-- @mockDir@ is used both as a source root and as the -odir/-hidir target.
ghcSimpleOptions :: LocalBuildInfo -> BuildInfo -> FilePath -> [String]
ghcSimpleOptions lbi bi mockDir =
  concat
    [ ["-hide-all-packages"]
    , concat [ ["-package", display pkg] | pkg <- packageDeps lbi ]
    , ["-i"]
    , hcOptions GHC bi
    , [ "-i" ++ srcDir | srcDir <- nub (hsSourceDirs bi) ]
    , ["-i" ++ autogenModulesDir lbi]
    , ["-i" ++ mockDir]
    , [ "-I" ++ incDir | incDir <- PD.includeDirs bi ]
    , [ "-optP" ++ opt | opt <- cppOptions bi ]
    , ["-optP-include", "-optP" ++ (autogenModulesDir lbi </> cppHeaderName)]
    , ["-odir", mockDir]
    , ["-hidir", mockDir]
    , extensionsToFlags hcomp (extensions bi)
    ]
  where
    hcomp = compiler lbi
-- --------------------------------------------------------------------------
-- hscolour support

-- | Generate syntax-highlighted HTML for the package's sources with
-- @hscolour@ — one page per module, for the library and (when the
-- executables flag is set) each executable.
hscolour :: PackageDescription -> LocalBuildInfo -> [PPSuffixHandler] -> HscolourFlags -> IO ()
hscolour pkg_descr lbi suffixes flags = do
    let distPref = fromFlag $ hscolourDistPref flags
    (hscolourProg, _) <- requireProgram verbosity hscolourProgram
      (orLaterVersion (Version [1,8] [])) (withPrograms lbi)

    createDirectoryIfMissingVerbose verbosity True $
      hscolourPref distPref pkg_descr
    preprocessSources pkg_descr lbi False verbosity suffixes

    setupMessage verbosity "Running hscolour for" (packageId pkg_descr)
    -- Output file naming: module components joined by '-', e.g.
    -- Foo.Bar -> Foo-Bar.html (matches the haddock --source-module URLs).
    let moduleNameToHtmlFilePath mn =
          intercalate "-" (ModuleName.components mn) <.> "html"

    withLib pkg_descr () $ \lib -> when (isJust $ library pkg_descr) $ do
      let bi = libBuildInfo lib
          modules = PD.exposedModules lib ++ otherModules bi
          outputDir = hscolourPref distPref pkg_descr </> "src"
      createDirectoryIfMissingVerbose verbosity True outputDir
      copyCSS hscolourProg outputDir
      inFiles <- getLibSourceFiles lbi lib
      -- Modules and their source files are produced in the same order,
      -- so the zip pairs them correctly.
      flip mapM_ (zip modules inFiles) $ \(mo, inFile) ->
        let outFile = outputDir </> moduleNameToHtmlFilePath mo
         in rawSystemProgram verbosity hscolourProg
              ["-css", "-anchor", "-o" ++ outFile, inFile]

    withExe pkg_descr $ \exe -> when doExes $ do
      let bi = buildInfo exe
          modules = ModuleName.main : otherModules bi
          outputDir = hscolourPref distPref pkg_descr </> exeName exe </> "src"
      createDirectoryIfMissingVerbose verbosity True outputDir
      copyCSS hscolourProg outputDir
      inFiles <- getExeSourceFiles lbi exe
      flip mapM_ (zip modules inFiles) $ \(mo, inFile) ->
        let outFile = outputDir </> moduleNameToHtmlFilePath mo
         in rawSystemProgram verbosity hscolourProg
              ["-css", "-anchor", "-o" ++ outFile, inFile]

  where copyCSS hscolourProg dir = case stylesheet of
          -- No user stylesheet: hscolour >= 1.9 can emit its built-in
          -- CSS; for older versions there is nothing to copy.
          Nothing | programVersion hscolourProg >= Just (Version [1,9] []) ->
                      rawSystemProgram verbosity hscolourProg
                        ["-print-css", "-o" ++ dir </> "hscolour.css"]
                  | otherwise -> return ()
          Just s -> copyFileVerbose verbosity s (dir </> "hscolour.css")
        doExes     = fromFlag (hscolourExecutables flags)
        stylesheet = flagToMaybe (hscolourCSS flags)
        verbosity  = fromFlag (hscolourVerbosity flags)
-- | Locate the source file (.hs or .lhs) of every module of the
-- library, searching the autogen dir, the preprocessing output dir and
-- the declared source dirs; fails via 'die' if any module is missing.
getLibSourceFiles :: LocalBuildInfo -> Library -> IO [FilePath]
getLibSourceFiles lbi lib = mapM locate modules
  where
    locate m =
      findFileWithExtension ["hs", "lhs"] searchDirs (ModuleName.toFilePath m)
        >>= maybe (notFound m) (return . normalise)
    searchDirs = autogenModulesDir lbi : preprocessDir : hsSourceDirs bi
    bi = libBuildInfo lib
    modules = PD.exposedModules lib ++ otherModules bi
    preprocessDir = buildDir lbi
    notFound m = die $ "can't find source for module " ++ display m
-- | Locate the main source file and the source of every other module
-- of the executable; the main file comes first in the result.  Fails
-- via 'die' if a module's source cannot be found.
getExeSourceFiles :: LocalBuildInfo -> Executable -> IO [FilePath]
getExeSourceFiles lbi exe = do
    mainPath <- findFile (hsSourceDirs bi) (modulePath exe)
    otherPaths <- mapM locate modules
    return (mainPath : otherPaths)
  where
    locate m =
      findFileWithExtension ["hs", "lhs"] searchDirs (ModuleName.toFilePath m)
        >>= maybe (notFound m) (return . normalise)
    searchDirs = autogenModulesDir lbi : preprocessDir : hsSourceDirs bi
    bi = buildInfo exe
    modules = otherModules bi
    preprocessDir = buildDir lbi </> exeName exe </> exeName exe ++ "-tmp"
    notFound m = die $ "can't find source for module " ++ display m
| dcreager/cabal | Distribution/Simple/Haddock.hs | bsd-3-clause | 20,680 | 184 | 27 | 5,870 | 4,396 | 2,274 | 2,122 | 334 | 7 |
{-|
Module : AERN2.MP.Accuracy
Description : Rough accuracy of an enclosure
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
A type for measuring the accuracy of an enclosing set,
roughly corresponding to the maximum absolute error in some distance metric
approximately measured in bits.
-}
module AERN2.MP.Accuracy
(Accuracy(NoInformation, Exact), bits, fromAccuracy,
HasAccuracy(..),
ac2prec,
iterateUntilAccurate,
convergentList2seqByAccuracy,
seqByPrecision2seqByAccuracy,
setPrecisionAtLeastAccuracy,
ShowWithAccuracy(..),
HasApproximate(..))
where
import MixedTypesNumPrelude
import qualified Prelude as P
import Control.CollectErrors
import Data.Complex
-- import Test.Hspec
import Test.QuickCheck ( Arbitrary(arbitrary), frequency )
import AERN2.Norm
import AERN2.Kleenean
import AERN2.MP.Precision
{- example -}
-- | Example of mixed-type accuracy arithmetic; evaluates to @bits 201@.
_example1 :: Accuracy
_example1 = 1 + 2*(bits 100)
{-| A rough, bit-valued measure of the accuracy of an enclosure:
    @Bits n@ indicates a maximum absolute error of roughly @2^(-n)@
    (see the module header), 'Exact' means no error at all, and
    'NoInformation' means nothing is known about the error. -}
data Accuracy = NoInformation | Bits { fromAccuracy :: Integer } | Exact
    deriving (P.Eq, P.Ord)
-- | QuickCheck generator: mostly finite 'Bits' values, with occasional
-- 'Exact' and 'NoInformation'.
instance Arbitrary Accuracy where
  arbitrary =
    frequency
      [(int 1, pure Exact),
       (int 1, pure NoInformation),
       (int 8, Bits <$> arbitrary)]
-- | 'fromEnum' clips the infinite cases to the 'Int' bounds.  Note that
-- 'toEnum' always yields 'Bits', so it is not an exact inverse of
-- 'fromEnum' at 'minBound'/'maxBound'.
instance Enum Accuracy where
  fromEnum NoInformation = minBound
  fromEnum (Bits i) = int i
  fromEnum Exact = maxBound
  toEnum i = Bits (integer i)
-- | 'NoInformation' and 'Exact' are the least and greatest accuracies.
instance Bounded Accuracy where
  minBound = NoInformation
  maxBound = Exact
-- Integers, 'Int's, 'Precision' and 'NormLog' values can all be read
-- exactly as a number of bits of accuracy.
instance ConvertibleExactly Integer Accuracy where
  safeConvertExactly = Right . Bits
instance ConvertibleExactly Int Accuracy where
  safeConvertExactly = Right . Bits . integer
instance ConvertibleExactly Precision Accuracy where
  safeConvertExactly = Right . Bits . integer
instance ConvertibleExactly NormLog Accuracy where
  safeConvertExactly (NormBits b) = Right $ bits (-b)  -- norm ~2^b corresponds to accuracy -b
  safeConvertExactly NormZero = Right Exact
-- | Convert a value (integer, 'Precision', 'NormLog', ...) to an
-- 'Accuracy' measured in bits.
bits :: (ConvertibleExactly t Accuracy) => t -> Accuracy
bits = convertExactly
-- | Render an accuracy the way it would be constructed, e.g. @bits 53@.
instance Show Accuracy where
  show NoInformation = "NoInformation"
  show Exact = "Exact"
  show (Bits a) = "bits " ++ show a
-- Comparisons: accuracy-accuracy comparisons use the derived ordering;
-- comparisons against integer literals convert the integer to 'Bits'
-- first (via 'convertFirst'/'convertSecond').
instance HasEqAsymmetric Accuracy Accuracy
instance HasOrderAsymmetric Accuracy Accuracy
instance CanMinMaxAsymmetric Accuracy Accuracy
instance HasEqAsymmetric Accuracy Integer where
  equalTo = convertSecond equalTo
instance HasEqAsymmetric Integer Accuracy where
  equalTo = convertFirst equalTo
instance HasEqAsymmetric Accuracy Int where
  equalTo = convertSecond equalTo
instance HasEqAsymmetric Int Accuracy where
  equalTo = convertFirst equalTo
instance HasOrderAsymmetric Accuracy Integer where
  lessThan = convertSecond lessThan
  leq = convertSecond leq
instance HasOrderAsymmetric Integer Accuracy where
  lessThan = convertFirst lessThan
  leq = convertFirst leq
instance HasOrderAsymmetric Accuracy Int where
  lessThan = convertSecond lessThan
  leq = convertSecond leq
instance HasOrderAsymmetric Int Accuracy where
  lessThan = convertFirst lessThan
  leq = convertFirst leq
-- min/max against integer literals work by converting the integer to
-- 'Bits' first.
instance CanMinMaxAsymmetric Accuracy Integer where
  type MinMaxType Accuracy Integer = Accuracy
  min = convertSecond min
  max = convertSecond max
instance CanMinMaxAsymmetric Integer Accuracy where
  type MinMaxType Integer Accuracy = Accuracy
  min = convertFirst min
  max = convertFirst max
instance CanMinMaxAsymmetric Accuracy Int where
  type MinMaxType Accuracy Int = Accuracy
  min = convertSecond min
  max = convertSecond max
instance CanMinMaxAsymmetric Int Accuracy where
  type MinMaxType Int Accuracy = Accuracy
  min = convertFirst min
  max = convertFirst max
-- | Negation swaps the two extremes and negates the bit count.
instance CanNeg Accuracy where
  negate NoInformation = Exact
  negate Exact = NoInformation
  negate (Bits a) = Bits (-a)
-- | Addition of accuracies: 'NoInformation' is absorbing (checked
-- first), then 'Exact'; finite bit counts add up.
instance CanAddAsymmetric Accuracy Accuracy where
  add NoInformation _ = NoInformation
  add _ NoInformation = NoInformation
  add (Bits a) (Bits b) = Bits $ a + b
  add Exact _ = Exact
  add _ Exact = Exact
-- | Subtraction via the default @a + (negate b)@.
instance CanSub Accuracy Accuracy
--instance CanMulAsymmetric Accuracy Accuracy where
-- mulA NoInformation _ = NoInformation
-- mulA _ NoInformation = NoInformation
-- mulA (Bits a) (Bits b) = Bits $ a * b
-- mulA Exact _ = Exact
-- mulA _ Exact = Exact
-- Mixed-type arithmetic with integers: the integer scales or shifts the
-- bit count; 'NoInformation' and 'Exact' are preserved unchanged.
instance CanMulAsymmetric Accuracy Integer where
  type MulType Accuracy Integer = Accuracy
  mul NoInformation _ = NoInformation
  mul (Bits a) i = Bits $ a * i
  mul Exact _ = Exact
instance CanMulAsymmetric Integer Accuracy where
  type MulType Integer Accuracy = Accuracy
  mul i a = mul a i  -- commutes
instance CanAddAsymmetric Accuracy Integer where
  type AddType Accuracy Integer = Accuracy
  add NoInformation _ = NoInformation
  add (Bits a) i = Bits $ a + i
  add Exact _ = Exact
instance CanAddAsymmetric Integer Accuracy where
  type AddType Integer Accuracy = Accuracy
  add i a = add a i  -- commutes
instance CanSub Accuracy Integer where
  type SubType Accuracy Integer = Accuracy
  sub NoInformation _ = NoInformation
  sub (Bits a) i = Bits $ a - i
  sub Exact _ = Exact
-- | Types whose values can report (an estimate of) their own accuracy.
class HasAccuracy a where
  getAccuracy :: a -> Accuracy
  {-| Return accuracy, except when the element is Exact, return its nominal Precision dressed as Accuracy.
    This function is useful when we have a convergent sequence where all elements happen to be
    actually equal to the limit and we need the property that the sequence elements keep improving.
  -}
  getFiniteAccuracy :: a -> Accuracy
  default getFiniteAccuracy :: (HasPrecision a) => a -> Accuracy
  getFiniteAccuracy b =
    case getAccuracy b of
      Exact -> bits $ getPrecision b
      a -> a
-- | A 'CollectErrors'-wrapped value reports accuracy only when the
-- value is present and no certain error has been recorded; otherwise
-- the result is 'NoInformation'.
instance (HasAccuracy a, CanBeErrors es) => HasAccuracy (CollectErrors es a) where
  getAccuracy (CollectErrors ma es) =
    case ma of
      Just a | not (hasCertainError es) -> getAccuracy a
      _ -> NoInformation
  getFiniteAccuracy (CollectErrors ma es) =
    case ma of
      Just a | not (hasCertainError es) -> getFiniteAccuracy a
      _ -> NoInformation
-- Discrete/exact types are always 'Exact'; they have no meaningful
-- finite accuracy, hence 'NoInformation' for 'getFiniteAccuracy'.
instance HasAccuracy Int where getAccuracy _ = Exact; getFiniteAccuracy _ = NoInformation
instance HasAccuracy Integer where getAccuracy _ = Exact; getFiniteAccuracy _ = NoInformation
instance HasAccuracy Rational where getAccuracy _ = Exact; getFiniteAccuracy _ = NoInformation
instance HasAccuracy Bool where getAccuracy _ = Exact; getFiniteAccuracy _ = NoInformation
instance HasAccuracy Kleenean where getAccuracy _ = Exact; getFiniteAccuracy _ = NoInformation
-- Composite structures report the worst (minimum) accuracy of their
-- components; an empty list is 'Exact' (vacuously) and a missing
-- 'Maybe' value is 'NoInformation'.
instance HasAccuracy t => HasAccuracy (Complex t) where
  getAccuracy (a :+ i) =
    (getAccuracy a) `min` (getAccuracy i)
  getFiniteAccuracy (a :+ i) =
    (getFiniteAccuracy a) `min` (getFiniteAccuracy i)
instance HasAccuracy t => HasAccuracy [t] where
  getAccuracy xs = foldl min Exact $ map getAccuracy xs
  getFiniteAccuracy xs = foldl min Exact $ map getFiniteAccuracy xs
instance HasAccuracy t => HasAccuracy (Maybe t) where
  getAccuracy (Just x) = getAccuracy x
  getAccuracy _ = NoInformation
  getFiniteAccuracy (Just x) = getFiniteAccuracy x
  getFiniteAccuracy _ = NoInformation
-- | Repeatedly increase the working precision (starting from one
-- derived from the target accuracy, see 'ac2prec') until the produced
-- value reaches the target accuracy; the whole trace of attempts is
-- returned.  A 'Nothing' result never satisfies the test.
iterateUntilAccurate ::
  (HasAccuracy t) =>
  Accuracy ->
  (Precision -> Maybe t) ->
  [(Precision, Maybe t)]
iterateUntilAccurate ac =
  iterateUntilOK (ac2prec ac) $ \maybeResult ->
    case maybeResult of
      Just result -> getAccuracy result >= ac
      _ -> False
-- | A working precision that should comfortably support the given
-- accuracy: the bit count plus a 50-bit margin (at least 2), or 100
-- bits for the non-finite cases.
ac2prec :: Accuracy -> Precision
ac2prec (Bits b) = prec (max 2 $ b + 50)
ac2prec _ = prec 100
-- | Turn a precision-indexed sequence into an accuracy-indexed one:
-- evaluate the sequence at standard precisions (skipping ones clearly
-- too low for the target) until the requested accuracy is reached.
seqByPrecision2seqByAccuracy ::
  (HasAccuracy t) =>
  (Precision -> t) -> (Accuracy -> t)
seqByPrecision2seqByAccuracy seqByPrecision ac =
  convergentList2seqByAccuracy list ac
  where
  list =
    map seqByPrecision $ dropWhile (lowPrec ac) (standardPrecisions (ac2prec ac))
  -- for an Exact target no precision can be skipped upfront
  lowPrec Exact _ = False
  lowPrec _ p = bits p < ac
-- | Index a convergent list by accuracy: return the first element that
-- is at least as accurate as requested; raises an error if the list is
-- exhausted first (i.e. the list converges too slowly or not at all).
convergentList2seqByAccuracy :: (HasAccuracy t) => [t] -> (Accuracy -> t)
convergentList2seqByAccuracy list ac = findAccurate list
  where
  findAccurate [] =
    error "convergentList2seqByAccuracy: the sequence either converges too slowly or it does not converge"
  findAccurate (b : rest)
    | getAccuracy b >= ac = b
    | otherwise = findAccurate rest
{-|
  Change the precision so that
  it is at least as high as the supplied accuracy
  (assuming the accuracy is finite).
-}
setPrecisionAtLeastAccuracy :: (HasPrecision t, CanSetPrecision t) => Accuracy -> t -> t
setPrecisionAtLeastAccuracy acc b
  | p_b < p_acc = setPrecision p_acc b
  | otherwise = b  -- current precision already sufficient
  where
  p_acc =
    case acc of
      Exact -> error $ "setPrecisionAtLeastAccuracy: cannot match Exact accuracy"
      NoInformation -> p_b  -- no constraint: keep the current precision
      _ -> prec $ max 2 (fromAccuracy acc)
  p_b = getPrecision b
-- | Types that can render themselves to a given target accuracy.
class ShowWithAccuracy t where
  showWithAccuracy :: Accuracy -> t -> String
{-| An unsafe approximation of an enclosure or exact value,
    useful mainly for showing something brief and readable to humans.
-}
class HasApproximate t where
  type Approximate t
  getApproximate :: Accuracy -> t -> (Approximate t)
| michalkonecny/aern2 | aern2-mp/src/AERN2/MP/Accuracy.hs | bsd-3-clause | 9,212 | 0 | 14 | 1,924 | 2,358 | 1,213 | 1,145 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
module LLVM.General.Quote.Base (
CodeGen,
CodeGenMonad(..),
ToDefintions(..),
quasiquote,
quasiquoteM,
TQuasiQuoter(..),
parse
) where
import Control.Applicative
import Control.Monad.Identity
import qualified Data.ByteString.Char8 as B
import Control.Monad.State.Strict
import Control.Monad.Writer.Strict
import Data.Word
import Data.Loc
import Data.Data (Data(..))
import Language.Haskell.Meta (parseExp)
import Language.Haskell.TH
import Language.Haskell.TH.Lib
import Language.Haskell.TH.Syntax
import Data.IORef (atomicModifyIORef')
import Language.Haskell.TH.Quote (QuasiQuoter(..))
import qualified LLVM.General.Quote.Parser as P
import qualified LLVM.General.Quote.AST as A
import LLVM.General.Quote.SSA
import qualified LLVM.General.AST.IntegerPredicate as LI
import qualified LLVM.General.AST as L
import qualified LLVM.General.AST.Constant as L
(Constant(Int, Float, Null, Struct, Array, Vector,
Undef, BlockAddress, GlobalReference))
import qualified LLVM.General.AST.Float as L
import qualified LLVM.General.AST.InlineAssembly as L
import qualified LLVM.General.AST.DataLayout as L
import qualified LLVM.General.AST.Attribute as L
import qualified Data.Map as M
-- | Monads usable for code generation: they can supply fresh names and
-- (optionally) run an action to produce basic blocks.
class (Applicative m, Monad m) => CodeGenMonad m where
  newVariable :: m L.Name
  exec :: m () -> m [L.BasicBlock]
-- | Default code-generation monad: a counter for fresh unnamed
-- variables plus a map from names to operands.
type CodeGen = State (Int, M.Map L.Name [L.Operand])
instance CodeGenMonad CodeGen where
  newVariable = state $ \(i,vs) -> (L.UnName (fromIntegral i), (i+1,vs))
  exec = error "not defined: exec"  -- 'exec' is not supported in this monad
-- | Values that can be coerced into a list of basic blocks inside a
-- code-generation monad; @()@ goes through 'exec'.
class ToBasicBlockList a where
  toBasicBlockList :: CodeGenMonad m => m a -> m [L.BasicBlock]
instance ToBasicBlockList () where
  toBasicBlockList = exec
instance ToBasicBlockList [L.BasicBlock] where
  toBasicBlockList = id
-- | Conversion to an LLVM 'L.Definition'.  (NOTE(review): the class
-- name's spelling, \"Defintion\", is part of the exported API and is
-- therefore kept as-is.)
class ToDefintion a where
  toDefinition :: a -> L.Definition
instance ToDefintion L.Definition where
  toDefinition = id
instance ToDefintion L.Global where
  toDefinition = L.GlobalDefinition
-- | Conversion of a list of convertible values to LLVM definitions.
class ToDefintions a where
  toDefinitions :: a -> [L.Definition]
instance ToDefintion a => ToDefintions [a] where
  toDefinitions = map toDefinition
-- | Conversion to an LLVM constant; the 'Word8'..'Word64' instances fix
-- the integer bit width to the word size, and 'Float'/'Double' map to
-- single/double precision floats.
class ToConstant a where
  toConstant :: a -> L.Constant
instance ToConstant L.Constant where
  toConstant = id
instance ToConstant Word8 where
  toConstant n = L.Int 8 (toInteger n)
instance ToConstant Word16 where
  toConstant n = L.Int 16 (toInteger n)
instance ToConstant Word32 where
  toConstant n = L.Int 32 (toInteger n)
instance ToConstant Word64 where
  toConstant n = L.Int 64 (toInteger n)
instance ToConstant Float where
  toConstant n = L.Float (L.Single n)
instance ToConstant Double where
  toConstant n = L.Float (L.Double n)
-- | Conversion to an LLVM name: strings become symbolic names, machine
-- words become numbered (unnamed) temporaries.
class ToName a where
  toName :: a -> L.Name
instance ToName L.Name where
  toName = id
instance ToName String where
  toName = L.Name
instance ToName Word where
  toName = L.UnName
-- | Conversion to an optional target-triple string.
class ToTargetTriple a where
  toTargetTriple :: a -> Maybe String
instance ToTargetTriple String where
  toTargetTriple = Just
instance ToTargetTriple (Maybe String) where
  toTargetTriple = id
-- | Parse an antiquotation string as a Haskell expression (compile-time
-- failure if it does not parse).
antiVarE :: String -> ExpQ
antiVarE s = [|$(either fail return $ parseExp s)|]
-- | A typed conversion from a quasiquote AST value to the corresponding
-- LLVM AST value inside any code-generation monad, and a variant with
-- the monad fixed by the caller.
type Conversion a b = forall m.(CodeGenMonad m) => a -> TExpQ (m b)
type Conversion' m a b = (CodeGenMonad m) => a -> TExpQ (m b)
-- | Quasiquote expansion from AST type @a@ to LLVM type @b@: 'qqExpM'
-- builds a monadic expression; 'qqExp' runs it in the default 'CodeGen'
-- state monad starting from an empty state.
class QQExp a b where
  qqExpM :: Conversion a b
  qqExp :: a -> TExpQ b
  qqExp x = [||fst $ runState $$(qqExpM x) ((0,M.empty) :: (Int,M.Map L.Name [L.Operand]))||]
-- | Any liftable value expands to itself.
instance (Lift a) => QQExp a a where
  qqExpM x = [||pure x||]
-- Elementwise expansion for the concrete list types occurring in the
-- AST; monomorphic instances are used rather than one generic list
-- instance (to avoid overlap with the other QQExp instances).
instance QQExp [A.MetadataNodeID] [L.MetadataNodeID] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp A.InstructionMetadata L.InstructionMetadata where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp [(A.Constant, A.Name)] [(L.Constant, L.Name)] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp [A.Name] [L.Name] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp [(A.Operand, [L.ParameterAttribute])]
               [(L.Operand, [L.ParameterAttribute])] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp [A.Operand] [L.Operand] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp [(A.Operand, A.Name)] [(L.Operand, L.Name)] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp [A.LandingPadClause] [L.LandingPadClause] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp [Maybe A.Operand] [Maybe L.Operand] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp [A.Constant] [L.Constant] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
instance QQExp [A.Type] [L.Type] where
  qqExpM (x:xs) = [||(:) <$> $$(qqExpM x) <*> $$(qqExpM xs)||]
  qqExpM [] = [||pure []||]
-- instance (QQExp a b) => QQExp (Maybe a) (Maybe b) where
-- qqExpM Nothing = [||pure Nothing||]
-- qqExpM (Just x) = [||Just <$> $$(qqExpM x)||]
-- Monomorphic 'Maybe' instances (a generic @Maybe@ instance is left
-- commented out above to avoid overlapping instances), plus 'Either'
-- and tuple instances that expand componentwise.
instance QQExp (Maybe A.Operand) (Maybe L.Operand) where
  qqExpM Nothing = [||pure Nothing||]
  qqExpM (Just x) = [||Just <$> $$(qqExpM x)||]
instance QQExp (Maybe A.Name) (Maybe L.Name) where
  qqExpM Nothing = [||pure Nothing||]
  qqExpM (Just x) = [||Just <$> $$(qqExpM x)||]
instance QQExp (Maybe A.Type) (Maybe L.Type) where
  qqExpM Nothing = [||pure Nothing||]
  qqExpM (Just x) = [||Just <$> $$(qqExpM x)||]
instance QQExp (Maybe A.DataLayout) (Maybe L.DataLayout) where
  qqExpM Nothing = [||pure Nothing||]
  qqExpM (Just x) = [||Just <$> $$(qqExpM x)||]
instance QQExp (Maybe A.Constant) (Maybe L.Constant) where
  qqExpM Nothing = [||pure Nothing||]
  qqExpM (Just x) = [||Just <$> $$(qqExpM x)||]
instance QQExp (Maybe (A.Type, A.Operand, A.Name))
               (Maybe (L.Type, L.Operand, L.Name)) where
  qqExpM Nothing = [||pure Nothing||]
  qqExpM (Just x) = [||Just <$> $$(qqExpM x)||]
instance (QQExp a c, QQExp b d) => QQExp (Either a b) (Either c d) where
  qqExpM (Left x) = [||Left <$> $$(qqExpM x)||]
  qqExpM (Right x) = [||Right <$> $$(qqExpM x)||]
instance (QQExp a c, QQExp b d) => QQExp (a,b) (c,d) where
  qqExpM (x,y) = [||(,) <$> $$(qqExpM x) <*> $$(qqExpM y)||]
instance (QQExp a d, QQExp b e, QQExp c f) => QQExp (a,b,c) (d,e,f) where
  qqExpM (x,y,z) = [||(,,) <$> $$(qqExpM x) <*> $$(qqExpM y) <*> $$(qqExpM z)||]
-- Dispatch each AST category to its dedicated conversion function
-- (defined below).
instance QQExp A.Definition L.Definition where
  qqExpM = qqDefinitionE
instance QQExp [A.Definition] [L.Definition] where
  qqExpM = qqDefinitionListE
instance QQExp A.Module L.Module where
  qqExpM = qqModuleE
instance QQExp A.Global L.Global where
  qqExpM = qqGlobalE
instance QQExp [A.Parameter] [L.Parameter] where
  qqExpM = qqParameterListE
instance QQExp A.Parameter L.Parameter where
  qqExpM = qqParameterE
instance QQExp A.LandingPadClause L.LandingPadClause where
  qqExpM = qqLandingPadClauseE
instance QQExp A.FastMathFlags L.FastMathFlags where
  qqExpM = qqFastMathFlagsE
instance QQExp A.InlineAssembly L.InlineAssembly where
  qqExpM = qqInlineAssemblyE
instance QQExp A.Instruction (Either L.Instruction L.Terminator) where
  qqExpM = qqInstructionE
-- A lone instruction: accept only the 'Left' (non-terminator) case,
-- failing at splice time on a terminator.
instance QQExp A.Instruction L.Instruction where
  qqExpM x1 = [||do x1' <- $$(qqExpM x1)
                    case x1' :: Either L.Instruction L.Terminator of
                      Left x1'' -> return x1''
                      Right x1'' -> fail $ show x1'' ++ " is no Instruction"||]
instance QQExp [A.LabeledInstruction] [L.BasicBlock] where
  qqExpM = qqLabeledInstructionListE
instance QQExp A.NamedInstruction [L.BasicBlock] where
  qqExpM = qqNamedInstructionE
instance QQExp A.LabeledInstruction [L.BasicBlock] where
  qqExpM = qqLabeledInstructionE
instance QQExp A.MetadataNodeID L.MetadataNodeID where
  qqExpM = qqMetadataNodeIDE
instance QQExp A.MetadataNode L.MetadataNode where
  qqExpM = qqMetadataNodeE
instance QQExp A.Operand L.Operand where
  qqExpM = qqOperandE
instance QQExp A.Constant L.Constant where
  qqExpM = qqConstantE
instance QQExp A.Name L.Name where
  qqExpM = qqNameE
instance QQExp A.Type L.Type where
  qqExpM = qqTypeE
instance QQExp A.DataLayout L.DataLayout where
  qqExpM = qqDataLayoutE
instance QQExp A.TargetTriple (Maybe String) where
  qqExpM = qqTargetTripleE
-- | Expand a list of definitions; an antiquoted definition list splices
-- user-supplied values in via 'toDefinitions' and appends the rest.
qqDefinitionListE :: Conversion [A.Definition] [L.Definition]
qqDefinitionListE [] = [||pure []||]
qqDefinitionListE (A.AntiDefinitionList v : defs) =
  [||(++) <$> $$(unsafeTExpCoerce [|$(antiVarE v) >>= return . toDefinitions|])
          <*> $$(qqExpM defs)||]
qqDefinitionListE (def : defs) =
  [||(:) <$> $$(qqExpM def) <*> $$(qqExpM defs)||]
-- | Expand a single top-level definition; 'A.AntiDefinition' splices a
-- user value via 'toDefinition'.  'A.AntiDefinitionList' is consumed by
-- 'qqDefinitionListE' and must never reach this function.
qqDefinitionE :: Conversion A.Definition L.Definition
qqDefinitionE (A.GlobalDefinition v) =
  [||L.GlobalDefinition <$> $$(qqExpM v)||]
qqDefinitionE (A.TypeDefinition n v) =
  [||L.TypeDefinition <$> $$(qqExpM n) <*> $$(qqExpM v)||]
qqDefinitionE (A.MetadataNodeDefinition i vs) =
  [||L.MetadataNodeDefinition <$> $$(qqExpM i) <*> $$(qqExpM vs)||]
qqDefinitionE (A.NamedMetadataDefinition i vs) =
  [||L.NamedMetadataDefinition <$> $$(qqExpM i) <*> $$(qqExpM vs)||]
qqDefinitionE (A.ModuleInlineAssembly s) =
  [||L.ModuleInlineAssembly <$> $$(qqExpM s)||]
qqDefinitionE (A.AntiDefinition s) =
  unsafeTExpCoerce $ [|$(antiVarE s) >>= return . toDefinition|]
qqDefinitionE a@(A.AntiDefinitionList _s) =
  error $ "Internal Error: unexpected antiquote " ++ show a
-- | Expand a whole module: name, data layout, target triple and
-- definitions, each expanded componentwise.
qqModuleE :: Conversion A.Module L.Module
qqModuleE (A.Module n dl tt ds) =
  [||L.Module <$> $$(qqExpM n) <*> $$(qqExpM dl) <*> $$(qqExpM tt) <*> $$(qqExpM ds)||]
-- | Expand a global (variable, alias or function).  A function's body
-- (its last field) is additionally converted to SSA form via 'toSSA'.
qqGlobalE :: Conversion A.Global L.Global
qqGlobalE (A.GlobalVariable x1 x2 x3 x4 x5 x6 x7 x8 x9 xA xB) =
  [||L.GlobalVariable <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4)
                      <*> $$(qqExpM x5) <*> $$(qqExpM x6) <*> $$(qqExpM x7) <*> $$(qqExpM x8)
                      <*> $$(qqExpM x9) <*> $$(qqExpM xA) <*> $$(qqExpM xB)||]
qqGlobalE (A.GlobalAlias x1 x2 x3 x4 x5) =
  [||L.GlobalAlias <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4)
                   <*> $$(qqExpM x5)||]
qqGlobalE (A.Function x1 x2 x3 x4 x5 x6 x7 x8 x9 xA xB xC) =
  [||L.Function <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4)
                <*> $$(qqExpM x5) <*> $$(qqExpM x6) <*> $$(qqExpM x7) <*> $$(qqExpM x8)
                <*> $$(qqExpM x9) <*> $$(qqExpM xA) <*> $$(qqExpM xB) <*> toSSA `fmap` $$(qqExpM xC)||]
-- | Expand a parameter list; an antiquoted parameter list is appended
-- directly to the remaining expansion.
qqParameterListE :: Conversion [A.Parameter] [L.Parameter]
qqParameterListE [] = [||pure []||]
qqParameterListE (A.AntiParameterList v : defs) =
  [||(++) <$> $$(unsafeTExpCoerce $ antiVarE v) <*> $$(qqExpM defs)||]
qqParameterListE (def : defs) =
  [||(:) <$> $$(qqExpM def) <*> $$(qqExpM defs)||]
-- | Expand a single parameter.  'A.AntiParameterList' is consumed by
-- 'qqParameterListE' and must never reach this function.
qqParameterE :: Conversion A.Parameter L.Parameter
qqParameterE (A.Parameter x1 x2 x3) =
  [||L.Parameter <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3)||]
qqParameterE (A.AntiParameter s) =
  unsafeTExpCoerce $ antiVarE s
qqParameterE a@(A.AntiParameterList _s) =
  error $ "Internal Error: unexpected antiquote " ++ show a
-- | Expand a landing-pad clause (catch or filter).
qqLandingPadClauseE :: Conversion A.LandingPadClause L.LandingPadClause
qqLandingPadClauseE (A.Catch x1) =
  [||L.Catch <$> $$(qqExpM x1)||]
qqLandingPadClauseE (A.Filter x1) =
  [||L.Filter <$> $$(qqExpM x1)||]
-- | Expand fast-math flags.
qqFastMathFlagsE :: Conversion A.FastMathFlags L.FastMathFlags
qqFastMathFlagsE A.NoFastMathFlags =
  [||pure L.NoFastMathFlags||]
qqFastMathFlagsE A.UnsafeAlgebra =
  [||pure L.UnsafeAlgebra||]
qqFastMathFlagsE (A.FastMathFlags x1 x2 x3 x4) =
  [||L.FastMathFlags <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4)||]
-- | Expand an inline-assembly fragment.
qqInlineAssemblyE :: Conversion A.InlineAssembly L.InlineAssembly
qqInlineAssemblyE (A.InlineAssembly x1 x2 x3 x4 x5 x6) =
  [||L.InlineAssembly <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4)
                      <*> $$(qqExpM x5) <*> $$(qqExpM x6)||]
-- | Expand a single AST instruction.  Ordinary instructions are
-- produced as 'Left', block terminators as 'Right'; the caller decides
-- which side it accepts.  Each clause mirrors the corresponding
-- constructor of 'L.Instruction' / 'L.Terminator', expanding every
-- field componentwise.
qqInstructionE :: Conversion A.Instruction (Either L.Instruction L.Terminator)
qqInstructionE (A.Add x1 x2 x3 x4 x5) =
  [||Left <$> (L.Add <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4) <*> $$(qqExpM x5))||]
qqInstructionE (A.FAdd x1 x2 x3 x4) =
  [||Left <$> (L.FAdd <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.Sub x1 x2 x3 x4 x5) =
  [||Left <$> (L.Sub <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4) <*> $$(qqExpM x5))||]
qqInstructionE (A.FSub x1 x2 x3 x4) =
  [||Left <$> (L.FSub <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.Mul x1 x2 x3 x4 x5) =
  [||Left <$> (L.Mul <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4) <*> $$(qqExpM x5))||]
qqInstructionE (A.FMul x1 x2 x3 x4) =
  [||Left <$> (L.FMul <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.UDiv x1 x2 x3 x4) =
  [||Left <$> (L.UDiv <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.SDiv x1 x2 x3 x4) =
  [||Left <$> (L.SDiv <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.FDiv x1 x2 x3 x4) =
  [||Left <$> (L.FDiv <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.URem x1 x2 x3) =
  [||Left <$> (L.URem <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.SRem x1 x2 x3) =
  [||Left <$> (L.SRem <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.FRem x1 x2 x3 x4) =
  [||Left <$> (L.FRem <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.Shl x1 x2 x3 x4 x5) =
  [||Left <$> (L.Shl <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4) <*> $$(qqExpM x5))||]
qqInstructionE (A.LShr x1 x2 x3 x4) =
  [||Left <$> (L.LShr <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.AShr x1 x2 x3 x4) =
  [||Left <$> (L.AShr <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.And x1 x2 x3) =
  [||Left <$> (L.And <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.Or x1 x2 x3) =
  [||Left <$> (L.Or <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.Xor x1 x2 x3) =
  [||Left <$> (L.Xor <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.Alloca x1 x2 x3 x4) =
  [||Left <$> (L.Alloca <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.Load x1 x2 x3 x4 x5) =
  [||Left <$> (L.Load <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4) <*> $$(qqExpM x5))||]
qqInstructionE (A.Store x1 x2 x3 x4 x5 x6) =
  [||Left <$> (L.Store <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4) <*> $$(qqExpM x5)
                       <*> $$(qqExpM x6))||]
qqInstructionE (A.GetElementPtr x1 x2 x3 x4) =
  [||Left <$> (L.GetElementPtr <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.Fence x1 x2) =
  [||Left <$> (L.Fence <$> $$(qqExpM x1) <*> $$(qqExpM x2))||]
qqInstructionE (A.CmpXchg x1 x2 x3 x4 x5 x6) =
  [||Left <$> (L.CmpXchg <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4) <*> $$(qqExpM x5)
                         <*> $$(qqExpM x6))||]
qqInstructionE (A.AtomicRMW x1 x2 x3 x4 x5 x6) =
  [||Left <$> (L.AtomicRMW <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4)
                           <*> $$(qqExpM x5) <*> $$(qqExpM x6))||]
qqInstructionE (A.Trunc x1 x2 x3) =
  [||Left <$> (L.Trunc <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.ZExt x1 x2 x3) =
  [||Left <$> (L.ZExt <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.SExt x1 x2 x3) =
  [||Left <$> (L.SExt <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.FPToUI x1 x2 x3) =
  [||Left <$> (L.FPToUI <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.FPToSI x1 x2 x3) =
  [||Left <$> (L.FPToSI <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.UIToFP x1 x2 x3) =
  [||Left <$> (L.UIToFP <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.SIToFP x1 x2 x3) =
  [||Left <$> (L.SIToFP <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.FPTrunc x1 x2 x3) =
  [||Left <$> (L.FPTrunc <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.FPExt x1 x2 x3) =
  [||Left <$> (L.FPExt <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.PtrToInt x1 x2 x3) =
  [||Left <$> (L.PtrToInt <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.IntToPtr x1 x2 x3) =
  [||Left <$> (L.IntToPtr <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.BitCast x1 x2 x3) =
  [||Left <$> (L.BitCast <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.AddrSpaceCast x1 x2 x3) =
  [||Left <$> (L.AddrSpaceCast <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.ICmp x1 x2 x3 x4) =
  [||Left <$> (L.ICmp <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.FCmp x1 x2 x3 x4) =
  [||Left <$> (L.FCmp <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.Phi x1 x2 x3) =
  [||Left <$> (L.Phi <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.Call x1 x2 x3 x4 x5 x6 x7) =
  [||Left <$> (L.Call <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4) <*> $$(qqExpM x5)
                      <*> $$(qqExpM x6) <*> $$(qqExpM x7))||]
qqInstructionE (A.Select x1 x2 x3 x4) =
  [||Left <$> (L.Select <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.VAArg x1 x2 x3) =
  [||Left <$> (L.VAArg <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.ExtractElement x1 x2 x3) =
  [||Left <$> (L.ExtractElement <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.InsertElement x1 x2 x3 x4) =
  [||Left <$> (L.InsertElement <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.ShuffleVector x1 x2 x3 x4) =
  [||Left <$> (L.ShuffleVector <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.ExtractValue x1 x2 x3) =
  [||Left <$> (L.ExtractValue <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.InsertValue x1 x2 x3 x4) =
  [||Left <$> (L.InsertValue <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.LandingPad x1 x2 x3 x4 x5) =
  [||Left <$> (L.LandingPad <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4)
                            <*> $$(qqExpM x5))||]
-- A bare operand is wrapped in a no-op @select true x x@ instruction.
qqInstructionE (A.OperandInstruction x1) =
  [||do x1' <- $$(qqExpM x1)
        let true = L.ConstantOperand $ L.Int 1 1
        return $ Left $ L.Select true x1' x1' []||]
qqInstructionE (A.AntiInstruction s) =
  unsafeTExpCoerce $ antiVarE s
-- Terminator instructions produce 'Right':
qqInstructionE (A.Ret x1 x2) =
  [||Right <$> (L.Ret <$> $$(qqExpM x1) <*> $$(qqExpM x2))||]
qqInstructionE (A.CondBr x1 x2 x3 x4) =
  [||Right <$> (L.CondBr <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.Br x1 x2) =
  [||Right <$> (L.Br <$> $$(qqExpM x1) <*> $$(qqExpM x2))||]
qqInstructionE (A.Switch x1 x2 x3 x4) =
  [||Right <$> (L.Switch <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4))||]
qqInstructionE (A.IndirectBr x1 x2 x3) =
  [||Right <$> (L.IndirectBr <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3))||]
qqInstructionE (A.Invoke x1 x2 x3 x4 x5 x6 x7 x8) =
  [||Right <$> (L.Invoke <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4) <*> $$(qqExpM x5)
                         <*> $$(qqExpM x6) <*> $$(qqExpM x7) <*> $$(qqExpM x8))||]
qqInstructionE (A.Resume x1 x2) =
  [||Right <$> (L.Resume <$> $$(qqExpM x1) <*> $$(qqExpM x2))||]
qqInstructionE (A.Unreachable x1) =
  [||Right <$> (L.Unreachable <$> $$(qqExpM x1))||]
-- | Expand a list of labeled instructions to basic blocks, then fuse
-- consecutive blocks: any block that ends with a branch to the special
-- label @"nextblock"@ is merged with its successor, and phi-node
-- incoming labels are renamed to account for the merged block names.
qqLabeledInstructionListE :: Conversion [A.LabeledInstruction] [L.BasicBlock]
qqLabeledInstructionListE [] =
  [||pure []||]
qqLabeledInstructionListE (x:xs) =
  [||let nextLabel :: L.Name
         nextLabel = L.Name "nextblock"
         -- does this block fall through to the next one?
         jumpNext :: L.BasicBlock -> Bool
         jumpNext (L.BasicBlock _ _ t) =
           case t of
             _ L.:= L.Br l2 _ | l2 == nextLabel -> True
             L.Do (L.Br l2 _) | l2 == nextLabel -> True
             _ -> False
         -- rename phi-node source labels according to the given mapping
         replacePhiFroms :: [(L.Name,L.Name)] -> L.BasicBlock -> L.BasicBlock
         replacePhiFroms labels (L.BasicBlock n is t) =
           L.BasicBlock n (map (replacePhiFrom labels) is) t
         replacePhiFrom :: [(L.Name,L.Name)] -> L.Named L.Instruction -> L.Named L.Instruction
         replacePhiFrom names (n L.:= phi@L.Phi{}) =
           n L.:= replacePhiFrom' names phi
         replacePhiFrom names (L.Do phi@L.Phi{}) =
           L.Do $ replacePhiFrom' names phi
         replacePhiFrom _ named = named
         replacePhiFrom' :: [(L.Name,L.Name)] -> L.Instruction -> L.Instruction
         replacePhiFrom' names phi@L.Phi{} =
           phi{ L.incomingValues =
                  [ (op,n') | (op,n) <- L.incomingValues phi,
                              let n' = maybe n id (lookup n names)] }
         replacePhiFrom' _ _ =
           error "this should never happen"
         -- merge two blocks, recording that the second block's name is
         -- now reachable under the first one's name
         fuse :: L.BasicBlock -> L.BasicBlock -> Writer [(L.Name,L.Name)] L.BasicBlock
         fuse (L.BasicBlock n1 i1 _t1) (L.BasicBlock n2 i2 t2) = do
           tell [(n2,n1)]
           return $ L.BasicBlock n1 (i1++i2) t2
         fuseBlocks' :: [L.BasicBlock] -> Writer [(L.Name,L.Name)] [L.BasicBlock]
         fuseBlocks' bbs@[] = return bbs
         fuseBlocks' bbs@[_] = return bbs
         fuseBlocks' (bb1:bbs@(bb2:bbs')) =
           case jumpNext bb1 of
             True -> do
               fused <- fuse bb1 bb2
               fuseBlocks' (fused:bbs')
             False -> do
               bbs_ <- fuseBlocks' bbs
               return $ bb1 : bbs_
         fuseBlocks :: [L.BasicBlock] -> [L.BasicBlock]
         fuseBlocks bbs =
           let (bbs',labels) = runWriter $ fuseBlocks' bbs
           in map (replacePhiFroms labels) bbs'
     in fuseBlocks <$> ((++) <$> $$(qqExpM x) <*> $$(qqExpM xs))||]
-- | Convert one labeled instruction into basic blocks.  A plain
-- 'A.Labeled' instruction relabels the first generated block; the
-- structured forms ('A.ForLoop', 'A.ITE', 'A.While') are lowered into
-- explicit CFG skeletons whose fall-through edges use the placeholder
-- label @"nextblock"@ (fused away by 'qqLabeledInstructionListE').
qqLabeledInstructionE :: forall m. Conversion' m A.LabeledInstruction [L.BasicBlock]
qqLabeledInstructionE (A.Labeled label instr) =
  [||do label' <- $$(qqExpM label)
        L.BasicBlock _ is t:bbs <- $$(qqExpM instr)
        return $ L.BasicBlock label' is t:bbs||]
-- Lower a for-loop: an init block binding the iteration variable, a head
-- block testing the bound, the user body, and a trailing block that steps
-- the variable and jumps back to the head.  The end block falls through
-- via "nextblock".
qqLabeledInstructionE (A.ForLoop label iterType iterName direction from to step body) =
  [||do
    label' <- $$(qqExpM label)
    body' <- $$(qqExpM body :: TExpQ (m [L.BasicBlock]))
    iterName' <- $$(qqExpM iterName :: TExpQ (m L.Name))
    iterType' <- $$(qqExpM iterType :: TExpQ (m L.Type))
    from' <- $$(qqExpM from :: TExpQ (m L.Operand))
    to' <- $$(qqExpM to)
    step' <- $$(qqExpM step :: TExpQ (m L.Operand))
    let labelString = case label' of
          L.Name s -> s
          L.UnName n -> "num"++show n
        -- Derived labels for the loop skeleton.
        cond = L.Name (labelString ++ ".cond")
        labelHead = L.Name (labelString ++ ".head")
        labelEnd = L.Name (labelString ++ ".end")
        labelLast = L.Name (labelString ++ ".last")
        iter = L.LocalReference iterType' iterName'
        -- Step the iteration variable up or down by step'.
        newIterInstr = case direction of
          A.Up -> [ iterName' L.:= L.Add True True iter step' [] ]
          A.Down -> [ iterName' L.:= L.Sub True True iter step' [] ]
        -- Loop condition: signed compare of the variable against the bound.
        preInstrs = case direction of
          A.Up ->
            [ cond L.:= L.ICmp LI.SLT iter to' [] ]
          A.Down ->
            [ cond L.:= L.ICmp LI.SGT iter to' [] ]
        branchTo l = case body'' of
          [] -> error "empty body of for-loop"
          (L.BasicBlock bodyLabel _ _:_) -> L.Do (L.CondBr (L.LocalReference (L.IntegerType 1) cond) bodyLabel l [])
        retTerm = L.Do (L.Br (L.Name "nextblock") [])
        true = L.ConstantOperand $ L.Int 1 1
        -- NOTE(review): 'select true from' from'' looks like a no-op copy
        -- used only to bind the initial value to iterName' -- confirm.
        initIter = iterName' L.:= L.Select true from' from' []
        (pre,post) =
          ([L.BasicBlock label' [initIter] (L.Do (L.Br labelHead [])), L.BasicBlock labelHead preInstrs (branchTo labelEnd)]
          ,[L.BasicBlock labelEnd [] retTerm])
        body'' = body' ++ [L.BasicBlock labelLast newIterInstr (L.Do (L.Br labelHead []))]
    return (pre ++ body'' ++ post)
  ||]
-- Lower if-then-else: a head block conditionally branches to the then/else
-- sub-bodies, both of which rejoin at an end block that falls through via
-- "nextblock".  Blocks whose terminator targets "nextblock" are later
-- fused with their successor, which effectively relabels the sub-bodies.
qqLabeledInstructionE (A.ITE label cond then_body else_body) =
  [||do
    label' <- $$(qqExpM label)
    cond' <- $$(qqExpM cond)
    then_body' <- $$(qqExpM then_body)
    else_body' <- $$(qqExpM else_body)
    let labelString = case label' of
          L.Name n -> n
          L.UnName n -> show n
        thenLabel = L.Name (labelString ++ ".then")
        thenLastLabel = L.Name (labelString ++ ".then.last")
        elseLabel = L.Name (labelString ++ ".else")
        elseLastLabel = L.Name (labelString ++ ".else.last")
        endLabel = L.Name (labelString ++ ".end")
        headLabel = L.Name (labelString ++ ".head")
        brEnd l = [L.BasicBlock l [] (L.Do (L.Br endLabel []))]
        pre = [L.BasicBlock label' [] (L.Do (L.Br headLabel []))
              ,L.BasicBlock headLabel [] (L.Do (L.CondBr cond' thenLabel elseLabel []))]
        brNext l = [L.BasicBlock l [] (L.Do (L.Br (L.Name "nextblock") []))]
        end = brNext endLabel
        then_body'' = brNext thenLabel ++ then_body' ++ brEnd thenLastLabel
        else_body'' = brNext elseLabel ++ else_body' ++ brEnd elseLastLabel
    return (pre ++ then_body'' ++ else_body'' ++ end)
  ||]
-- Lower a while-loop: a head block tests the condition and branches either
-- into the body or to the end block; the body's final block jumps back to
-- the head.  The end block falls through via "nextblock".
qqLabeledInstructionE (A.While label cond body) =
  [||do
    label' <- $$(qqExpM label)
    cond' <- $$(qqExpM cond)
    body' <- $$(qqExpM body)
    let labelString = case label' of
          L.Name n -> n
          L.UnName n -> show n
        bodyLabel = L.Name (labelString ++ ".body")
        bodyLastLabel = L.Name (labelString ++ ".body.last")
        endLabel = L.Name (labelString ++ ".end")
        headLabel = L.Name (labelString ++ ".head")
        pre = [L.BasicBlock label' [] (L.Do (L.Br headLabel []))
              ,L.BasicBlock headLabel [] (L.Do (L.CondBr cond' bodyLabel endLabel []))]
        brNext l = [L.BasicBlock l [] (L.Do (L.Br (L.Name "nextblock") []))]
        end = brNext endLabel
        brTop = [L.BasicBlock bodyLastLabel [] (L.Do (L.Br headLabel []))]
        body'' = brNext bodyLabel ++ body' ++ brTop
    return (pre ++ body'' ++ end)
  ||]
-- | Convert a possibly-named AST instruction (@x := i@ or @Do i@) into a
-- single-instruction basic block named by 'newVariable'.  'Left' carries a
-- plain instruction (the block then falls through via @"nextblock"@);
-- 'Right' carries a terminator, which ends the block itself.
qqNamedInstructionE :: Conversion A.NamedInstruction [L.BasicBlock]
qqNamedInstructionE (x1 A.:= x2) =
  [||do x1' <- $$(qqExpM x1)
        x2' <- $$(qqExpM x2)
        n <- newVariable
        case x2' of
          Left ins -> return [L.BasicBlock n [x1' L.:= ins] (L.Do $ L.Br (L.Name "nextblock") [])]
          Right term -> return [L.BasicBlock n [] (x1' L.:= term)]||]
qqNamedInstructionE (A.Do x2) =
  [||do x2' <- $$(qqExpM x2)
        n <- newVariable
        case x2' of
          Left ins -> return [L.BasicBlock n [L.Do ins] (L.Do $ L.Br (L.Name "nextblock") [])]
          Right term -> return [L.BasicBlock n [] (L.Do term)]||]
-- Antiquotations: splice user expressions as instruction lists / blocks.
qqNamedInstructionE (A.AntiInstructionList s) =
  unsafeTExpCoerce $ antiVarE s
qqNamedInstructionE (A.AntiBasicBlock v)
  = [||(:[]) <$> $$(unsafeTExpCoerce $ antiVarE v)||]
qqNamedInstructionE (A.AntiBasicBlockList v)
  = unsafeTExpCoerce $ [|toBasicBlockList $(antiVarE v)|]
-- | Convert an AST metadata node identifier into its llvm-general form.
qqMetadataNodeIDE :: Conversion A.MetadataNodeID L.MetadataNodeID
qqMetadataNodeIDE (A.MetadataNodeID x1) =
  [||L.MetadataNodeID <$> $$(qqExpM x1)||]
-- | Convert an AST metadata node (inline node or node reference).
qqMetadataNodeE :: Conversion A.MetadataNode L.MetadataNode
qqMetadataNodeE (A.MetadataNode x1) =
  [||L.MetadataNode <$> $$(qqExpM x1)||]
qqMetadataNodeE (A.MetadataNodeReference x1) =
  [||L.MetadataNodeReference <$> $$(qqExpM x1)||]
-- | Convert an AST operand.  'A.AntiOperand' splices the antiquoted user
-- expression unchanged (coerced to the expected typed-expression type).
qqOperandE :: Conversion A.Operand L.Operand
qqOperandE (A.LocalReference x1 x2) =
  [||L.LocalReference <$> $$(qqExpM x1) <*> $$(qqExpM x2)||]
qqOperandE (A.ConstantOperand x1) =
  [||L.ConstantOperand <$> $$(qqExpM x1)||]
qqOperandE (A.MetadataStringOperand x1) =
  [||L.MetadataStringOperand <$> $$(qqExpM x1)||]
qqOperandE (A.MetadataNodeOperand x1) =
  [||L.MetadataNodeOperand <$> $$(qqExpM x1)||]
qqOperandE (A.AntiOperand s) =
  [||$$(unsafeTExpCoerce $ antiVarE s)||]
-- | Convert an AST constant into an llvm-general constant.
qqConstantE :: Conversion A.Constant L.Constant
qqConstantE (A.Int x1 x2) =
  [||L.Int <$> $$(qqExpM x1) <*> $$(qqExpM x2)||]
-- Integer literal whose bit width comes from an antiquoted type: the
-- spliced type must be an 'L.IntegerType', otherwise conversion fails.
qqConstantE (A.IntAntiBs x1 x2) =
  [||let typeBits (L.IntegerType bs) = return bs
         typeBits t = fail $ "unexpected type: " ++ show t
     in L.Int <$> ($$(unsafeTExpCoerce (antiVarE x1)) >>= typeBits) <*> $$(qqExpM x2)||]
qqConstantE (A.Float x1) =
  [||L.Float <$> $$(qqExpM x1)||]
qqConstantE (A.Null x1) =
  [||L.Null <$> $$(qqExpM x1)||]
qqConstantE (A.Struct x1 x2 x3) =
  [||L.Struct <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3)||]
qqConstantE (A.Array x1 x2) =
  [||L.Array <$> $$(qqExpM x1) <*> $$(qqExpM x2)||]
qqConstantE (A.Vector x1) =
  [||L.Vector <$> $$(qqExpM x1)||]
qqConstantE (A.Undef x1) =
  [||L.Undef <$> $$(qqExpM x1)||]
qqConstantE (A.BlockAddress x1 x2) =
  [||L.BlockAddress <$> $$(qqExpM x1) <*> $$(qqExpM x2)||]
qqConstantE (A.GlobalReference x1 x2) =
  [||L.GlobalReference <$> $$(qqExpM x1)<*> $$(qqExpM x2)||]
-- Antiquoted constant: spliced through 'toConstant'.
qqConstantE (A.AntiConstant s) =
  unsafeTExpCoerce [|$(antiVarE s) >>= (return . toConstant)|]
-- | Convert an AST name.  'A.NeedsName' generates a fresh name of the
-- form @"nK"@ by bumping the 'counter' IORef at splice (compile) time;
-- 'A.AntiName' splices a user expression through 'toName'.
qqNameE :: Conversion A.Name L.Name
qqNameE (A.Name x1) =
  [||L.Name <$> $$(qqExpM x1)||]
qqNameE (A.UnName x1) =
  [||L.UnName <$> $$(qqExpM x1)||]
qqNameE A.NeedsName = do
  -- NOTE(review): 'counter' is presumably shared across all splices of a
  -- compilation so generated names stay unique -- confirm at its binding.
  n <- runIO $ atomicModifyIORef' counter $ \n -> (n+1,n)
  [||pure $ L.Name $ "n" ++ show (n :: Int)||]
qqNameE (A.AntiName s) =
  unsafeTExpCoerce [|$(antiVarE s) >>= return . toName|]
-- | Convert an AST type into an llvm-general type; 'A.AntiType' splices
-- the antiquoted user expression directly.
qqTypeE :: Conversion A.Type L.Type
qqTypeE A.VoidType =
  [||pure L.VoidType||]
qqTypeE (A.IntegerType x1) =
  [||L.IntegerType <$> $$(qqExpM x1)||]
qqTypeE (A.PointerType x1 x2) =
  [||L.PointerType <$> $$(qqExpM x1) <*> $$(qqExpM x2)||]
qqTypeE (A.FloatingPointType x1 x2) =
  [||L.FloatingPointType <$> $$(qqExpM x1) <*> $$(qqExpM x2)||]
qqTypeE (A.FunctionType x1 x2 x3) =
  [||L.FunctionType <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3)||]
qqTypeE (A.VectorType x1 x2) =
  [||L.VectorType <$> $$(qqExpM x1) <*> $$(qqExpM x2)||]
qqTypeE (A.StructureType x1 x2) =
  [||L.StructureType <$> $$(qqExpM x1) <*> $$(qqExpM x2)||]
qqTypeE (A.ArrayType x1 x2) =
  [||L.ArrayType <$> $$(qqExpM x1) <*> $$(qqExpM x2)||]
qqTypeE (A.NamedTypeReference x1) =
  [||L.NamedTypeReference <$> $$(qqExpM x1)||]
qqTypeE A.MetadataType =
  [||pure L.MetadataType||]
qqTypeE (A.AntiType s) =
  [||$$(unsafeTExpCoerce $ antiVarE s)||]
-- | Convert an AST data layout; 'A.AntiDataLayout' splices a user value.
qqDataLayoutE :: Conversion A.DataLayout L.DataLayout
qqDataLayoutE (A.DataLayout x1 x2 x3 x4 x5) =
  [||L.DataLayout <$> $$(qqExpM x1) <*> $$(qqExpM x2) <*> $$(qqExpM x3) <*> $$(qqExpM x4)
                  <*> $$(qqExpM x5)||]
qqDataLayoutE (A.AntiDataLayout s) =
  unsafeTExpCoerce $ antiVarE s
-- | Convert the optional target triple; antiquotations are spliced
-- through 'toTargetTriple'.
qqTargetTripleE :: Conversion A.TargetTriple (Maybe String)
qqTargetTripleE A.NoTargetTriple =
  [||pure Nothing||]
qqTargetTripleE (A.TargetTriple v) =
  [||Just <$> $$(qqExpM v)||]
qqTargetTripleE (A.AntiTargetTriple v) =
  unsafeTExpCoerce [|$(antiVarE v) >>= return . toTargetTriple|]
-- | Run a parser over the quasiquote contents, with antiquotation always
-- enabled.  The current Template Haskell source location seeds the
-- parser's starting position so that parse errors point at the quotation
-- site; a parse failure aborts the 'Q' action via 'fail'.
parse :: [A.Extensions]
      -> P.P a
      -> String
      -> Q a
parse exts p s = do
  loc <- location
  let pos = Pos (loc_filename loc)
                (fst (loc_start loc))
                (snd (loc_start loc))
                0
  either (fail . show) return $
    P.parse (A.Antiquotation : exts) p (B.pack s) pos
-- | A 'QuasiQuoter' tagged with the type of expression it produces.
newtype TQuasiQuoter a = TQuasiQuoter { unTQuasiQuoter :: QuasiQuoter }
-- | Build an expression-only quasiquoter from a parser: the parsed AST is
-- turned into a typed TH expression via 'qqExp'.  Pattern, type and
-- declaration contexts are not supported and fail at splice time.
quasiquote :: forall a b. (Data a, QQExp a b)
           => [A.Extensions]
           -> P.P a
           -> TQuasiQuoter b
quasiquote exts p = TQuasiQuoter QuasiQuoter
    { quoteExp  = \s -> parse exts p s >>= unTypeQ . (qqExp :: a -> TExpQ b)
    , quotePat  = fail "LLVM pattern quasiquoter undefined"
    , quoteType = fail "LLVM type quasiquoter undefined"
    , quoteDec  = fail "LLVM declaration quasiquoter undefined"
    }
-- | Like 'quasiquote', but the generated expression builds its result in
-- a code-generation monad @m@ (via 'qqExpM').  Only expression contexts
-- are supported; the other contexts fail at splice time.
quasiquoteM :: forall a b m. (Data a, QQExp a b, CodeGenMonad m)
            => [A.Extensions]
            -> P.P a
            -> TQuasiQuoter (m b)
quasiquoteM exts p = TQuasiQuoter QuasiQuoter
    { quoteExp  = \s -> parse exts p s >>= unTypeQ . (qqExpM :: Conversion' m a b)
    , quotePat  = fail "LLVM monadic pattern quasiquoter undefined"
    , quoteType = fail "LLVM type quasiquoter undefined"
    , quoteDec  = fail "LLVM declaration quasiquoter undefined"
    }
| tvh/llvm-general-quote | src/LLVM/General/Quote/Base.hs | bsd-3-clause | 33,688 | 22 | 14 | 7,028 | 1,738 | 970 | 768 | -1 | -1 |
{-# LANGUAGE CPP
, NoImplicitPrelude
, RankNTypes
, TypeFamilies
, FunctionalDependencies
, FlexibleInstances
, UndecidableInstances
, MultiParamTypeClasses #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Safe #-}
#endif
#if MIN_VERSION_transformers(0,4,0)
-- Hide warnings for the deprecated ErrorT transformer:
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
#endif
{- |
Module : Control.Monad.Trans.Control
Copyright : Bas van Dijk, Anders Kaseorg
License : BSD-style
Maintainer : Bas van Dijk <v.dijk.bas@gmail.com>
Stability : experimental
-}
module Control.Monad.Trans.Control
( -- * MonadTransControl
MonadTransControl(..), Run
-- ** Defaults for MonadTransControl
-- $MonadTransControlDefaults
, RunDefault, defaultLiftWith, defaultRestoreT
-- * MonadBaseControl
, MonadBaseControl (..), RunInBase
-- ** Defaults for MonadBaseControl
-- $MonadBaseControlDefaults
, ComposeSt, RunInBaseDefault, defaultLiftBaseWith, defaultRestoreM
-- * Utility functions
, control, embed, embed_, captureT, captureM
, liftBaseOp, liftBaseOp_
, liftBaseDiscard, liftBaseOpDiscard
) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
-- from base:
import Data.Function ( (.), ($), const )
import Data.Monoid ( Monoid, mempty )
import Control.Monad ( Monad, (>>=), return, liftM )
import System.IO ( IO )
import Data.Maybe ( Maybe )
import Data.Either ( Either )
#if MIN_VERSION_base(4,4,0)
import Control.Monad.ST.Lazy.Safe ( ST )
import qualified Control.Monad.ST.Safe as Strict ( ST )
#endif
-- from stm:
import Control.Monad.STM ( STM )
-- from transformers:
import Control.Monad.Trans.Class ( MonadTrans )
import Control.Monad.Trans.Identity ( IdentityT(IdentityT), runIdentityT )
import Control.Monad.Trans.List ( ListT (ListT), runListT )
import Control.Monad.Trans.Maybe ( MaybeT (MaybeT), runMaybeT )
import Control.Monad.Trans.Error ( ErrorT (ErrorT), runErrorT, Error )
import Control.Monad.Trans.Reader ( ReaderT (ReaderT), runReaderT )
import Control.Monad.Trans.State ( StateT (StateT), runStateT )
import Control.Monad.Trans.Writer ( WriterT (WriterT), runWriterT )
import Control.Monad.Trans.RWS ( RWST (RWST), runRWST )
import Control.Monad.Trans.Except ( ExceptT (ExceptT), runExceptT )
import qualified Control.Monad.Trans.RWS.Strict as Strict ( RWST (RWST), runRWST )
import qualified Control.Monad.Trans.State.Strict as Strict ( StateT (StateT), runStateT )
import qualified Control.Monad.Trans.Writer.Strict as Strict ( WriterT(WriterT), runWriterT )
import Data.Functor.Identity ( Identity )
-- from transformers-base:
import Control.Monad.Base ( MonadBase )
#if MIN_VERSION_base(4,3,0)
import Control.Monad ( void )
#else
import Data.Functor (Functor, fmap)
void :: Functor f => f a -> f ()
void = fmap (const ())
#endif
import Prelude (id)
--------------------------------------------------------------------------------
-- MonadTransControl type class
--------------------------------------------------------------------------------
class MonadTrans t => MonadTransControl t where
-- | Monadic state of @t@.
type StT t a :: *
-- | @liftWith@ is similar to 'lift' in that it lifts a computation from
-- the argument monad to the constructed monad.
--
-- Instances should satisfy similar laws as the 'MonadTrans' laws:
--
-- @liftWith . const . return = return@
--
-- @liftWith (const (m >>= f)) = liftWith (const m) >>= liftWith . const . f@
--
-- The difference with 'lift' is that before lifting the @m@ computation
-- @liftWith@ captures the state of @t@. It then provides the @m@
-- computation with a 'Run' function that allows running @t n@ computations in
-- @n@ (for all @n@) on the captured state.
liftWith :: Monad m => (Run t -> m a) -> t m a
-- | Construct a @t@ computation from the monadic state of @t@ that is
-- returned from a 'Run' function.
--
-- Instances should satisfy:
--
-- @liftWith (\\run -> run t) >>= restoreT . return = t@
restoreT :: Monad m => m (StT t a) -> t m a
-- | A function that runs a transformed monad @t n@ on the monadic state that
-- was captured by 'liftWith'
--
-- A @Run t@ function yields a computation in @n@ that returns the monadic state
-- of @t@. This state can later be used to restore a @t@ computation using
-- 'restoreT'.
type Run t = forall n b. Monad n => t n b -> n (StT t b)
--------------------------------------------------------------------------------
-- Defaults for MonadTransControl
--------------------------------------------------------------------------------
-- $MonadTransControlDefaults
--
-- The following functions can be used to define a 'MonadTransControl' instance
-- for a monad transformer which simply wraps another monad transformer which
-- already has a @MonadTransControl@ instance. For example:
--
-- @
-- {-\# LANGUAGE GeneralizedNewtypeDeriving \#-}
--
-- newtype CounterT m a = CounterT {unCounterT :: StateT Int m a}
-- deriving (Monad, MonadTrans)
--
-- instance MonadTransControl CounterT where
-- type StT CounterT a = StT (StateT Int) a
-- liftWith = 'defaultLiftWith' CounterT unCounterT
-- restoreT = 'defaultRestoreT' CounterT
-- @
-- | A function like 'Run' that runs a monad transformer @t@ which wraps the
-- monad transformer @t'@. This is used in 'defaultLiftWith'.
type RunDefault t t' = forall n b. Monad n => t n b -> n (StT t' b)
-- | Default definition for the 'liftWith' method: unwrap the transformer,
-- lift through the wrapped transformer, and re-wrap the 'Run' function so
-- it accepts the outer type.
defaultLiftWith :: (Monad m, MonadTransControl n)
                => (forall b.   n m b -> t m b) -- ^ Monad constructor
                -> (forall o b. t o b -> n o b) -- ^ Monad deconstructor
                -> (RunDefault t n -> m a)
                -> t m a
defaultLiftWith t unT f = t (liftWith (\run -> f (run . unT)))
{-# INLINABLE defaultLiftWith #-}
-- | Default definition for the 'restoreT' method: restore through the
-- wrapped transformer, then re-wrap.
defaultRestoreT :: (Monad m, MonadTransControl n)
                => (n m a -> t m a) -- ^ Monad constructor
                -> m (StT n a)
                -> t m a
defaultRestoreT t m = t (restoreT m)
{-# INLINABLE defaultRestoreT #-}
--------------------------------------------------------------------------------
-- MonadTransControl instances
--------------------------------------------------------------------------------
-- 'IdentityT' carries no state of its own, so 'StT' is just the result.
instance MonadTransControl IdentityT where
    type StT IdentityT a = a
    liftWith f = IdentityT (f runIdentityT)
    restoreT m = IdentityT m
    {-# INLINABLE liftWith #-}
    {-# INLINABLE restoreT #-}
instance MonadTransControl MaybeT where
type StT MaybeT a = Maybe a
liftWith f = MaybeT $ liftM return $ f $ runMaybeT
restoreT = MaybeT
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
instance Error e => MonadTransControl (ErrorT e) where
type StT (ErrorT e) a = Either e a
liftWith f = ErrorT $ liftM return $ f $ runErrorT
restoreT = ErrorT
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
instance MonadTransControl (ExceptT e) where
type StT (ExceptT e) a = Either e a
liftWith f = ExceptT $ liftM return $ f $ runExceptT
restoreT = ExceptT
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
instance MonadTransControl ListT where
type StT ListT a = [a]
liftWith f = ListT $ liftM return $ f $ runListT
restoreT = ListT
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
-- 'ReaderT' state is read-only, so 'StT' is just the result: running
-- simply supplies the captured environment, and restoring ignores it.
instance MonadTransControl (ReaderT r) where
    type StT (ReaderT r) a = a
    liftWith f = ReaderT $ \env -> f (\action -> runReaderT action env)
    restoreT m = ReaderT (\_ -> m)
    {-# INLINABLE liftWith #-}
    {-# INLINABLE restoreT #-}
instance MonadTransControl (StateT s) where
type StT (StateT s) a = (a, s)
liftWith f = StateT $ \s ->
liftM (\x -> (x, s))
(f $ \t -> runStateT t s)
restoreT = StateT . const
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
instance MonadTransControl (Strict.StateT s) where
type StT (Strict.StateT s) a = (a, s)
liftWith f = Strict.StateT $ \s ->
liftM (\x -> (x, s))
(f $ \t -> Strict.runStateT t s)
restoreT = Strict.StateT . const
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
instance Monoid w => MonadTransControl (WriterT w) where
type StT (WriterT w) a = (a, w)
liftWith f = WriterT $ liftM (\x -> (x, mempty))
(f $ runWriterT)
restoreT = WriterT
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
instance Monoid w => MonadTransControl (Strict.WriterT w) where
type StT (Strict.WriterT w) a = (a, w)
liftWith f = Strict.WriterT $ liftM (\x -> (x, mempty))
(f $ Strict.runWriterT)
restoreT = Strict.WriterT
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
instance Monoid w => MonadTransControl (RWST r w s) where
type StT (RWST r w s) a = (a, s, w)
liftWith f = RWST $ \r s -> liftM (\x -> (x, s, mempty))
(f $ \t -> runRWST t r s)
restoreT mSt = RWST $ \_ _ -> mSt
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
instance Monoid w => MonadTransControl (Strict.RWST r w s) where
type StT (Strict.RWST r w s) a = (a, s, w)
liftWith f =
Strict.RWST $ \r s -> liftM (\x -> (x, s, mempty))
(f $ \t -> Strict.runRWST t r s)
restoreT mSt = Strict.RWST $ \_ _ -> mSt
{-# INLINABLE liftWith #-}
{-# INLINABLE restoreT #-}
--------------------------------------------------------------------------------
-- MonadBaseControl type class
--------------------------------------------------------------------------------
class MonadBase b m => MonadBaseControl b m | m -> b where
-- | Monadic state of @m@.
type StM m a :: *
-- | @liftBaseWith@ is similar to 'liftIO' and 'liftBase' in that it
-- lifts a base computation to the constructed monad.
--
-- Instances should satisfy similar laws as the 'MonadIO' and 'MonadBase' laws:
--
-- @liftBaseWith . const . return = return@
--
-- @liftBaseWith (const (m >>= f)) = liftBaseWith (const m) >>= liftBaseWith . const . f@
--
-- The difference with 'liftBase' is that before lifting the base computation
-- @liftBaseWith@ captures the state of @m@. It then provides the base
-- computation with a 'RunInBase' function that allows running @m@
-- computations in the base monad on the captured state.
liftBaseWith :: (RunInBase m b -> b a) -> m a
-- | Construct a @m@ computation from the monadic state of @m@ that is
-- returned from a 'RunInBase' function.
--
-- Instances should satisfy:
--
-- @liftBaseWith (\\runInBase -> runInBase m) >>= restoreM = m@
restoreM :: StM m a -> m a
-- | A function that runs a @m@ computation on the monadic state that was
-- captured by 'liftBaseWith'
--
-- A @RunInBase m@ function yields a computation in the base monad of @m@ that
-- returns the monadic state of @m@. This state can later be used to restore the
-- @m@ computation using 'restoreM'.
type RunInBase m b = forall a. m a -> b (StM m a)
--------------------------------------------------------------------------------
-- MonadBaseControl instances for all monads in the base library
--------------------------------------------------------------------------------
#define BASE(M) \
instance MonadBaseControl (M) (M) where { \
type StM (M) a = a; \
liftBaseWith f = f id; \
restoreM = return; \
{-# INLINABLE liftBaseWith #-}; \
{-# INLINABLE restoreM #-}}
BASE(IO)
BASE(Maybe)
BASE(Either e)
BASE([])
BASE((->) r)
BASE(Identity)
BASE(STM)
#if MIN_VERSION_base(4,4,0)
BASE(Strict.ST s)
BASE( ST s)
#endif
#undef BASE
--------------------------------------------------------------------------------
-- Defaults for MonadBaseControl
--------------------------------------------------------------------------------
-- $MonadBaseControlDefaults
--
-- Note that by using the following default definitions it's easy to make a
-- monad transformer @T@ an instance of 'MonadBaseControl':
--
-- @
-- instance MonadBaseControl b m => MonadBaseControl b (T m) where
-- type StM (T m) a = 'ComposeSt' T m a
-- liftBaseWith = 'defaultLiftBaseWith'
-- restoreM = 'defaultRestoreM'
-- @
--
-- Defining an instance for a base monad @B@ is equally straightforward:
--
-- @
-- instance MonadBaseControl B B where
-- type StM B a = a
-- liftBaseWith f = f 'id'
-- restoreM = 'return'
-- @
-- | Handy type synonym that composes the monadic states of @t@ and @m@.
--
-- It can be used to define the 'StM' for new 'MonadBaseControl' instances.
type ComposeSt t m a = StM m (StT t a)
-- | A function like 'RunInBase' that runs a monad transformer @t@ in its base
-- monad @b@. It is used in 'defaultLiftBaseWith'.
type RunInBaseDefault t m b = forall a. t m a -> b (ComposeSt t m a)
-- | Default definition for the 'liftBaseWith' method.
--
-- It composes a 'liftWith' of @t@ with a 'liftBaseWith' of @m@ to give a
-- 'liftBaseWith' of @t m@:
--
-- @
--  defaultLiftBaseWith = \\f -> 'liftWith' $ \\run ->
--                                'liftBaseWith' $ \\runInBase ->
--                                  f $ runInBase . run
-- @
defaultLiftBaseWith :: (MonadTransControl t, MonadBaseControl b m)
                    => (RunInBaseDefault t m b -> b a) -> t m a
defaultLiftBaseWith f =
    liftWith $ \run ->
      liftBaseWith $ \runInBase ->
        f (runInBase . run)
{-# INLINABLE defaultLiftBaseWith #-}
-- | Default definition for the 'restoreM' method.
--
-- Note that: @defaultRestoreM = 'restoreT' . 'restoreM'@
defaultRestoreM :: (MonadTransControl t, MonadBaseControl b m)
                => ComposeSt t m a -> t m a
defaultRestoreM st = restoreT (restoreM st)
{-# INLINABLE defaultRestoreM #-}
--------------------------------------------------------------------------------
-- MonadBaseControl transformer instances
--------------------------------------------------------------------------------
#define BODY(T) { \
type StM (T m) a = ComposeSt (T) m a; \
liftBaseWith = defaultLiftBaseWith; \
restoreM = defaultRestoreM; \
{-# INLINABLE liftBaseWith #-}; \
{-# INLINABLE restoreM #-}}
#define TRANS( T) \
instance ( MonadBaseControl b m) => MonadBaseControl b (T m) where BODY(T)
#define TRANS_CTX(CTX, T) \
instance (CTX, MonadBaseControl b m) => MonadBaseControl b (T m) where BODY(T)
TRANS(IdentityT)
TRANS(MaybeT)
TRANS(ListT)
TRANS(ReaderT r)
TRANS(Strict.StateT s)
TRANS( StateT s)
TRANS(ExceptT e)
TRANS_CTX(Error e, ErrorT e)
TRANS_CTX(Monoid w, Strict.WriterT w)
TRANS_CTX(Monoid w, WriterT w)
TRANS_CTX(Monoid w, Strict.RWST r w s)
TRANS_CTX(Monoid w, RWST r w s)
--------------------------------------------------------------------------------
-- * Utility functions
--------------------------------------------------------------------------------
-- | Capture the current monadic state of the transformer @t@ by running a
-- trivial @return ()@ computation under it.
captureT :: (MonadTransControl t, Monad (t m), Monad m) => t m (StT t ())
captureT = liftWith (\run -> run (return ()))
{-# INLINABLE captureT #-}
-- | Capture the current monadic state of @m@ above its base monad by
-- running a trivial @return ()@ computation in the base.
captureM :: MonadBaseControl b m => m (StM m ())
captureM = liftBaseWith (\run -> run (return ()))
{-# INLINABLE captureM #-}
-- | An often used composition: @control f = 'liftBaseWith' f >>= 'restoreM'@
control :: MonadBaseControl b m => (RunInBase m b -> b (StM m a)) -> m a
control f = do
    st <- liftBaseWith f
    restoreM st
{-# INLINABLE control #-}
-- | Embed a transformer function as a function in the base monad that
-- returns the mutated transformer state.
embed :: MonadBaseControl b m => (a -> m c) -> m (a -> b (StM m c))
embed f = liftBaseWith $ \runInBase -> return $ \a -> runInBase (f a)
{-# INLINABLE embed #-}
-- | Performs the same function as 'embed', but discards the transformer
-- state produced by the embedded function.
embed_ :: MonadBaseControl b m => (a -> m ()) -> m (a -> b ())
embed_ f = liftBaseWith $ \runInBase -> return $ \a -> void (runInBase (f a))
{-# INLINABLE embed_ #-}
-- | @liftBaseOp@ is a particular application of 'liftBaseWith' that allows
-- lifting control operations of type:
--
-- @((a -> b c) -> b c)@ to: @('MonadBaseControl' b m => (a -> m c) -> m c)@.
--
-- For example:
--
-- @liftBaseOp alloca :: 'MonadBaseControl' 'IO' m => (Ptr a -> m c) -> m c@
liftBaseOp :: MonadBaseControl b m
           => ((a -> b (StM m c)) -> b (StM m d))
           -> ((a -> m c) -> m d)
liftBaseOp f g = control $ \runInBase -> f (runInBase . g)
{-# INLINABLE liftBaseOp #-}
-- | @liftBaseOp_@ is a particular application of 'liftBaseWith' that allows
-- lifting control operations of type:
--
-- @(b a -> b a)@ to: @('MonadBaseControl' b m => m a -> m a)@.
--
-- For example:
--
-- @liftBaseOp_ mask_ :: 'MonadBaseControl' 'IO' m => m a -> m a@
liftBaseOp_ :: MonadBaseControl b m
            => (b (StM m a) -> b (StM m c))
            -> (  m a       ->    m c)
liftBaseOp_ f m = control $ \runInBase -> f (runInBase m)
{-# INLINABLE liftBaseOp_ #-}
-- | @liftBaseDiscard@ is a particular application of 'liftBaseWith' that
-- allows lifting control operations of type:
--
-- @(b () -> b a)@ to: @('MonadBaseControl' b m => m () -> m a)@.
--
-- Note that, while the argument computation @m ()@ has access to the
-- captured state, all its side-effects in @m@ are discarded. It is run
-- only for its side-effects in the base monad @b@.
--
-- For example:
--
-- @liftBaseDiscard forkIO :: 'MonadBaseControl' 'IO' m => m () -> m ThreadId@
liftBaseDiscard :: MonadBaseControl b m => (b () -> b a) -> (m () -> m a)
liftBaseDiscard f m = liftBaseWith $ \runInBase -> f (void (runInBase m))
{-# INLINABLE liftBaseDiscard #-}
-- | @liftBaseOpDiscard@ is a particular application of 'liftBaseWith' that allows
-- lifting control operations of type:
--
-- @((a -> b ()) -> b c)@ to: @('MonadBaseControl' b m => (a -> m ()) -> m c)@.
--
-- Note that, while the argument computation @m ()@ has access to the captured
-- state, all its side-effects in @m@ are discarded. It is run only for its
-- side-effects in the base monad @b@.
--
-- For example:
--
-- @liftBaseOpDiscard (runServer addr port) :: 'MonadBaseControl' 'IO' m => (a -> m ()) -> m c@
liftBaseOpDiscard :: MonadBaseControl b m
                  => ((a -> b ()) -> b c)
                  ->  (a -> m ()) -> m c
liftBaseOpDiscard f g = liftBaseWith $ \runInBase -> f $ void . runInBase . g
{-# INLINABLE liftBaseOpDiscard #-}
| jwiegley/monad-control | Control/Monad/Trans/Control.hs | bsd-3-clause | 19,170 | 5 | 13 | 4,455 | 3,412 | 1,952 | 1,460 | -1 | -1 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveFunctor #-}
module Data.Piso (
-- * Partial isomorphisms
Piso(..), forward, backward,
FromPiso(..),
(:-)(..)
) where
import Prelude hiding (id, (.))
import Control.Monad ((>=>))
import Control.Category (Category(..))
-- | Bidirectional isomorphism that is total when applied in the forward
-- direction (@a -> b@), but partial when applied in the backward direction
-- (@b -> Maybe a@).
--
-- This can be used to express constructor-deconstructor pairs. For example:
--
-- > nil :: Piso t ([a] :- t)
-- > nil = Piso f g
-- > where
-- > f t = [] :- t
-- > g ([] :- t) = Just t
-- > g _ = Nothing
-- >
-- > cons :: Piso (a :- [a] :- t) ([a] :- t)
-- > cons = Piso f g
-- > where
-- > f (x :- xs :- t) = (x : xs) :- t
-- > g ((x : xs) :- t) = Just (x :- xs :- t)
-- > g _ = Nothing
--
-- Here ':-' can be read as \'cons\', forming a stack of values. For example,
-- @nil@ pushes @[]@ onto the stack; or, in the backward direction, tries to
-- remove @[]@ from the stack. Representing constructor-destructor pairs as
-- stack manipulators allows them to be composed more easily.
--
-- Module @Data.Piso.Common@ contains @Piso@s for some common datatypes.
--
-- Modules @Data.Piso.Generic@ and @Data.Piso.TH@ offer generic ways of
-- deriving @Piso@s for custom datatypes.
data Piso a b = Piso (a -> b) (b -> Maybe a)
-- Composition runs the total directions with '(.)' and chains the
-- partial directions with '>=>' (failure on either side propagates).
instance Category Piso where
  id = Piso id Just
  -- NOTE(review): the irrefutable (~) patterns presumably permit
  -- recursive/knot-tied pisos without forcing the arguments -- confirm.
  ~(Piso f1 g1) . ~(Piso f2 g2) = Piso (f1 . f2) (g1 >=> g2)
-- | Apply an isomorphism in the (total) forward direction.
forward :: Piso a b -> a -> b
forward p = case p of
  Piso f _ -> f
-- | Apply an isomorphism in the (partial) backward direction.
backward :: Piso a b -> b -> Maybe a
backward p = case p of
  Piso _ g -> g
-- | A type class that expresses that a category is able to embed 'Piso' values.
class Category cat => FromPiso cat where
  -- | Embed a partial isomorphism into the category @cat@.
  fromPiso :: Piso a b -> cat a b
instance FromPiso Piso where
  fromPiso = id
-- | Heterogeneous stack with a head and a tail. Or: an infix way to write @(,)@.
data h :- t = h :- t
  deriving (Eq, Show, Functor) -- 'Functor' maps over the tail type @t@.
infixr 5 :-
| MedeaMelana/Piso | Data/Piso.hs | bsd-3-clause | 2,142 | 0 | 9 | 531 | 361 | 215 | 146 | 24 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.NV
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- A convenience module, combining all raw modules containing NV extensions.
--
--------------------------------------------------------------------------------
module Graphics.GL.NV (
module Graphics.GL.NV.AlphaToCoverageDitherControl,
module Graphics.GL.NV.BindlessMultiDrawIndirect,
module Graphics.GL.NV.BindlessMultiDrawIndirectCount,
module Graphics.GL.NV.BindlessTexture,
module Graphics.GL.NV.BlendEquationAdvanced,
module Graphics.GL.NV.BlendEquationAdvancedCoherent,
module Graphics.GL.NV.BlendMinmaxFactor,
module Graphics.GL.NV.ClipSpaceWScaling,
module Graphics.GL.NV.CommandList,
module Graphics.GL.NV.ComputeProgram5,
module Graphics.GL.NV.ConditionalRender,
module Graphics.GL.NV.ConservativeRaster,
module Graphics.GL.NV.ConservativeRasterDilate,
module Graphics.GL.NV.ConservativeRasterPreSnap,
module Graphics.GL.NV.ConservativeRasterPreSnapTriangles,
module Graphics.GL.NV.CopyDepthToColor,
module Graphics.GL.NV.CopyImage,
module Graphics.GL.NV.DeepTexture3D,
module Graphics.GL.NV.DepthBufferFloat,
module Graphics.GL.NV.DepthClamp,
module Graphics.GL.NV.DrawTexture,
module Graphics.GL.NV.DrawVulkanImage,
module Graphics.GL.NV.Evaluators,
module Graphics.GL.NV.ExplicitMultisample,
module Graphics.GL.NV.Fence,
module Graphics.GL.NV.FillRectangle,
module Graphics.GL.NV.FloatBuffer,
module Graphics.GL.NV.FogDistance,
module Graphics.GL.NV.FragmentCoverageToColor,
module Graphics.GL.NV.FragmentProgram,
module Graphics.GL.NV.FragmentProgram2,
module Graphics.GL.NV.FramebufferMixedSamples,
module Graphics.GL.NV.FramebufferMultisampleCoverage,
module Graphics.GL.NV.GPUMulticast,
module Graphics.GL.NV.GPUProgram4,
module Graphics.GL.NV.GPUProgram5,
module Graphics.GL.NV.GPUShader5,
module Graphics.GL.NV.GeometryProgram4,
module Graphics.GL.NV.HalfFloat,
module Graphics.GL.NV.InternalformatSampleQuery,
module Graphics.GL.NV.LightMaxExponent,
module Graphics.GL.NV.MemoryAttachment,
module Graphics.GL.NV.MeshShader,
module Graphics.GL.NV.MultisampleCoverage,
module Graphics.GL.NV.MultisampleFilterHint,
module Graphics.GL.NV.OcclusionQuery,
module Graphics.GL.NV.PackedDepthStencil,
module Graphics.GL.NV.ParameterBufferObject,
module Graphics.GL.NV.PathRenderingCompatibility,
module Graphics.GL.NV.PathRenderingCore,
module Graphics.GL.NV.PathRenderingSharedEdge,
module Graphics.GL.NV.PixelDataRange,
module Graphics.GL.NV.PointSprite,
module Graphics.GL.NV.PresentVideo,
module Graphics.GL.NV.PrimitiveRestart,
module Graphics.GL.NV.QueryResource,
module Graphics.GL.NV.QueryResourceTag,
module Graphics.GL.NV.RegisterCombiners,
module Graphics.GL.NV.RegisterCombiners2,
module Graphics.GL.NV.RepresentativeFragmentTest,
module Graphics.GL.NV.RobustnessVideoMemoryPurge,
module Graphics.GL.NV.SampleLocations,
module Graphics.GL.NV.ScissorExclusive,
module Graphics.GL.NV.ShaderBufferLoad,
module Graphics.GL.NV.ShaderBufferStore,
module Graphics.GL.NV.ShaderSubgroupPartitioned,
module Graphics.GL.NV.ShaderThreadGroup,
module Graphics.GL.NV.ShadingRateImage,
module Graphics.GL.NV.TessellationProgram5,
module Graphics.GL.NV.TexgenEmboss,
module Graphics.GL.NV.TexgenReflection,
module Graphics.GL.NV.TextureBarrier,
module Graphics.GL.NV.TextureEnvCombine4,
module Graphics.GL.NV.TextureExpandNormal,
module Graphics.GL.NV.TextureMultisample,
module Graphics.GL.NV.TextureRectangle,
module Graphics.GL.NV.TextureShader,
module Graphics.GL.NV.TextureShader2,
module Graphics.GL.NV.TextureShader3,
module Graphics.GL.NV.TransformFeedback,
module Graphics.GL.NV.TransformFeedback2,
module Graphics.GL.NV.UniformBufferUnifiedMemory,
module Graphics.GL.NV.VDPAUInterop,
module Graphics.GL.NV.VDPAUInterop2,
module Graphics.GL.NV.VertexArrayRange,
module Graphics.GL.NV.VertexArrayRange2,
module Graphics.GL.NV.VertexAttribInteger64Bit,
module Graphics.GL.NV.VertexBufferUnifiedMemory,
module Graphics.GL.NV.VertexProgram,
module Graphics.GL.NV.VertexProgram2Option,
module Graphics.GL.NV.VertexProgram3,
module Graphics.GL.NV.VertexProgram4,
module Graphics.GL.NV.VideoCapture,
module Graphics.GL.NV.ViewportSwizzle
) where
import Graphics.GL.NV.AlphaToCoverageDitherControl
import Graphics.GL.NV.BindlessMultiDrawIndirect
import Graphics.GL.NV.BindlessMultiDrawIndirectCount
import Graphics.GL.NV.BindlessTexture
import Graphics.GL.NV.BlendEquationAdvanced
import Graphics.GL.NV.BlendEquationAdvancedCoherent
import Graphics.GL.NV.BlendMinmaxFactor
import Graphics.GL.NV.ClipSpaceWScaling
import Graphics.GL.NV.CommandList
import Graphics.GL.NV.ComputeProgram5
import Graphics.GL.NV.ConditionalRender
import Graphics.GL.NV.ConservativeRaster
import Graphics.GL.NV.ConservativeRasterDilate
import Graphics.GL.NV.ConservativeRasterPreSnap
import Graphics.GL.NV.ConservativeRasterPreSnapTriangles
import Graphics.GL.NV.CopyDepthToColor
import Graphics.GL.NV.CopyImage
import Graphics.GL.NV.DeepTexture3D
import Graphics.GL.NV.DepthBufferFloat
import Graphics.GL.NV.DepthClamp
import Graphics.GL.NV.DrawTexture
import Graphics.GL.NV.DrawVulkanImage
import Graphics.GL.NV.Evaluators
import Graphics.GL.NV.ExplicitMultisample
import Graphics.GL.NV.Fence
import Graphics.GL.NV.FillRectangle
import Graphics.GL.NV.FloatBuffer
import Graphics.GL.NV.FogDistance
import Graphics.GL.NV.FragmentCoverageToColor
import Graphics.GL.NV.FragmentProgram
import Graphics.GL.NV.FragmentProgram2
import Graphics.GL.NV.FramebufferMixedSamples
import Graphics.GL.NV.FramebufferMultisampleCoverage
import Graphics.GL.NV.GPUMulticast
import Graphics.GL.NV.GPUProgram4
import Graphics.GL.NV.GPUProgram5
import Graphics.GL.NV.GPUShader5
import Graphics.GL.NV.GeometryProgram4
import Graphics.GL.NV.HalfFloat
import Graphics.GL.NV.InternalformatSampleQuery
import Graphics.GL.NV.LightMaxExponent
import Graphics.GL.NV.MemoryAttachment
import Graphics.GL.NV.MeshShader
import Graphics.GL.NV.MultisampleCoverage
import Graphics.GL.NV.MultisampleFilterHint
import Graphics.GL.NV.OcclusionQuery
import Graphics.GL.NV.PackedDepthStencil
import Graphics.GL.NV.ParameterBufferObject
import Graphics.GL.NV.PathRenderingCompatibility
import Graphics.GL.NV.PathRenderingCore
import Graphics.GL.NV.PathRenderingSharedEdge
import Graphics.GL.NV.PixelDataRange
import Graphics.GL.NV.PointSprite
import Graphics.GL.NV.PresentVideo
import Graphics.GL.NV.PrimitiveRestart
import Graphics.GL.NV.QueryResource
import Graphics.GL.NV.QueryResourceTag
import Graphics.GL.NV.RegisterCombiners
import Graphics.GL.NV.RegisterCombiners2
import Graphics.GL.NV.RepresentativeFragmentTest
import Graphics.GL.NV.RobustnessVideoMemoryPurge
import Graphics.GL.NV.SampleLocations
import Graphics.GL.NV.ScissorExclusive
import Graphics.GL.NV.ShaderBufferLoad
import Graphics.GL.NV.ShaderBufferStore
import Graphics.GL.NV.ShaderSubgroupPartitioned
import Graphics.GL.NV.ShaderThreadGroup
import Graphics.GL.NV.ShadingRateImage
import Graphics.GL.NV.TessellationProgram5
import Graphics.GL.NV.TexgenEmboss
import Graphics.GL.NV.TexgenReflection
import Graphics.GL.NV.TextureBarrier
import Graphics.GL.NV.TextureEnvCombine4
import Graphics.GL.NV.TextureExpandNormal
import Graphics.GL.NV.TextureMultisample
import Graphics.GL.NV.TextureRectangle
import Graphics.GL.NV.TextureShader
import Graphics.GL.NV.TextureShader2
import Graphics.GL.NV.TextureShader3
import Graphics.GL.NV.TransformFeedback
import Graphics.GL.NV.TransformFeedback2
import Graphics.GL.NV.UniformBufferUnifiedMemory
import Graphics.GL.NV.VDPAUInterop
import Graphics.GL.NV.VDPAUInterop2
import Graphics.GL.NV.VertexArrayRange
import Graphics.GL.NV.VertexArrayRange2
import Graphics.GL.NV.VertexAttribInteger64Bit
import Graphics.GL.NV.VertexBufferUnifiedMemory
import Graphics.GL.NV.VertexProgram
import Graphics.GL.NV.VertexProgram2Option
import Graphics.GL.NV.VertexProgram3
import Graphics.GL.NV.VertexProgram4
import Graphics.GL.NV.VideoCapture
import Graphics.GL.NV.ViewportSwizzle
| haskell-opengl/OpenGLRaw | src/Graphics/GL/NV.hs | bsd-3-clause | 8,305 | 0 | 5 | 638 | 1,432 | 1,053 | 379 | 189 | 0 |
module Gidl.Backend.Ivory where
import Ivory.Artifact
import Ivory.Artifact.Template
import Data.Char (isSpace)
import Data.List (intercalate, nub)
import Text.PrettyPrint.Mainland
import qualified Paths_gidl as P
import Gidl.Interface
import Gidl.Schema
import Gidl.Backend.Cabal
import Gidl.Backend.Ivory.Types
import Gidl.Backend.Ivory.Schema
ivoryBackend :: FilePath -> [Interface] -> String -> String -> [Artifact]
ivoryBackend ivoryRepo iis pkgname namespace_raw =
[ cabalFileArtifact cf
, makefile
, stackfile ivoryRepo
, artifactPath "tests" $ codegenTest namespace
] ++ map (artifactPath "src") sources
where
sources = ivorySources iis namespace
namespace = dotwords namespace_raw
cf = (defaultCabalFile pkgname cabalmods ivoryDeps) { tests = [ cg_test ] }
cg_test = defaultCabalTest cg_test_name "CodeGen.hs"
(ivoryDeps ++ ivoryTestDeps ++ [pkgname])
cg_test_name = pkgname ++ "-gen"
cabalmods = map (filePathToPackage . artifactFileName) sources
ivoryDeps :: [String]
ivoryDeps =
[ "ivory"
, "ivory-serialize"
, "ivory-stdlib"
]
ivoryTestDeps :: [String]
ivoryTestDeps =
[ "ivory-backend-c"
]
ivorySources :: [Interface] -> [String] -> [Artifact]
ivorySources iis namespace =
tmods ++ concat smods ++ [ typeUmbrella namespace userDefinedTypes
, unpackModule namespace
]
where
userDefinedTypes = nub [ t | i <- iis, t <- interfaceTypes i, isUserDefined t ]
tmods = [ typeModule (namespace ++ ["Types"]) t
| t <- userDefinedTypes ]
smods = [ [ schemaModule (namespace ++ ["Interface"]) i (producerSchema i)
, schemaModule (namespace ++ ["Interface"]) i (consumerSchema i) ]
| i <- iis ]
dotwords :: String -> [String]
dotwords s = case dropWhile isDot s of
"" -> []
s' -> let (w, s'') = break isDot s' in w : dotwords s''
where
isDot c = (c == '.') || isSpace c
makefile :: Artifact
makefile =
artifactCabalFileTemplate P.getDataDir "support/ivory/Makefile.template" []
stackfile :: FilePath -> Artifact
stackfile ivory = artifactText "stack.yaml" $
prettyLazyText 1000 $ stack
[ text "resolver: lts-6.10"
, empty
, text "packages:"
, text "- '.'"
, text ("- location: " ++ ivory)
, text " extra-dep: true"
, text " subdirs:"
, text " - ivory"
, text " - ivory-artifact"
, text " - ivory-backend-c"
, text " - ivory-opts"
, text " - ivory-serialize"
, text " - ivory-stdlib"
, empty
, text "extra-deps:"
, text " - exception-mtl-0.4"
, text " - ghc-srcspan-plugin-0.2.1.0"
, text " - language-c-quote-0.11.6"
, text " - mainland-pretty-0.4.1.2"
, text " - symbol-0.2.4"
, text " - th-abstraction-0.2.5.0"
, empty
, text "install-ghc: true"
, empty
]
codegenTest :: [String] -> Artifact
codegenTest modulepath =
artifactCabalFileTemplate P.getDataDir fname
[("module_path", intercalate "." modulepath )]
where
fname = "support/ivory/CodeGen.hs.template"
unpackModule :: [String] -> Artifact
unpackModule modulepath =
artifactPath (intercalate "/" modulepath) $
artifactCabalFileTemplate P.getDataDir fname
[("module_path", intercalate "." modulepath )]
where
fname = "support/ivory/Unpack.hs.template"
| GaloisInc/gidl | src/Gidl/Backend/Ivory.hs | bsd-3-clause | 3,332 | 0 | 12 | 746 | 904 | 486 | 418 | 90 | 2 |
---------------------------------------------------------------------------
-- | Module : Math.Statistics.Dirichlet.Mixture
-- Copyright : (c) 2009-2012 Felipe Lessa
-- License : BSD3
--
-- Maintainer : felipe.lessa@gmail.com
-- Stability : experimental
-- Portability : portable
--
--------------------------------------------------------------------------
module Math.Statistics.Dirichlet.Mixture
( -- * Data types
DirichletMixture(..)
, dmComponents
, dmParameters
, dmDensitiesL
, (!!!)
, empty
, Component
, fromList
, toList
, fromDD
-- * Training data
, TrainingData
, prepareTraining
-- * Functions
, derive
, cost
, del_cost_w
) where
import qualified Data.Vector as V
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import Control.DeepSeq (NFData(..))
import Control.Monad.ST
import Data.Bits
import Data.Function (fix)
import Numeric.GSL.Special.Gamma (lngamma)
import Numeric.GSL.Special.Psi (psi)
import qualified Numeric.Optimization.Algorithms.HagerZhang05 as CG
import qualified Math.Statistics.Dirichlet.Density as D
import qualified Math.Statistics.Dirichlet.Matrix as M
import Math.Statistics.Dirichlet.Density (DirichletDensity(..))
import Math.Statistics.Dirichlet.Matrix (Matrix (..))
import Math.Statistics.Dirichlet.Options
import Math.Statistics.Dirichlet.Util
-- | A Dirichlet mixture.
data DirichletMixture =
DM { dmWeights :: !(U.Vector Double)
-- ^ Weights of each density.
, dmDensities :: !M.Matrix
-- ^ Values of all parameters of all densities. This
-- matrix has @length dmWeights@ rows.
} deriving (Eq)
instance Show DirichletMixture where
showsPrec prec dm =
showParen (prec > 10) $
showString "fromList " .
showsPrec 11 (toList dm)
instance Read DirichletMixture where
readsPrec p ('(':xs) = let (ys,')':zs) = break (== ')') xs
in map (\(x,s) -> (x,s++zs)) $
readsPrec p ys
readsPrec p xs = let [("fromList",list)] = lex xs
in map (\(x,s) -> (fromList x,s)) $
readsPrec p list
instance NFData DirichletMixture where
rnf DM {} = ()
-- | Number of components in a dirichlet mixture.
dmComponents :: DirichletMixture -> Int
dmComponents = U.length . dmWeights
-- | Number of parameters each component has.
dmParameters :: DirichletMixture -> Int
dmParameters = mCols . dmDensities
-- | Separated list of densities.
dmDensitiesL :: DirichletMixture -> [DirichletDensity]
dmDensitiesL (DM _ as) = map DD $ V.toList $ M.rows as
-- | @dm !!! i@ is the @i@-th density. No bounding checks are
-- made.
(!!!) :: DirichletMixture -> Int -> U.Vector Double
(DM _ as) !!! i = as M.!!! i
{-# INLINE (!!!) #-}
dmap :: (U.Vector Double -> Double) -> DirichletMixture -> U.Vector Double
dmap f = M.rowmap f . dmDensities
-- | @empty q n x@ is an \"empty\" Dirichlet mixture with @q@
-- components and @n@ parameters. Each component has size @n@,
-- weight inversely proportional to its index and all alphas set
-- to @x@.
empty :: Int -> Int -> Double -> DirichletMixture
empty q n x = let (DD d) = D.empty n x
f i = fromIntegral (q-i) / sum_
sum_ = fromIntegral (q*(q+1)`div`2)
in DM {dmWeights = U.generate q f
,dmDensities = M.replicateRows q d}
{-# INLINE empty #-}
-- | A list representation of a component of a Dirichlet mixture.
-- Used by 'fromList' and 'toList' only.
type Component = (Double, [Double])
-- | @fromList xs@ constructs a Dirichlet mixture from a
-- non-empty list of components. Each component has a weight and
-- a list of alpha values. The weights sum to 1, all lists must
-- have the same number of values and every number must be
-- non-negative. None of these preconditions are verified.
fromList :: [Component] -> DirichletMixture
fromList components =
let -- Vectors
qs = U.fromList $ map fst components
as = M q n $ U.fromList $ concatMap snd components
-- Properties of the mixture
q = length components
n = length (snd $ head components)
in DM qs as
-- | @toList dm@ is the inverse of @fromList@, constructs a list
-- of components from a Dirichlet mixture. There are no error
-- conditions and @toList . fromList == id@.
toList :: DirichletMixture -> [Component]
toList dm =
let qs' = U.toList $ dmWeights dm
as' = map (U.toList . unDD) (dmDensitiesL dm)
in zip qs' as'
-- | Constructs a Dirichlet mixture of one component from a
-- Dirichlet density.
fromDD :: DirichletDensity -> DirichletMixture
fromDD (DD d) = DM (U.singleton 1) (M.replicateRows 1 d)
-- | Prepares training vectors to be used as training data.
-- Anything that depends only on the training vectors is
-- precalculated here.
--
-- We also try to find columns where all training vectors are
-- zero. Those columns are removed from the derivation process
-- and every component will have zero value on that column. Note
-- that at least one column should have non-zero training
-- vectors.
prepareTraining :: TrainingVectors -> TrainingData
prepareTraining ns_0 =
let zeroes = zeroedCols ns_0
ns = removeZeroes ns_0 zeroes
ns_sums = G.unstream $ G.stream $ V.map U.sum ns
tns = M.fromVectorT ns
in TD {..}
-- | Pre-processed training vectors (see 'prepareTraining').
data TrainingData = TD { ns :: !TrainingVectors
, ns_sums :: !(U.Vector Double)
, tns :: !Matrix
, zeroes :: ![Int]}
deriving (Eq, Show)
-- | Return the list of columns that are zeroed, counting from zero.
zeroedCols :: TrainingVectors -> [Int]
zeroedCols =
-- We set the i-th bit whenever the i-th column was zeroed.
let fold (acc, mask) 0 = (acc .|. mask, shiftL mask 1)
fold (acc, mask) _ = (acc :: Integer, shiftL mask 1)
unBits !_ 0 = []
unBits !i x = (if testBit x 0 then (i:) else id)
(unBits (i+1) (shiftR x 1))
in unBits 0 . V.foldl1' (.&.) . V.map (fst . U.foldl' fold (0,1))
-- | Remove zeroed columns from training vectors.
removeZeroes :: TrainingVectors -> [Int] -> TrainingVectors
removeZeroes ns [] = ns
removeZeroes ns zs =
let cols_orig = U.length (V.head ns)
cols_new = U.filter (`notElem` zs) $ U.enumFromN 0 cols_orig
in V.map (flip U.backpermute cols_new) ns
-- | Remove zeroed columns from a Dirichlet mixture matrix of
-- densities.
removeZeroesM :: [Int] -> Matrix -> Matrix
removeZeroesM [] as = as
removeZeroesM zs as =
let size = M.mCols as * M.mRows as
cols_orig = M.mCols as
cols_new = U.filter ((`notElem` zs) . (`rem` cols_orig)) $
U.enumFromN 0 size
in M {mCols = M.mCols as - length zs
,mRows = M.mRows as
,mData = U.backpermute (M.mData as) cols_new}
-- | Add zeroed columns back to a Dirichlet mixture matrix of
-- densities.
addZeroesM :: [Int] -> Matrix -> Matrix
addZeroesM [] = id
addZeroesM zs' = M.fromVector .
V.map (U.fromList . add 0 zs' . U.toList) .
M.rows
where
add !_ [] xs = xs
add _ zs [] = map (const zero) zs
add i (z:zs) (x:xs) | i == z = zero : add (i+1) zs (x:xs)
| otherwise = x : add (i+1) (z:zs) xs
zero = 0.00001
-- | /Prob(a_j | n, theta)/ Defined in equation (16), "the
-- posterior probability of the j-th component of the mixture
-- given the vector of counts n". We return the probabilities
-- for all /j/ in each vector.
--
-- The order of the result is inversed for performance. In the
-- outer boxed vector there are /j/ elements. The /i/-th inner
-- unboxed vector contains that probability for each of the
-- training vectors.
--
-- Calculated as per equation (39) using 'logBeta'. If we take
-- the numerator of the right hand side of equation (39) as /Y_j/
-- and the left hand side as /P_j/, then /P_j/ is proportional to
-- /Y_j/ normalized to sum to 1. We may have problems if /P_j/
-- is too large or too small. Using the suggestion from the
-- paper, we may multiply all /P_j/ by a constant before
-- normalizing everything. We calculate /P_j/ using a logarithm,
-- so that means we may freely add or subtract a constant from
-- the logarithm before appling the exponential function. This
-- is really essencial.
prob_a_n_theta :: TrainingVectors -> DirichletMixture -> Matrix
prob_a_n_theta ns dm@(DM qs _) =
let -- Precalculate logBeta of all components
!logBetaAlphas = dmap logBeta dm
-- Calculate the factors for one of the training vectors.
calc n i lb_a = let !a = dm !!! i
in logBeta (U.zipWith (+) n a) - lb_a
factors n = let fs = U.imap (calc n) logBetaAlphas
!c = U.maximum fs -- see the note above
fs' = U.zipWith (\q f -> q * exp (f - c)) qs fs
!total = U.sum fs'
in U.map (/ total) fs'
in M.fromVectorT $ V.map factors ns
-- | Customized version of @prob_a_n_theta@ used when the weights
-- are being estimated. Precomputes everything that doesn't
-- depend on the weight.
prob_a_n_theta_weights :: TrainingVectors -> Matrix
-> (U.Vector Double -> Matrix)
prob_a_n_theta_weights ns as =
let -- Precalculate logBeta of all components
!logBetaAlphas = M.rowmap logBeta as
-- Precalculate the factors for one of the training vectors.
precalc n i lb_a = let !a = as M.!!! i
in logBeta (U.zipWith (+) n a) - lb_a
norm fs = let !c = U.maximum fs
in U.map (exp . subtract c) fs
!prefactors = V.map (norm . flip U.imap logBetaAlphas . precalc) ns
in \qs ->
let -- Calculate the final factors.
calc pfs = let fs = U.zipWith (*) pfs qs
total = U.sum fs
in U.map (/ total) fs
in M.fromVectorT $ V.map calc prefactors
-- | Cost function for deriving a Dirichlet mixture (equation
-- 18). This function is minimized by 'derive'. Calculated
-- using (17) and (54).
cost :: TrainingData -> DirichletMixture -> Double
cost td dm =
let as_sums = dmap U.sum dm
in cost_worker td dm as_sums
-- | Worker of 'cost' function that avoids repeating some
-- computations that are done when reestimating alphas.
cost_worker :: TrainingData -> DirichletMixture
-> U.Vector Double -> Double
cost_worker TD {ns, ns_sums} dm@(DM !qs _) !as_sums =
let -- From the equation (54).
prob_n_a !n !n_sum !a !a_sum !lngamma_a_sum =
let !s = lngamma (n_sum+1) + lngamma_a_sum - lngamma (n_sum+a_sum)
f n_i a_i = lngamma (n_i + a_i) - lngamma (n_i + 1) - lngamma a_i
in exp $ s + U.sum (U.zipWith f n a)
-- From equation (17).
prob_n_theta i n =
let !n_sum = ns_sums U.! i
in U.sum $ U.zipWith (*) qs $
U.izipWith (prob_n_a n n_sum . (dm !!!))
as_sums lngamma_as_sums
!lngamma_as_sums = U.map lngamma as_sums
in negate $ V.sum $ V.imap ((log .) . prob_n_theta) ns
-- | Version of 'cost' function that avoids repeating a lot of
-- computations that are done when reestimating weights.
cost_weight :: TrainingData -> Matrix
-> U.Vector Double -> (U.Vector Double -> Double)
cost_weight TD {ns, ns_sums} !as !as_sums =
let -- From the equation (54).
prob_n_a !n !n_sum !a !a_sum !lngamma_a_sum =
let !s = lngamma (n_sum+1) + lngamma_a_sum - lngamma (n_sum+a_sum)
f n_i a_i = lngamma (n_i + a_i) - lngamma (n_i + 1) - lngamma a_i
in exp $ s + U.sum (U.zipWith f n a)
-- From equation (17).
prepare_prob_n_theta i n =
let !n_sum = ns_sums U.! i
in {- U.sum $ U.zipWith (*) qs $ -}
U.izipWith (prob_n_a n n_sum . (as M.!!!))
as_sums lngamma_as_sums
!lngamma_as_sums = U.map lngamma as_sums
!prepared = V.imap prepare_prob_n_theta ns
-- Final worker function.
final qs = log . U.sum . U.zipWith (*) qs
in \(!qs) -> negate $ V.sum $ V.map (final qs) prepared
-- | Derivative of the cost function with respect @w_{i,j}@,
-- defined by Equation (22). The result is given in the same
-- size and order as the 'dmDensitites' vector.
del_cost_w :: TrainingData -> DirichletMixture -> Matrix
del_cost_w td dm =
let as_sums = dmap U.sum dm
in del_cost_w_worker td dm as_sums
-- | Worker function of 'del_cost_w'.
del_cost_w_worker :: TrainingData -> DirichletMixture
-> U.Vector Double -> Matrix
del_cost_w_worker TD {ns, ns_sums, tns} dm !as_sums =
let -- Calculate Prob(a | n, theta)
!probs_a_n = prob_a_n_theta ns dm
-- Calculate all S_j's.
!sjs = M.rowmap U.sum probs_a_n
-- @calc j _ i _ _@ calculates the derivative of the
-- cost function with respect to @w_{i,j}@. The other
-- arguments come from vector that we @zipWith@ below.
calc j probs =
-- Everything that doesn't depend on i, just on j.
let !a_sum = as_sums U.! j
!psi_a_sum = psi a_sum
!sum_prob_psi = U.sum $ U.zipWith (*) probs $
U.map (psi . (+) a_sum) ns_sums
-----
in \i a_i ->
let !s1 = (sjs U.! j) * (psi_a_sum - psi a_i)
!s2 = U.sum $ U.zipWith (\p_i n_i -> p_i * psi (n_i + a_i)) probs (tns M.!!! i)
in - a_i * (s1 + s2 - sum_prob_psi)
in M.fromVector $ V.imap (\j p_j -> let !f = calc j p_j
in U.imap f (dm !!! j))
(M.rows probs_a_n)
-- | Derive a Dirichlet mixture using a maximum likelihood method
-- as described by Karplus et al (equation 25) using CG_DESCENT
-- method by Hager and Zhang (see
-- "Numeric.Optimization.Algorithms.HagerZhang05"). All training
-- vectors should have the same length, however this is not
-- verified.
derive :: DirichletMixture -> Predicate -> StepSize
-> TrainingData -> Result DirichletMixture
derive (DM initial_qs initial_as') (Pred {..}) _ td@(TD {ns,zeroes})
| V.length ns == 0 = err "empty training data"
| U.length initial_qs < 1 = err "empty initial weights vector"
| M.size initial_as < (1,1) = err "empty initial alphas vector"
| maxIter < 1 = err "non-positive maxIter"
| minDelta < 0 = err "negative minDelta"
| jumpDelta < 0 = err "negative jumpDelta"
| jumpDelta < minDelta = err "minDelta greater than jumpDelta"
| otherwise = runST train
where
err = error . ("Dirichlet.derive: " ++)
singleDensity = U.length initial_qs == 1
-- Remove zeroes from initial_as'.
initial_as = removeZeroesM zeroes initial_as'
-- Reciprocal of the number of training sequences.
!recip_m = recip $ fromIntegral $ V.length ns
-- Calculate the sums of the alphas.
calc_as_sums = M.rowmap U.sum
-- Parameters used by CG_DESCENT.
verbose = False
parameters = CG.defaultParameters
{ CG.printFinal = verbose
, CG.printParams = verbose
, CG.verbose = if verbose then CG.VeryVerbose else CG.Quiet
, CG.maxItersFac = max 1 $ fromIntegral maxIter / 20
, CG.estimateError = CG.RelativeEpsilon (1e-6 * s)
}
where (w,h) = M.size initial_as
s = fromIntegral (w * h * V.length ns)
-- Transform a U.Vector from/to a M.Matrix in the case that
-- the matrix has the same shape as initial_as (i.e. all
-- as's and ws's).
fromMatrix = M.mData
toMatrix v = initial_as {M.mData = v}
-- Create specialized functions that are optimized by
-- CG_DESCENT. They depend only on @qs@, the weights.
createFunctions !qs =
let calc f = \ws -> let !as = M.map exp (toMatrix ws)
!as_sums = calc_as_sums as
dm = DM qs as
in f dm as_sums
grad_worker = ((fromMatrix .) .) . del_cost_w_worker
func = CG.VFunction $ calc $ cost_worker td
grad = CG.VGradient $ calc $ grad_worker td
comb = CG.VCombined $ calc $ \dm as_sums ->
(cost_worker td dm as_sums
,grad_worker td dm as_sums)
in (func, grad, comb)
-- Start training in the zero-th iteration and with
-- infinite inital cost.
train = trainAlphas 0 infinity initial_qs $ M.map log initial_as
trainAlphas !iter !oldCost !qs !ws = {-# SCC "trainAlphas" #-} do
-- Optimize using CG_DESCENT
let (func, grad, comb) = createFunctions qs
opt = CG.optimize parameters minDelta (fromMatrix ws)
func grad (Just comb)
(!pre_ws', result, stats) <- unsafeIOToST opt
let !ws' = toMatrix (G.unstream $ G.stream pre_ws')
-- Recalculate everything.
let !as' = M.map exp ws'
as_sums' = calc_as_sums as'
!iter' = iter + fromIntegral (CG.totalIters stats)
!cost' = CG.finalValue stats
!delta = abs (cost' - oldCost)
dm = DM qs $ addZeroesM zeroes as'
-- Verify convergence. Even with MaxIter we only stop
-- iterating if the delta was calculated. Otherwise we
-- wouldn't be able to tell the caller why the delta was
-- still big when we reached the limit.
case (decide result
,delta <= minDelta
,iter' >= maxIter
,singleDensity) of
(Stop r,_,_,_) -> return $ Result r iter' delta cost' dm
(_,True,_,_) -> return $ Result Delta iter' delta cost' dm
(_,_,True,_) -> return $ Result MaxIter iter' delta cost' dm
(_,_,_,True) -> return $ Result Delta iter' delta cost' dm
(GoOn,_,_,_) -> trainWeights iter' cost' qs ws' as' as_sums'
trainWeights !oldIter !veryOldCost !oldQs !ws !as !as_sums =
{-# SCC "trainWeights" #-}
-- Prepare invariant parts.
let !probs_a_n_mk = prob_a_n_theta_weights ns as
!cost_mk = cost_weight td as as_sums
in ($ oldQs) . ($ veryOldCost) . ($ maxWeightIter) . fix $
\again !itersLeft !oldCost !qs ->
-- Reestimate weight's.
let !probs_a_n = probs_a_n_mk qs
qs' = M.rowmap ((*) recip_m . U.sum) probs_a_n
-- Recalculate constants.
!cost' = cost_mk qs'
!delta = abs (cost' - oldCost)
-- Verify convergence. We never stop the process here.
in case (delta <= jumpDelta, itersLeft <= 0) of
(False,False) -> again (itersLeft-1) cost' qs'
_ -> trainAlphas oldIter cost' qs' ws
-- | Decide what we should do depending on the result of the
-- CG_DESCENT routine.
decide :: CG.Result -> Decision
decide CG.ToleranceStatisfied = GoOn
decide CG.FunctionChange = GoOn
decide CG.MaxTotalIter = GoOn
decide CG.MaxSecantIter = GoOn
decide other = Stop (CG other)
data Decision = GoOn | Stop Reason
| meteficha/statistics-dirichlet | src/Math/Statistics/Dirichlet/Mixture.hs | bsd-3-clause | 19,883 | 0 | 23 | 6,026 | 4,917 | 2,564 | 2,353 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Specify.Config where
import Specify.Constraint
import Autolib.Reader
import Autolib.ToDoc
import Data.Typeable
data Config =
Config { constraints :: System
, checks_per_constraint :: Int
}
deriving Typeable
$(derives [makeReader, makeToDoc] [''Config])
example :: Config
example = Config
{ constraints = Specify.Constraint.example
, checks_per_constraint = 1000
}
-- local variables:
-- mode: haskell
-- end:
| florianpilz/autotool | src/Specify/Config.hs | gpl-2.0 | 482 | 6 | 9 | 90 | 110 | 67 | 43 | 15 | 1 |
{-# LANGUAGE DeriveDataTypeable, CPP, MultiParamTypeClasses,
FlexibleContexts, ScopedTypeVariables, PatternGuards,
ViewPatterns #-}
{-
Copyright (C) 2006-2015 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Shared
Copyright : Copyright (C) 2006-2015 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley.edu>
Stability : alpha
Portability : portable
Utility functions and definitions used by the various Pandoc modules.
-}
module Text.Pandoc.Shared (
-- * List processing
splitBy,
splitByIndices,
splitStringByIndices,
substitute,
ordNub,
-- * Text processing
backslashEscapes,
escapeStringUsing,
stripTrailingNewlines,
trim,
triml,
trimr,
stripFirstAndLast,
camelCaseToHyphenated,
toRomanNumeral,
escapeURI,
tabFilter,
-- * Date/time
normalizeDate,
-- * Pandoc block and inline list processing
orderedListMarkers,
normalizeSpaces,
extractSpaces,
normalize,
normalizeInlines,
normalizeBlocks,
removeFormatting,
stringify,
capitalize,
compactify,
compactify',
compactify'DL,
Element (..),
hierarchicalize,
uniqueIdent,
isHeaderBlock,
headerShift,
isTightList,
addMetaField,
makeMeta,
-- * TagSoup HTML handling
renderTags',
-- * File handling
inDirectory,
getDefaultReferenceDocx,
getDefaultReferenceODT,
readDataFile,
readDataFileUTF8,
fetchItem,
fetchItem',
openURL,
collapseFilePath,
-- * Error handling
err,
warn,
mapLeft,
hush,
-- * Safe read
safeRead,
-- * Temp directory
withTempDir,
-- * Version
pandocVersion
) where
import Text.Pandoc.Definition
import Text.Pandoc.Walk
import Text.Pandoc.MediaBag (MediaBag, lookupMedia)
import Text.Pandoc.Builder (Inlines, Blocks, ToMetaValue(..))
import qualified Text.Pandoc.Builder as B
import qualified Text.Pandoc.UTF8 as UTF8
import System.Environment (getProgName)
import System.Exit (exitWith, ExitCode(..))
import Data.Char ( toLower, isLower, isUpper, isAlpha,
isLetter, isDigit, isSpace )
import Data.List ( find, stripPrefix, intercalate )
import Data.Version ( showVersion )
import qualified Data.Map as M
import Network.URI ( escapeURIString, isURI, nonStrictRelativeTo,
unEscapeString, parseURIReference, isAllowedInURI )
import qualified Data.Set as Set
import System.Directory
import System.FilePath (splitDirectories, isPathSeparator)
import qualified System.FilePath.Posix as Posix
import Text.Pandoc.MIME (MimeType, getMimeType)
import System.FilePath ( (</>), takeExtension, dropExtension)
import Data.Generics (Typeable, Data)
import qualified Control.Monad.State as S
import qualified Control.Exception as E
import Control.Monad (msum, unless, MonadPlus(..))
import Text.Pandoc.Pretty (charWidth)
import Text.Pandoc.Compat.Time
import Data.Time.Clock.POSIX
import System.IO (stderr)
import System.IO.Temp
import Text.HTML.TagSoup (renderTagsOptions, RenderOptions(..), Tag(..),
renderOptions)
import Text.Pandoc.Compat.Monoid ((<>))
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import Data.ByteString.Base64 (decodeLenient)
import Data.Sequence (ViewR(..), ViewL(..), viewl, viewr)
import qualified Data.Text as T (toUpper, pack, unpack)
import Data.ByteString.Lazy (toChunks, fromChunks)
import qualified Data.ByteString.Lazy as BL
import Paths_pandoc (version)
import Codec.Archive.Zip
#ifdef EMBED_DATA_FILES
import Text.Pandoc.Data (dataFiles)
#else
import Paths_pandoc (getDataFileName)
#endif
#ifdef HTTP_CLIENT
import Network.HTTP.Client (httpLbs, parseUrl,
responseBody, responseHeaders,
Request(port,host))
#if MIN_VERSION_http_client(0,4,18)
import Network.HTTP.Client (newManager)
#else
import Network.HTTP.Client (withManager)
#endif
import Network.HTTP.Client.Internal (addProxy)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import System.Environment (getEnv)
import Network.HTTP.Types.Header ( hContentType)
import Network (withSocketsDo)
#else
import Network.URI (parseURI)
import Network.HTTP (findHeader, rspBody,
RequestMethod(..), HeaderName(..), mkRequest)
import Network.Browser (browse, setAllowRedirects, setOutHandler, request)
#endif
-- | Version number of the pandoc library, rendered as a string
-- (e.g. @"1.2.3"@) via 'showVersion'.  The underlying 'version'
-- value is supplied by the Cabal-generated @Paths_pandoc@ module.
pandocVersion :: String
pandocVersion = showVersion version
--
-- List processing
--
-- | Split a list into pieces, treating any run of one or more
-- consecutive elements satisfying the predicate as a single
-- separator.  Separators are not included in the result; a leading
-- run of separators yields an empty first piece.
splitBy :: (a -> Bool) -> [a] -> [[a]]
splitBy _ [] = []
splitBy isSep lst = piece : splitBy isSep remainder
  where
    (piece, rest) = break isSep lst
    remainder     = dropWhile isSep rest
-- | Split a list at each of the given indices, where every index is
-- counted from the start of the /original/ list (indices are
-- rebased with 'subtract' before recursing on the remainder).
splitByIndices :: [Int] -> [a] -> [[a]]
splitByIndices [] lst = [lst]
splitByIndices (x:xs) lst =
  before : splitByIndices (map (subtract x) xs) after
  where (before, after) = splitAt x lst
-- | Split a string into chunks divided at the specified indices.
-- Unlike 'splitByIndices', positions are measured by 'splitAt'',
-- which advances by display width ('charWidth') rather than by
-- number of characters.
splitStringByIndices :: [Int] -> [Char] -> [[Char]]
splitStringByIndices [] lst = [lst]
splitStringByIndices (x:xs) lst =
  let (piece, remainder) = splitAt' x lst
  in piece : splitStringByIndices (map (subtract x) xs) remainder
-- | Width-aware variant of 'splitAt': the split point is measured by
-- accumulating @charWidth@ (defined elsewhere in this module) rather
-- than counting characters.
-- NOTE(review): presumably @charWidth@ returns the terminal display
-- width (e.g. 2 for wide CJK characters) -- confirm against its definition.
splitAt' :: Int -> [Char] -> ([Char],[Char])
splitAt' _ [] = ([],[])
splitAt' n xs | n <= 0 = ([],xs)
splitAt' n (x:xs) = (x:ys,zs)
  where (ys,zs) = splitAt' (n - charWidth x) xs
-- | Replace each occurrence of one sublist in a list with another.
-- An empty search pattern leaves the input unchanged; replacements are
-- not re-scanned, so the substitution cannot loop.
substitute :: (Eq a) => [a] -> [a] -> [a] -> [a]
substitute _ _ [] = []
substitute [] _ xs = xs
substitute needle repl full@(y:ys) =
  maybe (y : substitute needle repl ys)
        (\suffix -> repl ++ substitute needle repl suffix)
        (stripPrefix needle full)
-- | Remove duplicates while keeping first occurrences, in O(n log n)
-- using an accumulating 'Set.Set' (unlike the O(n^2) 'Data.List.nub').
ordNub :: (Ord a) => [a] -> [a]
ordNub = loop Set.empty
  where
    loop _ [] = []
    loop seen (y:ys)
      | y `Set.member` seen = loop seen ys
      | otherwise           = y : loop (Set.insert y seen) ys
--
-- Text processing
--
-- | Returns an association list of backslash escapes for the
-- designated characters: each character maps to itself preceded
-- by a literal backslash.
backslashEscapes :: [Char] -- ^ list of special characters to escape
                 -> [(Char, String)]
backslashEscapes specials = [ (c, '\\' : [c]) | c <- specials ]
-- | Escape a string of characters, using an association list of
-- characters and replacement strings. Characters without an entry
-- are passed through unchanged.
escapeStringUsing :: [(Char, String)] -> String -> String
escapeStringUsing table = concatMap escapeChar
  where escapeChar c = fromMaybe [c] (lookup c table)
-- | Strip trailing newlines from string. Interior and leading
-- newlines are kept.
stripTrailingNewlines :: String -> String
stripTrailingNewlines s =
  let trimmed = dropWhile ('\n' ==) (reverse s)
  in  reverse trimmed
-- | Remove leading and trailing whitespace (space, CR, LF, tab)
-- from string.
trim :: String -> String
trim = triml . trimr
-- | Remove leading whitespace (space, CR, LF, tab) from string.
triml :: String -> String
triml s = dropWhile isTrimmable s
  where isTrimmable c = c `elem` " \r\n\t"
-- | Remove trailing whitespace (space, CR, LF, tab) from string.
trimr :: String -> String
trimr s = reverse (triml (reverse s))
-- | Strip the first and last characters from a string.
-- Strings of length 0 or 1 yield the empty string.
stripFirstAndLast :: String -> String
stripFirstAndLast str = case str of
  []  -> []
  [_] -> []
  _   -> init (drop 1 str)
-- | Change CamelCase word to hyphenated lowercase (e.g., camel-case).
-- A hyphen is inserted only at a lower->upper boundary; runs of
-- uppercase are simply lowercased.
camelCaseToHyphenated :: String -> String
camelCaseToHyphenated s = case s of
  (lo:up:more) | isLower lo && isUpper up
           -> lo : '-' : toLower up : camelCaseToHyphenated more
  (c:more) -> toLower c : camelCaseToHyphenated more
  []       -> ""
-- | Convert number < 4000 to uppercase roman numeral.
-- Out-of-range inputs (negative or >= 4000) yield \"?\"; zero yields \"\".
toRomanNumeral :: Int -> String
toRomanNumeral n
  | n >= 4000 || n < 0 = "?"
  | otherwise          = go n pairs
  where
    -- Standard subtractive-notation value/symbol table, descending.
    pairs = [ (1000,"M"), (900,"CM"), (500,"D"), (400,"CD")
            , (100,"C"),  (90,"XC"),  (50,"L"),  (40,"XL")
            , (10,"X"),   (9,"IX"),   (5,"V"),   (4,"IV"), (1,"I") ]
    go _ [] = ""
    go k table@((v, sym):rest)
      | k >= v    = sym ++ go (k - v) table
      | otherwise = go k rest
-- | Escape whitespace and some punctuation characters in URI.
--
-- Percent-escapes only whitespace and the listed "unsafe" URI
-- characters; everything else (including already-escaped sequences)
-- is passed through untouched by 'escapeURIString'.
escapeURI :: String -> String
escapeURI = escapeURIString (not . needsEscaping)
  where needsEscaping c = isSpace c || c `elem`
                           ['<','>','|','"','{','}','[',']','^', '`']
-- | Convert tabs to spaces and filter out DOS line endings.
-- Tabs will be preserved if tab stop is set to 0.
--
-- The helper threads the number of columns remaining until the next
-- tab stop; every newline (and normalized CR / CRLF) resets it.
tabFilter :: Int     -- ^ Tab stop
          -> String  -- ^ Input
          -> String
tabFilter tabStop = expand tabStop
  where
    expand _ []             = ""
    expand _ ('\n':cs)      = '\n' : expand tabStop cs
    expand _ ('\r':'\n':cs) = '\n' : expand tabStop cs   -- DOS line ending
    expand _ ('\r':cs)      = '\n' : expand tabStop cs   -- bare CR
    expand cols ('\t':cs)
      | tabStop == 0 = '\t' : expand tabStop cs          -- keep tabs verbatim
      | otherwise    = replicate cols ' ' ++ expand tabStop cs
    expand 1    (c:cs) = c : expand tabStop cs           -- stop reached; reset
    expand cols (c:cs) = c : expand (cols - 1) cs
--
-- Date/time
--
-- | Parse a date and convert (if possible) to "YYYY-MM-DD" format.
normalizeDate :: String -> Maybe String
normalizeDate s = fmap (formatTime defaultTimeLocale "%F")
(msum $ map (\fs -> parsetimeWith fs s) formats :: Maybe Day)
where parsetimeWith =
#if MIN_VERSION_time(1,5,0)
parseTimeM True defaultTimeLocale
#else
parseTime defaultTimeLocale
#endif
formats = ["%x","%m/%d/%Y", "%D","%F", "%d %b %Y",
"%d %B %Y", "%b. %d, %Y", "%B %d, %Y", "%Y"]
--
-- Pandoc block and inline list processing
--
-- | Generate infinite lazy list of markers for an ordered list,
-- depending on list attributes.
--
-- Alphabetic styles 'cycle' through A..Z / a..z, so they wrap around
-- after 26 items; roman styles rely on 'toRomanNumeral' (which yields
-- \"?\" past 3999). The delimiter is applied uniformly to every marker.
orderedListMarkers :: (Int, ListNumberStyle, ListNumberDelim) -> [String]
orderedListMarkers (start, numstyle, numdelim) =
  let singleton c = [c]
      nums = case numstyle of
                     DefaultStyle -> map show [start..]
                     Example      -> map show [start..]
                     Decimal      -> map show [start..]
                     UpperAlpha   -> drop (start - 1) $ cycle $
                                     map singleton ['A'..'Z']
                     LowerAlpha   -> drop (start - 1) $ cycle $
                                     map singleton ['a'..'z']
                     UpperRoman   -> map toRomanNumeral [start..]
                     LowerRoman   -> map (map toLower . toRomanNumeral) [start..]
      inDelim str = case numdelim of
                           DefaultDelim -> str ++ "."
                           Period       -> str ++ "."
                           OneParen     -> str ++ ")"
                           TwoParens    -> "(" ++ str ++ ")"
  in  map inDelim nums
-- | Normalize a list of inline elements: remove leading and trailing
-- @Space@ elements, collapse double @Space@s into singles, and
-- remove empty Str elements.
normalizeSpaces :: [Inline] -> [Inline]
normalizeSpaces = cleanup . dropWhile isSpaceOrEmpty
  where cleanup []              = []
        -- A Space followed only by spaces/empties is trailing: drop it.
        -- Otherwise emit a single Space plus the next real element.
        cleanup (Space:rest)    = case dropWhile isSpaceOrEmpty rest of
                                  []            -> []
                                  (x:xs)        -> Space : x : cleanup xs
        cleanup ((Str ""):rest) = cleanup rest
        cleanup (x:rest)        = x : cleanup rest
-- | True for the elements 'normalizeSpaces' treats as removable:
-- a @Space@ or an empty @Str@.
isSpaceOrEmpty :: Inline -> Bool
isSpaceOrEmpty Space = True
isSpaceOrEmpty (Str "") = True
isSpaceOrEmpty _ = False
-- | Extract the leading and trailing spaces from inside an inline element
-- and place them outside the element.
--
-- Inspects only the outermost sequence: a leading/trailing @Space@ is
-- re-emitted outside @f@, and the contents passed to @f@ are trimmed
-- with 'B.trimInlines' so the space is not duplicated inside.
extractSpaces :: (Inlines -> Inlines) -> Inlines -> Inlines
extractSpaces f is =
  let contents = B.unMany is
      left  = case viewl contents of
                    (Space :< _) -> B.space
                    _            -> mempty
      right = case viewr contents of
                    (_ :> Space) -> B.space
                    _            -> mempty in
  (left <> f (B.trimInlines . B.Many $ contents) <> right)
-- | Normalize @Pandoc@ document, consolidating doubled 'Space's,
-- combining adjacent 'Str's and 'Emph's, remove 'Null's and
-- empty elements, etc.
normalize :: Pandoc -> Pandoc
normalize (Pandoc (Meta meta) blocks) =
Pandoc (Meta $ M.map go meta) (normalizeBlocks blocks)
where go (MetaInlines xs) = MetaInlines $ normalizeInlines xs
go (MetaBlocks xs) = MetaBlocks $ normalizeBlocks xs
go (MetaList ms) = MetaList $ map go ms
go (MetaMap m) = MetaMap $ M.map go m
go x = x
normalizeBlocks :: [Block] -> [Block]
normalizeBlocks (Null : xs) = normalizeBlocks xs
normalizeBlocks (Div attr bs : xs) =
Div attr (normalizeBlocks bs) : normalizeBlocks xs
normalizeBlocks (BlockQuote bs : xs) =
case normalizeBlocks bs of
[] -> normalizeBlocks xs
bs' -> BlockQuote bs' : normalizeBlocks xs
normalizeBlocks (BulletList [] : xs) = normalizeBlocks xs
normalizeBlocks (BulletList items : xs) =
BulletList (map normalizeBlocks items) : normalizeBlocks xs
normalizeBlocks (OrderedList _ [] : xs) = normalizeBlocks xs
normalizeBlocks (OrderedList attr items : xs) =
OrderedList attr (map normalizeBlocks items) : normalizeBlocks xs
normalizeBlocks (DefinitionList [] : xs) = normalizeBlocks xs
normalizeBlocks (DefinitionList items : xs) =
DefinitionList (map go items) : normalizeBlocks xs
where go (ils, bs) = (normalizeInlines ils, map normalizeBlocks bs)
normalizeBlocks (RawBlock _ "" : xs) = normalizeBlocks xs
normalizeBlocks (RawBlock f x : xs) =
case normalizeBlocks xs of
(RawBlock f' x' : rest) | f' == f ->
RawBlock f (x ++ ('\n':x')) : rest
rest -> RawBlock f x : rest
normalizeBlocks (Para ils : xs) =
case normalizeInlines ils of
[] -> normalizeBlocks xs
ils' -> Para ils' : normalizeBlocks xs
normalizeBlocks (Plain ils : xs) =
case normalizeInlines ils of
[] -> normalizeBlocks xs
ils' -> Plain ils' : normalizeBlocks xs
normalizeBlocks (Header lev attr ils : xs) =
Header lev attr (normalizeInlines ils) : normalizeBlocks xs
normalizeBlocks (Table capt aligns widths hdrs rows : xs) =
Table (normalizeInlines capt) aligns widths
(map normalizeBlocks hdrs) (map (map normalizeBlocks) rows)
: normalizeBlocks xs
normalizeBlocks (x:xs) = x : normalizeBlocks xs
normalizeBlocks [] = []
normalizeInlines :: [Inline] -> [Inline]
normalizeInlines (Str x : ys) =
case concat (x : map fromStr strs) of
"" -> rest
n -> Str n : rest
where
(strs, rest) = span isStr $ normalizeInlines ys
isStr (Str _) = True
isStr _ = False
fromStr (Str z) = z
fromStr _ = error "normalizeInlines - fromStr - not a Str"
normalizeInlines (Space : ys) =
if null rest
then []
else Space : rest
where isSp Space = True
isSp _ = False
rest = dropWhile isSp $ normalizeInlines ys
normalizeInlines (Emph xs : zs) =
case normalizeInlines zs of
(Emph ys : rest) -> normalizeInlines $
Emph (normalizeInlines $ xs ++ ys) : rest
rest -> case normalizeInlines xs of
[] -> rest
xs' -> Emph xs' : rest
normalizeInlines (Strong xs : zs) =
case normalizeInlines zs of
(Strong ys : rest) -> normalizeInlines $
Strong (normalizeInlines $ xs ++ ys) : rest
rest -> case normalizeInlines xs of
[] -> rest
xs' -> Strong xs' : rest
normalizeInlines (Subscript xs : zs) =
case normalizeInlines zs of
(Subscript ys : rest) -> normalizeInlines $
Subscript (normalizeInlines $ xs ++ ys) : rest
rest -> case normalizeInlines xs of
[] -> rest
xs' -> Subscript xs' : rest
normalizeInlines (Superscript xs : zs) =
case normalizeInlines zs of
(Superscript ys : rest) -> normalizeInlines $
Superscript (normalizeInlines $ xs ++ ys) : rest
rest -> case normalizeInlines xs of
[] -> rest
xs' -> Superscript xs' : rest
normalizeInlines (SmallCaps xs : zs) =
case normalizeInlines zs of
(SmallCaps ys : rest) -> normalizeInlines $
SmallCaps (normalizeInlines $ xs ++ ys) : rest
rest -> case normalizeInlines xs of
[] -> rest
xs' -> SmallCaps xs' : rest
normalizeInlines (Strikeout xs : zs) =
case normalizeInlines zs of
(Strikeout ys : rest) -> normalizeInlines $
Strikeout (normalizeInlines $ xs ++ ys) : rest
rest -> case normalizeInlines xs of
[] -> rest
xs' -> Strikeout xs' : rest
normalizeInlines (RawInline _ [] : ys) = normalizeInlines ys
normalizeInlines (RawInline f xs : zs) =
case normalizeInlines zs of
(RawInline f' ys : rest) | f == f' -> normalizeInlines $
RawInline f (xs ++ ys) : rest
rest -> RawInline f xs : rest
normalizeInlines (Code _ "" : ys) = normalizeInlines ys
normalizeInlines (Code attr xs : zs) =
case normalizeInlines zs of
(Code attr' ys : rest) | attr == attr' -> normalizeInlines $
Code attr (xs ++ ys) : rest
rest -> Code attr xs : rest
-- allow empty spans, they may carry identifiers etc.
-- normalizeInlines (Span _ [] : ys) = normalizeInlines ys
normalizeInlines (Span attr xs : zs) =
case normalizeInlines zs of
(Span attr' ys : rest) | attr == attr' -> normalizeInlines $
Span attr (normalizeInlines $ xs ++ ys) : rest
rest -> Span attr (normalizeInlines xs) : rest
normalizeInlines (Note bs : ys) = Note (normalizeBlocks bs) :
normalizeInlines ys
normalizeInlines (Quoted qt ils : ys) =
Quoted qt (normalizeInlines ils) : normalizeInlines ys
normalizeInlines (Link ils t : ys) =
Link (normalizeInlines ils) t : normalizeInlines ys
normalizeInlines (Image ils t : ys) =
Image (normalizeInlines ils) t : normalizeInlines ys
normalizeInlines (Cite cs ils : ys) =
Cite cs (normalizeInlines ils) : normalizeInlines ys
normalizeInlines (x : xs) = x : normalizeInlines xs
normalizeInlines [] = []
-- | Extract inlines, removing formatting.
removeFormatting :: Walkable Inline a => a -> [Inline]
removeFormatting = query go . walk deNote
where go :: Inline -> [Inline]
go (Str xs) = [Str xs]
go Space = [Space]
go (Code _ x) = [Str x]
go (Math _ x) = [Str x]
go LineBreak = [Space]
go _ = []
deNote (Note _) = Str ""
deNote x = x
-- | Convert pandoc structure to a string with formatting removed.
-- Footnotes are skipped (since we don't want their contents in link
-- labels).
stringify :: Walkable Inline a => a -> String
stringify = query go . walk deNote
where go :: Inline -> [Char]
go Space = " "
go (Str x) = x
go (Code _ x) = x
go (Math _ x) = x
go (RawInline (Format "html") ('<':'b':'r':_)) = " " -- see #2105
go LineBreak = " "
go _ = ""
deNote (Note _) = Str ""
deNote x = x
-- | Bring all regular text in a pandoc structure to uppercase.
--
-- This function correctly handles cases where a lowercase character doesn't
-- match to a single uppercase character – e.g. “Straße” would be converted
-- to “STRASSE”, not “STRAßE”.
capitalize :: Walkable Inline a => a -> a
capitalize = walk go
where go :: Inline -> Inline
go (Str s) = Str (T.unpack $ T.toUpper $ T.pack s)
go x = x
-- | Change final list item from @Para@ to @Plain@ if the list contains
-- no other @Para@ blocks.
compactify :: [[Block]] -- ^ List of list items (each a list of blocks)
-> [[Block]]
compactify [] = []
compactify items =
case (init items, last items) of
(_,[]) -> items
(others, final) ->
case last final of
Para a -> case (filter isPara $ concat items) of
-- if this is only Para, change to Plain
[_] -> others ++ [init final ++ [Plain a]]
_ -> items
_ -> items
-- | Change final list item from @Para@ to @Plain@ if the list contains
-- no other @Para@ blocks. Like compactify, but operates on @Blocks@ rather
-- than @[Block]@.
compactify' :: [Blocks] -- ^ List of list items (each a list of blocks)
-> [Blocks]
compactify' [] = []
compactify' items =
let (others, final) = (init items, last items)
in case reverse (B.toList final) of
(Para a:xs) -> case [Para x | Para x <- concatMap B.toList items] of
-- if this is only Para, change to Plain
[_] -> others ++ [B.fromList (reverse $ Plain a : xs)]
_ -> items
_ -> items
-- | Like @compactify'@, but acts on items of definition lists.
compactify'DL :: [(Inlines, [Blocks])] -> [(Inlines, [Blocks])]
compactify'DL items =
let defs = concatMap snd items
in case reverse (concatMap B.toList defs) of
(Para x:xs)
| not (any isPara xs) ->
let (t,ds) = last items
lastDef = B.toList $ last ds
ds' = init ds ++
if null lastDef
then [B.fromList lastDef]
else [B.fromList $ init lastDef ++ [Plain x]]
in init items ++ [(t, ds')]
| otherwise -> items
_ -> items
-- | True if the block is a 'Para' (used by the compactify helpers
-- above to decide whether a list is "loose").
isPara :: Block -> Bool
isPara (Para _) = True
isPara _ = False
-- | Data structure for defining hierarchical Pandoc documents
data Element = Blk Block
| Sec Int [Int] Attr [Inline] [Element]
-- lvl num attributes label contents
deriving (Eq, Read, Show, Typeable, Data)
instance Walkable Inline Element where
walk f (Blk x) = Blk (walk f x)
walk f (Sec lev nums attr ils elts) = Sec lev nums attr (walk f ils) (walk f elts)
walkM f (Blk x) = Blk `fmap` walkM f x
walkM f (Sec lev nums attr ils elts) = do
ils' <- walkM f ils
elts' <- walkM f elts
return $ Sec lev nums attr ils' elts'
query f (Blk x) = query f x
query f (Sec _ _ _ ils elts) = query f ils <> query f elts
instance Walkable Block Element where
walk f (Blk x) = Blk (walk f x)
walk f (Sec lev nums attr ils elts) = Sec lev nums attr (walk f ils) (walk f elts)
walkM f (Blk x) = Blk `fmap` walkM f x
walkM f (Sec lev nums attr ils elts) = do
ils' <- walkM f ils
elts' <- walkM f elts
return $ Sec lev nums attr ils' elts'
query f (Blk x) = query f x
query f (Sec _ _ _ ils elts) = query f ils <> query f elts
-- | Convert Pandoc inline list to plain text identifier. HTML
-- identifiers must start with a letter, and may contain only
-- letters, digits, and the characters _-.
inlineListToIdentifier :: [Inline] -> String
inlineListToIdentifier =
dropWhile (not . isAlpha) . intercalate "-" . words .
map (nbspToSp . toLower) .
filter (\c -> isLetter c || isDigit c || c `elem` "_-. ") .
stringify
where nbspToSp '\160' = ' '
nbspToSp x = x
-- | Convert list of Pandoc blocks into (hierarchical) list of Elements
hierarchicalize :: [Block] -> [Element]
hierarchicalize blocks = S.evalState (hierarchicalizeWithIds blocks) []
hierarchicalizeWithIds :: [Block] -> S.State [Int] [Element]
hierarchicalizeWithIds [] = return []
hierarchicalizeWithIds ((Header level attr@(_,classes,_) title'):xs) = do
lastnum <- S.get
let lastnum' = take level lastnum
let newnum = case length lastnum' of
x | "unnumbered" `elem` classes -> []
| x >= level -> init lastnum' ++ [last lastnum' + 1]
| otherwise -> lastnum ++
replicate (level - length lastnum - 1) 0 ++ [1]
unless (null newnum) $ S.put newnum
let (sectionContents, rest) = break (headerLtEq level) xs
sectionContents' <- hierarchicalizeWithIds sectionContents
rest' <- hierarchicalizeWithIds rest
return $ Sec level newnum attr title' sectionContents' : rest'
hierarchicalizeWithIds ((Div ("",["references"],[])
(Header level (ident,classes,kvs) title' : xs)):ys) =
hierarchicalizeWithIds ((Header level (ident,("references":classes),kvs)
title') : (xs ++ ys))
hierarchicalizeWithIds (x:rest) = do
rest' <- hierarchicalizeWithIds rest
return $ (Blk x) : rest'
-- | True when the block starts a section at the given level or higher
-- (i.e. a Header with level <= the argument). Also matches a
-- references Div that wraps such a Header, mirroring the special case
-- in 'hierarchicalizeWithIds'.
headerLtEq :: Int -> Block -> Bool
headerLtEq level (Header l _ _) = l <= level
headerLtEq level (Div ("",["references"],[]) (Header l _ _ : _)) = l <= level
headerLtEq _ _ = False
-- | Generate a unique identifier from a list of inlines.
-- Second argument is a list of already used identifiers.
--
-- Falls back to \"section\" when the inlines produce no identifier;
-- collisions are resolved by appending @-1@, @-2@, ... up to 60000,
-- after which duplicates are tolerated.
uniqueIdent :: [Inline] -> [String] -> String
uniqueIdent title' usedIdents
  = let baseIdent = case inlineListToIdentifier title' of
                        "" -> "section"
                        x  -> x
        numIdent n = baseIdent ++ "-" ++ show n
    in  if baseIdent `elem` usedIdents
          then case find (\x -> numIdent x `notElem` usedIdents) ([1..60000] :: [Int]) of
                 Just x  -> numIdent x
                 Nothing -> baseIdent -- if we have more than 60,000, allow repeats
          else baseIdent
-- | True if block is a Header block.
isHeaderBlock :: Block -> Bool
isHeaderBlock (Header _ _ _) = True
isHeaderBlock _ = False
-- | Shift header levels up or down.
--
-- Adds @n@ (which may be negative) to every Header level in the
-- document; no clamping is applied to the resulting level.
headerShift :: Int -> Pandoc -> Pandoc
headerShift n = walk shift
  where shift :: Block -> Block
        shift (Header level attr inner) = Header (level + n) attr inner
        shift x                         = x
-- | Detect if a list is tight.
--
-- A list is tight when every item begins with a 'Plain' block
-- (an empty item therefore makes the list loose).
isTightList :: [[Block]] -> Bool
isTightList = all firstIsPlain
  where firstIsPlain (Plain _ : _) = True
        firstIsPlain _             = False
-- | Set a field of a 'Meta' object. If the field already has a value,
-- convert it into a list with the new value appended to the old value(s).
--
-- 'M.insertWith' calls @combine new old@, so the existing value(s)
-- stay first and the new value is appended after them.
addMetaField :: ToMetaValue a
             => String
             -> a
             -> Meta
             -> Meta
addMetaField key val (Meta meta) =
  Meta $ M.insertWith combine key (toMetaValue val) meta
  where combine newval (MetaList xs) = MetaList (xs ++ tolist newval)
        combine newval x             = MetaList [x, newval]
        tolist (MetaList ys) = ys
        tolist y             = [y]
-- | Create 'Meta' from old-style title, authors, date. This is
-- provided to ease the transition from the old API.
--
-- Builds on 'addMetaField', so each field is only set once here and
-- never turned into a list.
makeMeta :: [Inline] -> [[Inline]] -> [Inline] -> Meta
makeMeta title authors date =
      addMetaField "title" (B.fromList title)
    $ addMetaField "author" (map B.fromList authors)
    $ addMetaField "date" (B.fromList date)
    $ nullMeta
--
-- TagSoup HTML handling
--
-- | Render HTML tags.
--
-- Configures TagSoup so that void elements (hr, br, img, meta, link)
-- are minimized (@<br />@) and script/style contents are emitted raw
-- (not entity-escaped). Tag-name matching is case-insensitive.
renderTags' :: [Tag String] -> String
renderTags' = renderTagsOptions
               renderOptions{ optMinimize = matchTags ["hr", "br", "img",
                                                       "meta", "link"]
                            , optRawTag   = matchTags ["script", "style"] }
  where matchTags = \tags -> flip elem tags . map toLower
--
-- File handling
--
-- | Perform an IO action in a directory, returning to starting directory.
--
-- Uses 'E.bracket', so the original working directory is restored even
-- if the action throws an exception.
inDirectory :: FilePath -> IO a -> IO a
inDirectory path action = E.bracket
                             getCurrentDirectory
                             setCurrentDirectory
                             (const $ setCurrentDirectory path >> action)
getDefaultReferenceDocx :: Maybe FilePath -> IO Archive
getDefaultReferenceDocx datadir = do
let paths = ["[Content_Types].xml",
"_rels/.rels",
"docProps/app.xml",
"docProps/core.xml",
"word/document.xml",
"word/fontTable.xml",
"word/footnotes.xml",
"word/numbering.xml",
"word/settings.xml",
"word/webSettings.xml",
"word/styles.xml",
"word/_rels/document.xml.rels",
"word/_rels/footnotes.xml.rels",
"word/theme/theme1.xml"]
let toLazy = fromChunks . (:[])
let pathToEntry path = do epochtime <- (floor . utcTimeToPOSIXSeconds) <$>
getCurrentTime
contents <- toLazy <$> readDataFile datadir
("docx/" ++ path)
return $ toEntry path epochtime contents
mbArchive <- case datadir of
Nothing -> return Nothing
Just d -> do
exists <- doesFileExist (d </> "reference.docx")
if exists
then return (Just (d </> "reference.docx"))
else return Nothing
case mbArchive of
Just arch -> toArchive <$> BL.readFile arch
Nothing -> foldr addEntryToArchive emptyArchive <$>
mapM pathToEntry paths
getDefaultReferenceODT :: Maybe FilePath -> IO Archive
getDefaultReferenceODT datadir = do
let paths = ["mimetype",
"manifest.rdf",
"styles.xml",
"content.xml",
"meta.xml",
"settings.xml",
"Configurations2/accelerator/current.xml",
"Thumbnails/thumbnail.png",
"META-INF/manifest.xml"]
let pathToEntry path = do epochtime <- floor `fmap` getPOSIXTime
contents <- (fromChunks . (:[])) `fmap`
readDataFile datadir ("odt/" ++ path)
return $ toEntry path epochtime contents
mbArchive <- case datadir of
Nothing -> return Nothing
Just d -> do
exists <- doesFileExist (d </> "reference.odt")
if exists
then return (Just (d </> "reference.odt"))
else return Nothing
case mbArchive of
Just arch -> toArchive <$> BL.readFile arch
Nothing -> foldr addEntryToArchive emptyArchive <$>
mapM pathToEntry paths
readDefaultDataFile :: FilePath -> IO BS.ByteString
readDefaultDataFile "reference.docx" =
(BS.concat . toChunks . fromArchive) <$> getDefaultReferenceDocx Nothing
readDefaultDataFile "reference.odt" =
(BS.concat . toChunks . fromArchive) <$> getDefaultReferenceODT Nothing
readDefaultDataFile fname =
#ifdef EMBED_DATA_FILES
case lookup (makeCanonical fname) dataFiles of
Nothing -> err 97 $ "Could not find data file " ++ fname
Just contents -> return contents
where makeCanonical = Posix.joinPath . transformPathParts . splitDirectories
transformPathParts = reverse . foldl go []
go as "." = as
go (_:as) ".." = as
go as x = x : as
#else
getDataFileName fname' >>= checkExistence >>= BS.readFile
where fname' = if fname == "README" then fname else "data" </> fname
checkExistence :: FilePath -> IO FilePath
checkExistence fn = do
exists <- doesFileExist fn
if exists
then return fn
else err 97 ("Could not find data file " ++ fn)
#endif
-- | Read file from specified user data directory or, if not found there, from
-- Cabal data directory.
readDataFile :: Maybe FilePath -> FilePath -> IO BS.ByteString
readDataFile Nothing fname = readDefaultDataFile fname
readDataFile (Just userDir) fname = do
exists <- doesFileExist (userDir </> fname)
if exists
then BS.readFile (userDir </> fname)
else readDefaultDataFile fname
-- | Same as 'readDataFile' but returns a String instead of a ByteString.
readDataFileUTF8 :: Maybe FilePath -> FilePath -> IO String
readDataFileUTF8 userDir fname =
UTF8.toString `fmap` readDataFile userDir fname
-- | Fetch an image or other item from the local filesystem or the net.
-- Returns raw content and maybe mime type.
--
-- Resolution order: if the (escaped) argument is itself an absolute
-- URI, fetch it directly; otherwise, if a source URL was given,
-- resolve the argument relative to it and fetch; otherwise read from
-- the local filesystem (stripping any query/fragment suffix).
-- Windows drive-letter paths (@C:\\...@) are deliberately not escaped.
fetchItem :: Maybe String -> String
          -> IO (Either E.SomeException (BS.ByteString, Maybe MimeType))
fetchItem sourceURL s =
  case (sourceURL >>= parseURIReference . ensureEscaped, ensureEscaped s) of
    (_, s') | isURI s' -> openURL s'
    (Just u, s') -> -- try fetching from relative path at source
       case parseURIReference s' of
            Just u' -> openURL $ show $ u' `nonStrictRelativeTo` u
            Nothing -> openURL s' -- will throw error
    (Nothing, _) -> E.try readLocalFile -- get from local file system
  where readLocalFile = do
          cont <- BS.readFile fp
          return (cont, mime)
        dropFragmentAndQuery = takeWhile (\c -> c /= '?' && c /= '#')
        fp = unEscapeString $ dropFragmentAndQuery s
        -- MIME type is guessed from the extension; .gz/.svgz look at
        -- the compressed file's underlying extension instead.
        mime = case takeExtension fp of
                    ".gz" -> getMimeType $ dropExtension fp
                    ".svgz" -> getMimeType $ dropExtension fp ++ ".svg"
                    x     -> getMimeType x
        ensureEscaped x@(_:':':'\\':_) = x -- likely windows path
        ensureEscaped x = escapeURIString isAllowedInURI x
-- | Like 'fetchItem', but also looks for items in a 'MediaBag'.
fetchItem' :: MediaBag -> Maybe String -> String
-> IO (Either E.SomeException (BS.ByteString, Maybe MimeType))
fetchItem' media sourceURL s = do
case lookupMedia s media of
Nothing -> fetchItem sourceURL s
Just (mime, bs) -> return $ Right (BS.concat $ toChunks bs, Just mime)
-- | Read from a URL and return raw data and maybe mime type.
openURL :: String -> IO (Either E.SomeException (BS.ByteString, Maybe MimeType))
openURL u
| Just u'' <- stripPrefix "data:" u =
let mime = takeWhile (/=',') u''
contents = B8.pack $ unEscapeString $ drop 1 $ dropWhile (/=',') u''
in return $ Right (decodeLenient contents, Just mime)
#ifdef HTTP_CLIENT
| otherwise = withSocketsDo $ E.try $ do
req <- parseUrl u
(proxy :: Either E.SomeException String) <- E.try $ getEnv "http_proxy"
let req' = case proxy of
Left _ -> req
Right pr -> case parseUrl pr of
Just r -> addProxy (host r) (port r) req
Nothing -> req
#if MIN_VERSION_http_client(0,4,18)
resp <- newManager tlsManagerSettings >>= httpLbs req'
#else
resp <- withManager tlsManagerSettings $ httpLbs req'
#endif
return (BS.concat $ toChunks $ responseBody resp,
UTF8.toString `fmap` lookup hContentType (responseHeaders resp))
#else
| otherwise = E.try $ getBodyAndMimeType `fmap` browse
(do S.liftIO $ UTF8.hPutStrLn stderr $ "Fetching " ++ u ++ "..."
setOutHandler $ const (return ())
setAllowRedirects True
request (getRequest' u'))
where getBodyAndMimeType (_, r) = (rspBody r, findHeader HdrContentType r)
getRequest' uriString = case parseURI uriString of
Nothing -> error ("Not a valid URL: " ++
uriString)
Just v -> mkRequest GET v
u' = escapeURIString (/= '|') u -- pipes are rejected by Network.URI
#endif
--
-- Error reporting
--
-- | Print a program-prefixed error message to stderr and exit with the
-- given code. The polymorphic return type comes from
-- 'exitWith' :: ExitCode -> IO a, so this never returns normally.
err :: Int -> String -> IO a
err exitCode msg = do
  name <- getProgName
  UTF8.hPutStrLn stderr $ name ++ ": " ++ msg
  -- exitWith already has type IO a; the previous trailing
  -- 'return undefined' was unreachable dead code and has been removed.
  exitWith $ ExitFailure exitCode
-- | Print a program-prefixed warning message to stderr without
-- terminating the program (unlike 'err').
warn :: String -> IO ()
warn msg = do
  name <- getProgName
  UTF8.hPutStrLn stderr $ name ++ ": " ++ msg
-- | Apply a function to the 'Left' value of an 'Either', leaving
-- 'Right' values untouched.
mapLeft :: (a -> b) -> Either a c -> Either b c
mapLeft f = either (Left . f) Right
-- | Discard the error side of an 'Either', turning 'Left' into
-- 'Nothing' and 'Right' into 'Just'.
hush :: Either a b -> Maybe b
hush = either (const Nothing) Just
-- | Remove intermediate "." and ".." directories from a path.
--
-- > collapseFilePath "./foo" == "foo"
-- > collapseFilePath "/bar/../baz" == "/baz"
-- > collapseFilePath "/../baz" == "/../baz"
-- > collapseFilePath "parent/foo/baz/../bar" == "parent/foo/bar"
-- > collapseFilePath "parent/foo/baz/../../bar" == "parent/bar"
-- > collapseFilePath "parent/foo/.." == "parent"
-- > collapseFilePath "/parent/foo/../../bar" == "/bar"
--
-- Folds over the path components, keeping a reversed stack of
-- components seen so far. ".." pops the stack unless the previous
-- component is itself ".." or the path-separator root (in which case
-- the ".." is preserved). Uses ViewPatterns
-- ('checkPathSeperator' -> ...) to detect a lone separator component.
collapseFilePath :: FilePath -> FilePath
collapseFilePath = Posix.joinPath . reverse . foldl go [] . splitDirectories
  where
    go rs "." = rs
    go r@(p:rs) ".." = case p of
                            ".."                              -> ("..":r)
                            (checkPathSeperator -> Just True) -> ("..":r)
                            _                                 -> rs
    -- A bare separator component resets the stack to the root.
    go _ (checkPathSeperator -> Just True) = [[Posix.pathSeparator]]
    go rs x = x:rs
    isSingleton [] = Nothing
    isSingleton [x] = Just x
    isSingleton _ = Nothing
    checkPathSeperator = fmap isPathSeparator . isSingleton
--
-- Safe read
--
-- | Total replacement for 'read': parses via 'reads' and succeeds in
-- the MonadPlus only when the first parse consumes everything but
-- trailing whitespace; otherwise yields 'mzero'.
safeRead :: (MonadPlus m, Read a) => String -> m a
safeRead str = maybe mzero return parsed
  where
    parsed = case reads str of
               ((v, tailStr):_)
                 | all isSpace tailStr -> Just v
               _                       -> Nothing
--
-- Temp directory
--
withTempDir :: String -> (FilePath -> IO a) -> IO a
withTempDir =
#ifdef _WINDOWS
withTempDirectory "."
#else
withSystemTempDirectory
#endif
| alexvong1995/pandoc | src/Text/Pandoc/Shared.hs | gpl-2.0 | 39,693 | 687 | 16 | 12,375 | 10,357 | 5,593 | 4,764 | 731 | 21 |
-- Copyright (c) 2010 - Seweryn Dynerowicz
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- imitations under the License.
module Algebra.Products
( Lexico(..)
, lexico
, Direct(..)
, direct
) where
import Algebra.Matrix
import Algebra.Semiring
-- | Pair wrapper for the lexicographic product of two semirings
-- (see the 'Semiring' instance below).
data Lexico s t = Lex (s,t)
                deriving (Eq)
-- | Pair wrapper for the direct (componentwise) product of two
-- semirings.
data Direct s t = Dir (s,t)
                deriving (Eq)
-- Show simply delegates to the underlying pair.
instance (Show s, Show t) => Show (Lexico s t) where
  show (Lex l) = show l
instance (Show s, Show t) => Show (Direct s t) where
  show (Dir d) = show d
-- Lexicographic product semiring: addition is decided by the first
-- component's semiring addition, with the second component only
-- added when the first components tie.
instance (Semiring s, Semiring t) => Semiring (Lexico s t) where
  addId = Lex (addId, addId)
  mulId = Lex (mulId, mulId)
  -- addS is the s-sum of the first components. If it equals both
  -- operands' s-parts, the t-parts are added; if it equals exactly
  -- one, that operand wins outright; if neither (the s-addition
  -- produced a fresh value), the t-part is reset to addId.
  add (Lex (s1,t1)) (Lex (s2,t2))
    | (addS == s1 && addS == s2) = Lex (addS,add t1 t2)
    | (addS == s1) = Lex (s1,t1)
    | (addS == s2) = Lex (s2,t2)
    | otherwise = Lex (addS, addId)
    where addS = add s1 s2
  -- Multiplication is componentwise.
  mul (Lex (s1, t1)) (Lex (s2, t2)) = Lex (mul s1 s2, mul t1 t2)
-- | Pointwise lexicographic pairing of two matrices.
--
-- Raises 'error' when the matrix orders differ, since a pointwise
-- combination is only defined for equally-sized matrices.
lexico :: (Semiring s, Semiring t) => Matrix s -> Matrix t -> Matrix (Lexico s t)
lexico as bs | (order as) == (order bs) = pointwise as bs zipL
             -- Fixed typo in the error message ("Incompatibles matrice").
             | otherwise = error "Incompatible matrix sizes"
-- | Pair two elements into the 'Lexico' wrapper.
zipL :: s -> t -> Lexico s t
zipL s t = Lex (s, t)
-- Direct product semiring: both addition and multiplication are
-- purely componentwise.
instance (Semiring s, Semiring t) => Semiring (Direct s t) where
  addId = Dir (addId, addId)
  mulId = Dir (mulId, mulId)
  add (Dir (s1, t1)) (Dir (s2, t2)) = Dir (add s1 s2, add t1 t2)
  mul (Dir (s1, t1)) (Dir (s2, t2)) = Dir (mul s1 s2, mul t1 t2)
-- | Pointwise direct-product pairing of two matrices.
--
-- Raises 'error' when the matrix orders differ, since a pointwise
-- combination is only defined for equally-sized matrices.
direct :: (Semiring s, Semiring t) => Matrix s -> Matrix t -> Matrix (Direct s t)
direct as bs
  | (order as) == (order bs)
    = pointwise as bs zipD
  | otherwise
    -- Fixed typo in the error message ("Incompatibles matrice").
    = error "Incompatible matrix sizes"
-- | Pair two elements into the 'Direct' wrapper.
zipD :: s -> t -> Direct s t
zipD s t = Dir (s, t)
| sdynerow/Semirings-Library | haskell/Algebra/Constructs/pouet.hs | apache-2.0 | 2,174 | 1 | 12 | 512 | 887 | 468 | 419 | 43 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
-- | Helpers for testing
module Tests.Helpers (
-- * helpers
T(..)
, typeName
, Double01(..)
-- * IEEE 754
, isDenorm
-- * Generic QC tests
, monotonicallyIncreases
, monotonicallyIncreasesIEEE
-- * HUnit helpers
, testAssertion
, testEquality
-- * QC helpers
, small
, unsquare
, shrinkFixedList
) where
import Data.Typeable
import Numeric.MathFunctions.Constants (m_tiny)
import Test.Tasty
import Test.Tasty.HUnit
import Test.QuickCheck
import qualified Numeric.IEEE as IEEE
import qualified Test.Tasty.HUnit as HU
-- | Phantom typed value used to select right instance in QC tests
data T a = T
-- | String representation of type name
--
-- 'typeParam' is never evaluated: 'typeOf' needs only the type, so
-- the 'undefined' body is safe here.
typeName :: Typeable a => T a -> String
typeName = show . typeOf . typeParam
  where
    typeParam :: T a -> a
    typeParam _ = undefined
-- | Check if Double denormalized
--
-- A value is subnormal when its magnitude is strictly between zero
-- and @m_tiny@ (from "Numeric.MathFunctions.Constants").
isDenorm :: Double -> Bool
isDenorm x = let ax = abs x in ax > 0 && ax < m_tiny
-- | Generates Doubles in range [0,1]
--
-- NOTE(review): 'properFraction' of a negative Double yields a
-- negative fractional part, so the claimed [0,1] range holds only for
-- non-negative inputs -- confirm the generator's intent.
newtype Double01 = Double01 Double
                   deriving (Show)
instance Arbitrary Double01 where
  arbitrary = do
    (_::Int, x) <- fmap properFraction arbitrary
    return $ Double01 x
----------------------------------------------------------------
-- Generic QC
----------------------------------------------------------------
-- Check that a function is nondecreasing: applying it to the smaller
-- of the two arguments must not exceed applying it to the larger.
monotonicallyIncreases :: (Ord a, Ord b) => (a -> b) -> a -> a -> Bool
monotonicallyIncreases f a b =
  let lo = min a b
      hi = max a b
  in  f lo <= f hi
-- Check that function is nondecreasing taking rounding errors into
-- account.
--
-- In fact funstion is allowed to decrease less than one ulp in order
-- to guard againist problems with excess precision. On x86 FPU works
-- with 80-bit numbers but doubles are 64-bit so rounding happens
-- whenever values are moved from registers to memory
--
-- The tolerance is relative: a decrease is accepted when it is
-- smaller than @y2 * epsilon@ for the IEEE type @b@.
monotonicallyIncreasesIEEE :: (Ord a, IEEE.IEEE b) => (a -> b) -> a -> a -> Bool
monotonicallyIncreasesIEEE f x1 x2 =
  y1 <= y2 || (y1 - y2) < y2 * IEEE.epsilon
  where
    y1 = f (min x1 x2)
    y2 = f (max x1 x2)
----------------------------------------------------------------
-- HUnit helpers
----------------------------------------------------------------
-- | Wrap a boolean into a named Tasty test case; the label doubles as
-- the assertion message.
testAssertion :: String -> Bool -> TestTree
testAssertion str cont = testCase str $ HU.assertBool str cont
-- | Wrap an equality check into a named Tasty test case; the label
-- doubles as the assertion message.
testEquality :: (Show a, Eq a) => String -> a -> a -> TestTree
testEquality msg a b = testCase msg $ HU.assertEqual msg a b
-- | Like 'forAll' with a size-reduced generator; useful to keep
-- quadratic-cost properties fast.
unsquare :: (Arbitrary a, Show a, Testable b) => (a -> b) -> Property
unsquare = forAll (small arbitrary)
-- | Run a generator with the QuickCheck size parameter reduced to
-- roughly its square root.
small :: Gen a -> Gen a
small act = sized $ \n -> resize (smallish n) act
  where smallish = round . (sqrt :: Double -> Double) . fromIntegral . abs
-- | Shrink a list while preserving its length: each candidate shrinks
-- exactly one element (via the supplied per-element shrinker), in
-- left-to-right order.
shrinkFixedList :: (a -> [a]) -> [a] -> [[a]]
shrinkFixedList _ [] = []
shrinkFixedList shrinkOne (y:ys) =
  [ y' : ys | y' <- shrinkOne y ]
    ++ [ y : ys' | ys' <- shrinkFixedList shrinkOne ys ]
| bos/statistics | tests/Tests/Helpers.hs | bsd-2-clause | 2,897 | 0 | 11 | 582 | 796 | 438 | 358 | 52 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Main (main) where
import Control.Monad
import Data.Bits
import Data.Word (Word8, Word64)
import Data.List (unfoldr)
import Succinct.Internal.Broadword
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.Framework.TH
import Test.HUnit hiding (Test, assert)
-- | For every bit position @i@ in [0..63]: selecting the 0th set bit of
-- a word whose only set bit is at position @i@ must return @i@.
case_select_shifted_ones :: IO ()
case_select_shifted_ones =
  forM_ [0..63] $ \i ->
    assertEqual ("for index " ++ show i) i
                (selectWord64 (shiftL 1 i) 0)
-- | Split a 'Word64' into its eight bytes, least significant byte first.
listBytes :: Word64 -> [Word8]
listBytes w = [ fromIntegral (shiftR w (8 * i)) | i <- [0 .. 7] ]
-- | Reassemble a 'Word64' from its bytes given least significant first.
--
-- Total: an empty list yields 0.  (The previous implementation used
-- 'foldr1', which crashes on @[]@; for non-empty input the result is
-- unchanged, since folding the extra 0 seed into the top byte is a
-- no-op.)
unlistBytes :: [Word8] -> Word64
unlistBytes = foldr (\b acc -> fromIntegral b .|. shiftL acc 8) 0
-- | Reference implementation of byte-wise popcount: take each byte of
-- the input, replace it by its population count, and repack the bytes.
referenceByteCounts :: Word64 -> Word64
referenceByteCounts w =
  unlistBytes [ fromIntegral (popCount b) | b <- listBytes w ]
-- | 'byteCounts' agrees with the list-based reference implementation
-- for every input word.
prop_byteCounts w =
  referenceByteCounts w == byteCounts w
-- | Run every @case_*@ / @prop_*@ test in this module, collected at
-- compile time by Template Haskell ('defaultMainGenerator').
main :: IO ()
main = $defaultMainGenerator
| Gabriel439/succinct | tests/selectWordTest.hs | bsd-2-clause | 982 | 0 | 11 | 150 | 313 | 172 | 141 | 27 | 1 |
{-|
Module : Data.STM.PriorityQueue.Internal.ListPQ
Description : STM-based Concurrent Priority Queue data structure class implementation
Copyright : (c) Alex Semin, 2015
License : BSD3
Maintainer : alllex.semin@gmail.com
Stability : experimental
Portability : portable
An implementation of 'Data.STM.PriorityQueue.Class' based on
__coarse-grained__ list of key-value pairs.
__WARNING__: this implementation has specific time complexity of operations.
Insertion is slower: /O(n)/. Deletion is faster: /O(1)/.
-}
module Data.STM.PriorityQueue.Internal.ListPQ(
ListPQ
) where
import Control.Concurrent.STM
import Data.STM.PriorityQueue.Class
-- | Priority queue represented as one transactional variable holding a
-- key-sorted association list (smallest key first).  Coarse-grained:
-- every operation contends on the single 'TVar'.
data ListPQ k v = PQ (TVar [(k, v)])

-- | Create an empty queue.
pqNew :: STM (ListPQ k v)
pqNew = PQ `fmap` newTVar []
-- | Insert a key/value pair, keeping the list sorted by key.  Runs in
-- /O(n)/.  An entry with an equal key is placed after the existing
-- entries for that key (stable insertion).
pqInsert :: (Ord k) => ListPQ k v -> k -> v -> STM ()
pqInsert (PQ cell) key val = do
    entries <- readTVar cell
    writeTVar cell (place entries)
  where
    place [] = [(key, val)]
    place (e@(key', _) : rest)
      | key >= key' = e : place rest
      | otherwise   = (key, val) : e : rest
-- | Return (without removing) the value with the smallest key;
-- retries the transaction while the queue is empty.
pqPeekMin :: ListPQ k v -> STM v
pqPeekMin (PQ cell) = do
  entries <- readTVar cell
  case entries of
    ((_, v) : _) -> return v
    []           -> retry
-- | Remove and return the value with the smallest key (/O(1)/);
-- retries the transaction while the queue is empty.
pqDeleteMin :: ListPQ k v -> STM v
pqDeleteMin (PQ cell) = do
  entries <- readTVar cell
  case entries of
    [] -> retry
    ((_, v) : rest) -> do
      writeTVar cell rest
      return v
-- | Hook the list implementation into the generic priority-queue API.
instance PriorityQueue ListPQ where
    new = pqNew
    insert = pqInsert
    peekMin = pqPeekMin
    deleteMin = pqDeleteMin
| Alllex/stm-data-collection | src/Data/STM/PriorityQueue/Internal/ListPQ.hs | bsd-3-clause | 1,530 | 0 | 12 | 419 | 443 | 230 | 213 | 34 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- From github package: https://hackage.haskell.org/package/github
-- Copyright 2011-2013 Mike Burns, 2013-2015 John Wiegley, 2016-2020 Oleg Grenrus
--
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- * Redistributions in binary form must reproduce the above
-- copyright notice, this list of conditions and the following
-- disclaimer in the documentation and/or other materials provided
-- with the distribution.
--
-- * Neither the name of Mike Burns nor the names of other
-- contributors may be used to endorse or promote products derived
-- from this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module GitHub where
import Prelude.Compat
import Control.DeepSeq (NFData (..))
import Data.Text (Text)
import Data.Time (UTCTime)
import Data.Vector (Vector)
import qualified Data.Text as T
import Data.Aeson
-------------------------------------------------------------------------------
-- Tags
-------------------------------------------------------------------------------
-- | Phantom tag marking user-related ids and names; has no runtime values.
data User
-------------------------------------------------------------------------------
-- Id
-------------------------------------------------------------------------------
-- | Numeric GitHub identifier, tagged with the kind of entity it
-- identifies so ids of different entities cannot be mixed up.
newtype Id entity = Id Int
  deriving (Show)

-- | Unwrap an 'Id'.
untagId :: Id entity -> Int
untagId (Id name) = name

instance NFData (Id entity) where
  rnf (Id s) = rnf s

-- The JSON representation is the bare number.
instance FromJSON (Id entity) where
  parseJSON = fmap Id . parseJSON

instance ToJSON (Id entity) where
  toJSON = toJSON . untagId
-------------------------------------------------------------------------------
-- Name
-------------------------------------------------------------------------------
-- | Textual GitHub name (login, label name, ...), tagged with the kind
-- of entity it belongs to.
newtype Name entity = N Text
  deriving (Show)

-- | Unwrap a 'Name'.
untagName :: Name entity -> Text
untagName (N name) = name

instance NFData (Name entity) where
  rnf (N s) = rnf s

-- The JSON representation is the bare string.
instance FromJSON (Name entity) where
  parseJSON = fmap N . parseJSON

instance ToJSON (Name entity) where
  toJSON = toJSON . untagName
-------------------------------------------------------------------------------
-- SimpleUser
-------------------------------------------------------------------------------
-- | Minimal user record as embedded inside other GitHub payloads.
data SimpleUser = SimpleUser
  { simpleUserId :: !(Id User)
  , simpleUserLogin :: !(Name User)
  , simpleUserAvatarUrl :: !URL
  , simpleUserUrl :: !URL
  }
  deriving (Show)

instance NFData SimpleUser where
  -- Forces to WHNF only; all fields are bang-annotated, so they are in
  -- WHNF once the constructor is -- presumably deeper forcing is not
  -- needed here (TODO confirm).
  rnf x = x `seq` ()

instance FromJSON SimpleUser where
  parseJSON = withObject "SimpleUser" $ \obj -> SimpleUser
    <$> obj .: "id"
    <*> obj .: "login"
    <*> obj .: "avatar_url"
    <*> obj .: "url"
-------------------------------------------------------------------------------
-- URL
-------------------------------------------------------------------------------
-- | URL kept as plain text; no parsing or validation is performed.
newtype URL = URL Text
  deriving (Show)

instance NFData URL where
  rnf (URL t) = rnf t

instance FromJSON URL where
  parseJSON = withText "URL" (pure . URL)
-------------------------------------------------------------------------------
-- IssueState
-------------------------------------------------------------------------------
-- | 'GitHub.Data.Issues.Issue' or 'GitHub.Data.PullRequests.PullRequest' state
data IssueState
  = StateOpen
  | StateClosed
  deriving (Show)

instance ToJSON IssueState where
  toJSON StateOpen = String "open"
  toJSON StateClosed = String "closed"

instance FromJSON IssueState where
  -- Case-insensitive match of "open"/"closed"; anything else fails the
  -- parse with the offending text in the message.
  parseJSON = withText "IssueState" $ \t -> case T.toLower t of
    "open" -> pure StateOpen
    "closed" -> pure StateClosed
    _ -> fail $ "Unknown IssueState: " <> T.unpack t

instance NFData IssueState where rnf x = x `seq` ()
-------------------------------------------------------------------------------
-- IssueNumber
-------------------------------------------------------------------------------
-- | Issue number as shown in the GitHub UI (distinct from the global
-- 'Id Issue').
newtype IssueNumber = IssueNumber Int
  deriving (Show)

-- | Unwrap an 'IssueNumber'.
unIssueNumber :: IssueNumber -> Int
unIssueNumber (IssueNumber i) = i

instance NFData IssueNumber where
  rnf (IssueNumber s) = rnf s

-- The JSON representation is the bare number.
instance FromJSON IssueNumber where
  parseJSON = fmap IssueNumber . parseJSON

instance ToJSON IssueNumber where
  toJSON = toJSON . unIssueNumber
-------------------------------------------------------------------------------
-- IssueLabel
-------------------------------------------------------------------------------
-- | A label attached to an issue.
data IssueLabel = IssueLabel
  { labelColor :: !Text
  , labelUrl :: !URL
  , labelName :: !(Name IssueLabel)
  , labelDesc :: !(Maybe Text)
  }
  deriving (Show)

instance NFData IssueLabel where
  -- Only the Maybe field is deeply forced; the remaining fields are
  -- strict already.
  rnf IssueLabel {..} = rnf labelDesc

instance FromJSON IssueLabel where
  parseJSON = withObject "IssueLabel" $ \o -> IssueLabel
    <$> o .: "color"
    <*> o .:? "url" .!= URL "" -- in events there aren't URL
    <*> o .: "name"
    <*> o .:? "description"
-------------------------------------------------------------------------------
-- PullRequestReference
-------------------------------------------------------------------------------
-- | Links back to the pull request an issue represents; present on an
-- 'Issue' only when it actually is a pull request.
data PullRequestReference = PullRequestReference
  { pullRequestReferenceHtmlUrl :: !(Maybe URL)
  , pullRequestReferencePatchUrl :: !(Maybe URL)
  , pullRequestReferenceDiffUrl :: !(Maybe URL)
  }
  deriving (Show)

instance NFData PullRequestReference where
  rnf PullRequestReference {..} =
    rnf pullRequestReferenceHtmlUrl `seq`
    rnf pullRequestReferencePatchUrl `seq`
    rnf pullRequestReferenceDiffUrl

instance FromJSON PullRequestReference where
  -- All three keys are optional.
  parseJSON = withObject "PullRequestReference" $ \o -> PullRequestReference
    <$> o .:? "html_url"
    <*> o .:? "patch_url"
    <*> o .:? "diff_url"
-------------------------------------------------------------------------------
-- Issue
-------------------------------------------------------------------------------
-- | An issue (possibly representing a pull request -- see
-- 'issuePullRequest') as returned by the GitHub issues API.
data Issue = Issue
  { issueClosedAt :: !(Maybe UTCTime)
  , issueUpdatedAt :: !UTCTime
  , issueEventsUrl :: !URL
  , issueHtmlUrl :: !(Maybe URL)
  , issueClosedBy :: !(Maybe SimpleUser)
  , issueLabels :: !(Vector IssueLabel)
  , issueNumber :: !IssueNumber
  , issueAssignees :: !(Vector SimpleUser)
  , issueUser :: !SimpleUser
  , issueTitle :: !Text
  , issuePullRequest :: !(Maybe PullRequestReference)
  , issueUrl :: !URL
  , issueCreatedAt :: !UTCTime
  , issueBody :: !(Maybe Text)
  , issueState :: !IssueState
  , issueId :: !(Id Issue)
  , issueComments :: !Int
  -- , issueMilestone :: !(Maybe Milestone)
  }
  deriving (Show)

instance NFData Issue where
  -- Only the two timestamp fields are deeply forced here; the other
  -- fields are left at the strictness their bang annotations give them.
  rnf Issue {..} =
    rnf issueClosedAt `seq`
    rnf issueUpdatedAt

instance FromJSON Issue where
  parseJSON = withObject "Issue" $ \o -> Issue
    <$> o .:? "closed_at"
    <*> o .: "updated_at"
    <*> o .: "events_url"
    -- NOTE(review): "html_url" and "body" have Maybe field types but are
    -- parsed with (.:), so the key must be present (only an explicit
    -- null yields Nothing), unlike the (.:?)-parsed fields -- TODO
    -- confirm this asymmetry is intended.
    <*> o .: "html_url"
    <*> o .:? "closed_by"
    <*> o .: "labels"
    <*> o .: "number"
    <*> o .: "assignees"
    <*> o .: "user"
    <*> o .: "title"
    <*> o .:? "pull_request"
    <*> o .: "url"
    <*> o .: "created_at"
    <*> o .: "body"
    <*> o .: "state"
    <*> o .: "id"
    <*> o .: "comments"
    -- <*> o .:? "milestone"
| dmjio/aeson | examples/src/GitHub.hs | bsd-3-clause | 8,596 | 0 | 41 | 1,815 | 1,496 | 820 | 676 | -1 | -1 |
--------------------------------------------------------------------
-- |
-- Module : Flickr.Contacts
-- Description : flickr.contacts - fetch user's contact list.
-- Copyright : (c) Sigbjorn Finne, 2008
-- License : BSD3
--
-- Maintainer : Sigbjorn Finne <sof@forkIO.com>
-- Stability : provisional
-- Portability : portable
--
-- flickr.contacts API, fetching a user's contact list.
--------------------------------------------------------------------
module Flickr.Contacts where
import Flickr.Monad
import Flickr.Types
import Flickr.Types.Import
import Data.Maybe (maybeToList)
-- | Get a list of contacts for the calling user.  Requires read
-- permission; the optional 'Filter' is passed through as the @filter@
-- request parameter.
getList :: Maybe Filter -> FM [Contact]
getList mbFilter = withReadPerm $
    flickTranslate toContactList $
      flickCall "flickr.contacts.getList" args
  where
    args = maybe [] (\flt -> [("filter", show flt)]) mbFilter
-- | Get the public contact list for the given user.
getPublicList :: UserID -> FM [Contact]
getPublicList uid =
  flickTranslate toContactList
    (flickCall "flickr.contacts.getPublicList" [("user_id", uid)])
| sof/flickr | Flickr/Contacts.hs | bsd-3-clause | 1,064 | 0 | 14 | 187 | 164 | 95 | 69 | 15 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Halfs.Errors
where
import Data.Word
import Foreign.C.Error
import Halfs.Monad
import Halfs.Types
-- | All errors that halfs filesystem operations can raise.  Several
-- constructors carry the context (decoded structure name, path, index,
-- ...) that produced the failure.
data HalfsError =
    HE_AbsolutePathExpected
  | HE_AllocFailed
  | HE_BadFileHandleForRead
  | HE_BadFileHandleForWrite
  | HE_DecodeFail_BlockCarrier String
  | HE_DecodeFail_Ext String
  | HE_DecodeFail_Directory String
  | HE_DecodeFail_Inode String
  | HE_DirectoryHandleNotFound
  | HE_DirectoryNotEmpty
  | HE_ErrnoAnnotated HalfsError Errno -- ^ An error paired with a C errno.
  | HE_FileNotFound
  | HE_FsckErr HalfsError
  | HE_InternalError String
  | HE_InvalidExtIdx
  | HE_InvalidStreamIndex Word64
  | HE_InvalidDirHandle
  | HE_InvalidFileHandle
  | HE_MountFailed RsnHalfsMountFail
  | HE_ObjectExists FilePath
  | HE_ObjectDNE FilePath
  | HE_PathComponentNotFound String
  | HE_RenameFailed
  | HE_TestFailed String
  | HE_UnexpectedFileType FileType FilePath
  | HE_UnmountFailed
  deriving (Eq, Show)

-- | Reasons a mount can fail.
data RsnHalfsMountFail =
    BadSuperBlock String -- ^ Superblock failed to decode/validate.
  | DirtyUnmount         -- ^ Previous session was not cleanly unmounted.
  deriving (Eq, Show)
-- | Throw the given error annotated with the given C errno.
annErrno :: MonadError HalfsError m => HalfsError -> Errno -> m a
annErrno e errno = throwError (HE_ErrnoAnnotated e errno)

-- | Flipped-argument variant of 'annErrno' specialised to 'HalfsT'.
throwErrno :: Monad m => Errno -> HalfsError -> HalfsT HalfsError env m a
throwErrno en e = throwError (HE_ErrnoAnnotated e en)
-- Orphan Show instance for 'Errno': map each known errno constant to
-- its conventional symbolic name via an association table.
-- (TODO: template haskell to make this a bit cleaner?)
instance Show Errno where
  show en = case lookup en errnoNames of
              Just nm -> nm
              Nothing -> "<unknown errno>"
    where
      -- Order matters: on platforms where two constants share a value
      -- (e.g. EAGAIN/EWOULDBLOCK), 'lookup' returns the earlier entry,
      -- matching the behaviour of the original guard chain.
      errnoNames =
        [ (eOK,              "EOK")
        , (e2BIG,            "E2BIG")
        , (eACCES,           "EACCES")
        , (eADDRINUSE,       "EADDRINUSE")
        , (eADDRNOTAVAIL,    "EADDRNOTAVAIL")
        , (eADV,             "EADV")
        , (eAFNOSUPPORT,     "EAFNOSUPPORT")
        , (eAGAIN,           "EAGAIN")
        , (eALREADY,         "EALREADY")
        , (eBADF,            "EBADF")
        , (eBADMSG,          "EBADMSG")
        , (eBADRPC,          "EBADRPC")
        , (eBUSY,            "EBUSY")
        , (eCHILD,           "ECHILD")
        , (eCOMM,            "ECOMM")
        , (eCONNABORTED,     "ECONNABORTED")
        , (eCONNREFUSED,     "ECONNREFUSED")
        , (eCONNRESET,       "ECONNRESET")
        , (eDEADLK,          "EDEADLK")
        , (eDESTADDRREQ,     "EDESTADDRREQ")
        , (eDIRTY,           "EDIRTY")
        , (eDOM,             "EDOM")
        , (eDQUOT,           "EDQUOT")
        , (eEXIST,           "EEXIST")
        , (eFAULT,           "EFAULT")
        , (eFBIG,            "EFBIG")
        , (eFTYPE,           "EFTYPE")
        , (eHOSTDOWN,        "EHOSTDOWN")
        , (eHOSTUNREACH,     "EHOSTUNREACH")
        , (eIDRM,            "EIDRM")
        , (eILSEQ,           "EILSEQ")
        , (eINPROGRESS,      "EINPROGRESS")
        , (eINTR,            "EINTR")
        , (eINVAL,           "EINVAL")
        , (eIO,              "EIO")
        , (eISCONN,          "EISCONN")
        , (eISDIR,           "EISDIR")
        , (eLOOP,            "ELOOP")
        , (eMFILE,           "EMFILE")
        , (eMLINK,           "EMLINK")
        , (eMSGSIZE,         "EMSGSIZE")
        , (eMULTIHOP,        "EMULTIHOP")
        , (eNAMETOOLONG,     "ENAMETOOLONG")
        , (eNETDOWN,         "ENETDOWN")
        , (eNETRESET,        "ENETRESET")
        , (eNETUNREACH,      "ENETUNREACH")
        , (eNFILE,           "ENFILE")
        , (eNOBUFS,          "ENOBUFS")
        , (eNODATA,          "ENODATA")
        , (eNODEV,           "ENODEV")
        , (eNOENT,           "ENOENT")
        , (eNOEXEC,          "ENOEXEC")
        , (eNOLCK,           "ENOLCK")
        , (eNOLINK,          "ENOLINK")
        , (eNOMEM,           "ENOMEM")
        , (eNOMSG,           "ENOMSG")
        , (eNONET,           "ENONET")
        , (eNOPROTOOPT,      "ENOPROTOOPT")
        , (eNOSPC,           "ENOSPC")
        , (eNOSR,            "ENOSR")
        , (eNOSTR,           "ENOSTR")
        , (eNOSYS,           "ENOSYS")
        , (eNOTBLK,          "ENOTBLK")
        , (eNOTCONN,         "ENOTCONN")
        , (eNOTDIR,          "ENOTDIR")
        , (eNOTEMPTY,        "ENOTEMPTY")
        , (eNOTSOCK,         "ENOTSOCK")
        , (eNOTTY,           "ENOTTY")
        , (eNXIO,            "ENXIO")
        , (eOPNOTSUPP,       "EOPNOTSUPP")
        , (ePERM,            "EPERM")
        , (ePFNOSUPPORT,     "EPFNOSUPPORT")
        , (ePIPE,            "EPIPE")
        , (ePROCLIM,         "EPROCLIM")
        , (ePROCUNAVAIL,     "EPROCUNAVAIL")
        , (ePROGMISMATCH,    "EPROGMISMATCH")
        , (ePROGUNAVAIL,     "EPROGUNAVAIL")
        , (ePROTO,           "EPROTO")
        , (ePROTONOSUPPORT,  "EPROTONOSUPPORT")
        , (ePROTOTYPE,       "EPROTOTYPE")
        , (eRANGE,           "ERANGE")
        , (eREMCHG,          "EREMCHG")
        , (eREMOTE,          "EREMOTE")
        , (eROFS,            "EROFS")
        , (eRPCMISMATCH,     "ERPCMISMATCH")
        , (eRREMOTE,         "ERREMOTE")
        , (eSHUTDOWN,        "ESHUTDOWN")
        , (eSOCKTNOSUPPORT,  "ESOCKTNOSUPPORT")
        , (eSPIPE,           "ESPIPE")
        , (eSRCH,            "ESRCH")
        , (eSRMNT,           "ESRMNT")
        , (eSTALE,           "ESTALE")
        , (eTIME,            "ETIME")
        , (eTIMEDOUT,        "ETIMEDOUT")
        , (eTOOMANYREFS,     "ETOOMANYREFS")
        , (eTXTBSY,          "ETXTBSY")
        , (eUSERS,           "EUSERS")
        , (eWOULDBLOCK,      "EWOULDBLOCK")
        , (eXDEV,            "EXDEV")
        ]
| hackern/halfs | Halfs/Errors.hs | bsd-3-clause | 6,058 | 0 | 9 | 2,572 | 1,584 | 764 | 820 | 144 | 1 |
module Reddit.Types.PostSpec where
import Reddit.Types.Post
import Data.Aeson (eitherDecode)
import Data.ByteString.Lazy (ByteString)
import Test.Hspec
-- | True exactly when the value is a 'Left'.
isLeft :: Either a b -> Bool
isLeft (Left _)  = True
isLeft (Right _) = False
-- | Run the spec with hspec's default runner.
main :: IO ()
main = hspec spec
-- | JSON decoding tests for 'PostID': both the fullname form
-- (@"t3_..."@) and the bare id must decode; a fullname of another kind
-- (@"t5_..."@, a subreddit) must fail.
spec :: Spec
spec = describe "Reddit.Types.Post" $ do
  let decode' = eitherDecode :: ByteString -> Either String PostID
  it "can parse a PostID" $ do
    decode' "\"t3_1n1qrg\"" `shouldBe` Right (PostID "1n1qrg")
    decode' "\"1n1qrg\"" `shouldBe` Right (PostID "1n1qrg")
    decode' "\"t5_2s580\"" `shouldSatisfy` isLeft
| intolerable/reddit | test/Reddit/Types/PostSpec.hs | bsd-2-clause | 585 | 0 | 14 | 99 | 190 | 99 | 91 | 16 | 1 |
module Distribution.Client.Dependency.Modular.ConfiguredConversion
( convCP
) where
import Data.Maybe
import Prelude hiding (pi)
import Distribution.Package (UnitId)
import Distribution.Client.Types
import Distribution.Client.Dependency.Types (ResolverPackage(..))
import qualified Distribution.Client.PackageIndex as CI
import qualified Distribution.Simple.PackageIndex as SI
import Distribution.Client.Dependency.Modular.Configured
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.ComponentDeps (ComponentDeps)
-- | Converts from the solver specific result @CP QPN@ into
-- a 'ResolverPackage', which can then be converted into
-- the install plan.
convCP :: SI.InstalledPackageIndex ->
          CI.PackageIndex SourcePackage ->
          CP QPN -> ResolverPackage
convCP iidx sidx (CP qpi fa es ds) =
  case convPI qpi of
    -- Already-installed package: look it up in the installed index.
    -- NOTE(review): 'fromJust' (and the irrefutable 'Just srcpkg'
    -- below) assume the solver only produces ids present in the
    -- corresponding index -- not locally checkable here.
    Left pi -> PreExisting
                 (fromJust $ SI.lookupUnitId iidx pi)
    Right pi -> Configured $ ConfiguredPackage
                  srcpkg
                  fa
                  es
                  ds'
      where
        Just srcpkg = CI.lookupPackageId sidx pi
  where
    -- Translate every component's dependency list into ConfiguredIds.
    ds' :: ComponentDeps [ConfiguredId]
    ds' = fmap (map convConfId) ds
-- | Classify a solver package instance: an already-installed instance
-- yields its 'UnitId' ('Left'); anything else yields its source
-- 'PackageId' ('Right').
convPI :: PI QPN -> Either UnitId PackageId
convPI (PI _ (I _ (Inst pi))) = Left pi
convPI qpi = Right $ confSrcId $ convConfId qpi
-- | Build a 'ConfiguredId' from a solver package instance.  The source
-- id is always @pn-v@; the installed id is the real 'UnitId' when the
-- package is installed, and a fake one derived from the source id
-- otherwise.
convConfId :: PI QPN -> ConfiguredId
convConfId (PI (Q _ pn) (I v loc)) = ConfiguredId {
      confSrcId  = sourceId
    , confInstId = installedId
    }
  where
    sourceId    = PackageIdentifier pn v
    installedId = case loc of
                    Inst pi    -> pi
                    _otherwise -> fakeUnitId sourceId
| tolysz/prepare-ghcjs | spec-lts8/cabal/cabal-install/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs | bsd-3-clause | 1,712 | 0 | 12 | 444 | 427 | 232 | 195 | 38 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.PackageDescription
-- Copyright : Isaac Jones 2003-2005
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This defines the data structure for the @.cabal@ file format. There are
-- several parts to this structure. It has top level info and then 'Library',
-- 'Executable', 'TestSuite', and 'Benchmark' sections each of which have
-- associated 'BuildInfo' data that's used to build the library, exe, test, or
-- benchmark. To further complicate things there is both a 'PackageDescription'
-- and a 'GenericPackageDescription'. This distinction relates to cabal
-- configurations. When we initially read a @.cabal@ file we get a
-- 'GenericPackageDescription' which has all the conditional sections.
-- Before actually building a package we have to decide
-- on each conditional. Once we've done that we get a 'PackageDescription'.
-- It was done this way initially to avoid breaking too much stuff when the
-- feature was introduced. It could probably do with being rationalised at some
-- point to make it simpler.
module Distribution.PackageDescription (
-- * Package descriptions
PackageDescription(..),
emptyPackageDescription,
specVersion,
descCabalVersion,
BuildType(..),
knownBuildTypes,
-- ** Renaming
ModuleRenaming(..),
defaultRenaming,
lookupRenaming,
-- ** Libraries
Library(..),
ModuleReexport(..),
emptyLibrary,
withLib,
hasPublicLib,
hasLibs,
libModules,
-- ** Executables
Executable(..),
emptyExecutable,
withExe,
hasExes,
exeModules,
-- * Tests
TestSuite(..),
TestSuiteInterface(..),
TestType(..),
testType,
knownTestTypes,
emptyTestSuite,
hasTests,
withTest,
testModules,
enabledTests,
-- * Benchmarks
Benchmark(..),
BenchmarkInterface(..),
BenchmarkType(..),
benchmarkType,
knownBenchmarkTypes,
emptyBenchmark,
hasBenchmarks,
withBenchmark,
benchmarkModules,
enabledBenchmarks,
-- * Build information
BuildInfo(..),
emptyBuildInfo,
allBuildInfo,
allLanguages,
allExtensions,
usedExtensions,
hcOptions,
hcProfOptions,
hcSharedOptions,
-- ** Supplementary build information
ComponentName(..),
defaultLibName,
HookedBuildInfo,
emptyHookedBuildInfo,
updatePackageDescription,
-- * package configuration
GenericPackageDescription(..),
Flag(..), FlagName(..), FlagAssignment,
CondTree(..), ConfVar(..), Condition(..),
cNot, cAnd, cOr,
-- * Source repositories
SourceRepo(..),
RepoKind(..),
RepoType(..),
knownRepoTypes,
-- * Custom setup build information
SetupBuildInfo(..),
) where
import Distribution.Compat.Binary
import qualified Distribution.Compat.Semigroup as Semi ((<>))
import Distribution.Compat.Semigroup as Semi (Monoid(..), Semigroup, gmempty)
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP ((<++))
import Distribution.Package
import Distribution.ModuleName
import Distribution.Version
import Distribution.License
import Distribution.Compiler
import Distribution.System
import Distribution.Text
import Language.Haskell.Extension
import Data.Data (Data)
import Data.List (nub, intercalate)
import Data.Maybe (fromMaybe, maybeToList)
import Data.Foldable as Fold (Foldable(foldMap))
import Data.Traversable as Trav (Traversable(traverse))
import Data.Typeable ( Typeable )
import Control.Applicative as AP (Alternative(..), Applicative(..))
import Control.Monad (MonadPlus(mplus,mzero), ap)
import GHC.Generics (Generic)
import Text.PrettyPrint as Disp
import qualified Data.Char as Char (isAlphaNum, isDigit, toLower)
import qualified Data.Map as Map
import Data.Map (Map)
-- -----------------------------------------------------------------------------
-- The PackageDescription type
-- | This data type is the internal representation of the file @pkg.cabal@.
-- It contains two kinds of information about the package: information
-- which is needed for all packages, such as the package name and version, and
-- information which is needed for the simple build system only, such as
-- the compiler options and library name.
--
data PackageDescription
  = PackageDescription {
        -- the following are required by all packages:
        package        :: PackageIdentifier, -- ^ Name and version of the package.
        license        :: License,
        licenseFiles   :: [FilePath],
        copyright      :: String,
        maintainer     :: String,
        author         :: String,
        stability      :: String,
        testedWith     :: [(CompilerFlavor,VersionRange)],
        homepage       :: String,
        pkgUrl         :: String,
        bugReports     :: String,
        sourceRepos    :: [SourceRepo],
        synopsis       :: String, -- ^A one-line summary of this package
        description    :: String, -- ^A more verbose description of this package
        category       :: String,
        customFieldsPD :: [(String,String)], -- ^Custom fields starting
                                             -- with x-, stored in a
                                             -- simple assoc-list.

        -- | YOU PROBABLY DON'T WANT TO USE THIS FIELD. This field is
        -- special! Depending on how far along processing the
        -- PackageDescription we are, the contents of this field are
        -- either nonsense, or the collected dependencies of *all* the
        -- components in this package. buildDepends is initialized by
        -- 'finalizePackageDescription' and 'flattenPackageDescription';
        -- prior to that, dependency info is stored in the 'CondTree'
        -- built around a 'GenericPackageDescription'. When this
        -- resolution is done, dependency info is written to the inner
        -- 'BuildInfo' and this field. This is all horrible, and #2066
        -- tracks progress to get rid of this field.
        buildDepends   :: [Dependency],
        -- | The version of the Cabal spec that this package description uses.
        -- For historical reasons this is specified with a version range but
        -- only ranges of the form @>= v@ make sense. We are in the process of
        -- transitioning to specifying just a single version, not a range.
        specVersionRaw :: Either Version VersionRange,
        buildType      :: Maybe BuildType,
        setupBuildInfo :: Maybe SetupBuildInfo,
        -- components
        libraries      :: [Library],
        executables    :: [Executable],
        testSuites     :: [TestSuite],
        benchmarks     :: [Benchmark],
        dataFiles      :: [FilePath], -- ^ Data files, relative to 'dataDir'.
        dataDir        :: FilePath,
        extraSrcFiles  :: [FilePath],
        extraTmpFiles  :: [FilePath],
        extraDocFiles  :: [FilePath]
    }
    deriving (Generic, Show, Read, Eq, Typeable, Data)

instance Binary PackageDescription

instance Package PackageDescription where
  packageId = package
-- | The version of the Cabal spec that this package should be interpreted
-- against.
--
-- Historically we used a version range but we are switching to using a single
-- version. Currently we accept either. This function converts into a single
-- version by ignoring upper bounds in the version range.
--
specVersion :: PackageDescription -> Version
specVersion pkg = case specVersionRaw pkg of
  Left  version      -> version
  Right versionRange -> case asVersionIntervals versionRange of
                          -- Empty interval set (unsatisfiable range):
                          -- fall back to version 0.
                          []                            -> Version [0] []
                          -- Otherwise take the lower bound of the first
                          -- interval, ignoring any upper bound.
                          ((LowerBound version _, _):_) -> version
-- | The range of versions of the Cabal tools that this package is intended to
-- work with.
--
-- This function is deprecated and should not be used for new purposes, only to
-- support old packages that rely on the old interpretation.
--
descCabalVersion :: PackageDescription -> VersionRange
descCabalVersion pkg = case specVersionRaw pkg of
  -- A single version is interpreted as "that version or later".
  Left  version      -> orLaterVersion version
  Right versionRange -> versionRange
{-# DEPRECATED descCabalVersion "Use specVersion instead" #-}
-- | A 'PackageDescription' with every field empty/unspecified: empty
-- name, empty version, 'UnspecifiedLicense', any spec version, no
-- components.
emptyPackageDescription :: PackageDescription
emptyPackageDescription
  = PackageDescription {
                      package      = PackageIdentifier (PackageName "")
                                                       (Version [] []),
                      license      = UnspecifiedLicense,
                      licenseFiles = [],
                      specVersionRaw = Right anyVersion,
                      buildType    = Nothing,
                      copyright    = "",
                      maintainer   = "",
                      author       = "",
                      stability    = "",
                      testedWith   = [],
                      buildDepends = [],
                      homepage     = "",
                      pkgUrl       = "",
                      bugReports   = "",
                      sourceRepos  = [],
                      synopsis     = "",
                      description  = "",
                      category     = "",
                      customFieldsPD = [],
                      setupBuildInfo = Nothing,
                      libraries    = [],
                      executables  = [],
                      testSuites   = [],
                      benchmarks   = [],
                      dataFiles    = [],
                      dataDir      = "",
                      extraSrcFiles = [],
                      extraTmpFiles = [],
                      extraDocFiles = []
                     }
-- | The type of build system used by this package.
data BuildType
  = Simple    -- ^ calls @Distribution.Simple.defaultMain@
  | Configure -- ^ calls @Distribution.Simple.defaultMainWithHooks defaultUserHooks@,
              -- which invokes @configure@ to generate additional build
              -- information used by later phases.
  | Make      -- ^ calls @Distribution.Make.defaultMain@
  | Custom    -- ^ uses user-supplied @Setup.hs@ or @Setup.lhs@ (default)
  | UnknownBuildType String
              -- ^ a package that uses an unknown build type cannot actually
              -- be built. Doing it this way rather than just giving a
              -- parse error means we get better error messages and allows
              -- you to inspect the rest of the package description.
  deriving (Generic, Show, Read, Eq, Typeable, Data)

instance Binary BuildType

-- | All build types that can actually be built (everything except
-- 'UnknownBuildType').
knownBuildTypes :: [BuildType]
knownBuildTypes = [Simple, Configure, Make, Custom]

instance Text BuildType where
  disp (UnknownBuildType other) = Disp.text other
  disp other                    = Disp.text (show other)
  -- Any alphanumeric word parses; unrecognised words become
  -- 'UnknownBuildType' rather than a parse error.
  parse = do
    name <- Parse.munch1 Char.isAlphaNum
    return $ case name of
      "Simple"    -> Simple
      "Configure" -> Configure
      "Custom"    -> Custom
      "Make"      -> Make
      _           -> UnknownBuildType name
-- ---------------------------------------------------------------------------
-- The SetupBuildInfo type

-- One can see this as a very cut-down version of BuildInfo below.
-- To keep things simple for tools that compile Setup.hs we limit the
-- options authors can specify to just Haskell package dependencies.

data SetupBuildInfo = SetupBuildInfo {
        setupDepends        :: [Dependency],
        defaultSetupDepends :: Bool
        -- ^ Is this a default 'custom-setup' section added by the cabal-install
        -- code (as opposed to user-provided)? This field is only used
        -- internally, and doesn't correspond to anything in the .cabal
        -- file. See #3199.
    }
    deriving (Generic, Show, Eq, Read, Typeable, Data)

instance Binary SetupBuildInfo

instance Semi.Monoid SetupBuildInfo where
  mempty  = SetupBuildInfo [] False
  mappend = (Semi.<>)

instance Semigroup SetupBuildInfo where
  -- Dependencies are concatenated; the "default" flag is sticky (set if
  -- either side set it).
  a <> b = SetupBuildInfo (setupDepends a Semi.<> setupDepends b)
                          (defaultSetupDepends a || defaultSetupDepends b)
-- ---------------------------------------------------------------------------
-- Module renaming

-- | Renaming applied to the modules provided by a package.
-- The boolean indicates whether or not to also include all of the
-- original names of modules. Thus, @ModuleRenaming False []@ is
-- "don't expose any modules, and @ModuleRenaming True [("Data.Bool", "Bool")]@
-- is, "expose all modules, but also expose @Data.Bool@ as @Bool@".
--
data ModuleRenaming = ModuleRenaming Bool [(ModuleName, ModuleName)]
    deriving (Show, Read, Eq, Ord, Typeable, Data, Generic)

-- | Expose all modules under their original names, with no extra renamings.
defaultRenaming :: ModuleRenaming
defaultRenaming = ModuleRenaming True []

-- | Look up a package's renaming, falling back to 'defaultRenaming'
-- when it has no explicit entry.
lookupRenaming :: Package pkg => pkg -> Map PackageName ModuleRenaming -> ModuleRenaming
lookupRenaming = Map.findWithDefault defaultRenaming . packageName

instance Binary ModuleRenaming where

instance Monoid ModuleRenaming where
    mempty = ModuleRenaming False []
    mappend = (Semi.<>)

instance Semigroup ModuleRenaming where
    ModuleRenaming b rns <> ModuleRenaming b' rns'
        = ModuleRenaming (b || b') (rns ++ rns') -- TODO: dedupe?

-- NB: parentheses are mandatory, because later we may extend this syntax
-- to allow "hiding (A, B)" or other modifier words.
instance Text ModuleRenaming where
  disp (ModuleRenaming True []) = Disp.empty
  disp (ModuleRenaming b vs) = (if b then text "with" else Disp.empty) <+> dispRns
    where dispRns = Disp.parens
                         (Disp.hsep
                            (Disp.punctuate Disp.comma (map dispEntry vs)))
          dispEntry (orig, new)
            | orig == new = disp orig
            | otherwise = disp orig <+> text "as" <+> disp new

  parse = do Parse.string "with" >> Parse.skipSpaces
             fmap (ModuleRenaming True) parseRns
               <++ fmap (ModuleRenaming False) parseRns
               <++ return (ModuleRenaming True [])
    where parseRns = do
             rns <- Parse.between (Parse.char '(') (Parse.char ')') parseList
             Parse.skipSpaces
             return rns
          parseList =
            Parse.sepBy parseEntry (Parse.char ',' >> Parse.skipSpaces)
          -- One entry is either "Orig as New" or just "Orig" (which
          -- maps the module to itself).
          parseEntry :: Parse.ReadP r (ModuleName, ModuleName)
          parseEntry = do
            orig <- parse
            Parse.skipSpaces
            (do _ <- Parse.string "as"
                Parse.skipSpaces
                new <- parse
                Parse.skipSpaces
                return (orig, new)
             <++
                return (orig, orig))
-- ---------------------------------------------------------------------------
-- The Library type

data Library = Library {
        libName :: String,
        exposedModules    :: [ModuleName],
        reexportedModules :: [ModuleReexport],
        requiredSignatures:: [ModuleName], -- ^ What sigs need implementations?
        exposedSignatures:: [ModuleName], -- ^ What sigs are visible to users?
        libExposed        :: Bool, -- ^ Is the lib to be exposed by default?
        libBuildInfo      :: BuildInfo
    }
    deriving (Generic, Show, Eq, Read, Typeable, Data)

instance Binary Library

instance Monoid Library where
  mempty = Library {
    libName = mempty,
    exposedModules = mempty,
    reexportedModules = mempty,
    requiredSignatures = mempty,
    exposedSignatures = mempty,
    libExposed = True,
    libBuildInfo = mempty
  }
  mappend = (Semi.<>)

instance Semigroup Library where
  a <> b = Library {
    libName = combine' libName,
    exposedModules = combine exposedModules,
    reexportedModules = combine reexportedModules,
    requiredSignatures = combine requiredSignatures,
    exposedSignatures = combine exposedSignatures,
    libExposed = libExposed a && libExposed b, -- so False propagates
    libBuildInfo = combine libBuildInfo
  }
    where combine field = field a `mappend` field b
          -- Names: keep whichever side is non-empty; two different
          -- non-empty names cannot be merged and raise an error.
          combine' field = case (field a, field b) of
                      ("","") -> ""
                      ("", x) -> x
                      (x, "") -> x
                      (x, y) -> error $ "Ambiguous values for library field: '"
                                  ++ x ++ "' and '" ++ y ++ "'"

emptyLibrary :: Library
emptyLibrary = mempty
-- | Does this package have a PUBLIC library?  The public library is the
-- one whose 'libName' equals the package name (and which is buildable).
hasPublicLib :: PackageDescription -> Bool
hasPublicLib p = any f (libraries p)
  where f lib = buildable (libBuildInfo lib) &&
                libName lib == display (packageName (package p))
-- | Does this package have any buildable libraries (public or internal)?
hasLibs :: PackageDescription -> Bool
hasLibs p = any (buildable . libBuildInfo) (libraries p)
-- | Run the given action once for every buildable library of the
-- package (in declaration order).
withLib :: PackageDescription -> (Library -> IO ()) -> IO ()
withLib pkg_descr f =
    mapM_ f (filter (buildable . libBuildInfo) (libraries pkg_descr))
-- | All module names of the library that need to be compiled: exposed
-- and other modules plus exposed and required signatures.  Re-exports
-- are excluded, since they do not need to be compiled.
libModules :: Library -> [ModuleName]
libModules lib = concat
    [ exposedModules lib
    , otherModules (libBuildInfo lib)
    , exposedSignatures lib
    , requiredSignatures lib
    ]
-- -----------------------------------------------------------------------------
-- Module re-exports
-- | A module re-export declaration: an (optionally package-qualified)
-- original module and the name under which it is re-exported.
data ModuleReexport = ModuleReexport {
    moduleReexportOriginalPackage :: Maybe PackageName,
    moduleReexportOriginalName    :: ModuleName,
    moduleReexportName            :: ModuleName
  }
  deriving (Eq, Generic, Read, Show, Typeable, Data)
instance Binary ModuleReexport
-- | Concrete syntax: @[package:]OrigName [as NewName]@; the @as@ clause
-- is omitted when the re-exported name equals the original.
instance Text ModuleReexport where
  disp (ModuleReexport mpkgname origname newname) =
        maybe Disp.empty (\pkgname -> disp pkgname <> Disp.char ':') mpkgname
     <> disp origname
    <+> if newname == origname
          then Disp.empty
          else Disp.text "as" <+> disp newname
  parse = do
    -- optional "package:" prefix
    mpkgname <- Parse.option Nothing $ do
         pkgname <- parse
         _ <- Parse.char ':'
         return (Just pkgname)
    origname <- parse
    -- optional "as NewName"; defaults to the original name
    newname <- Parse.option origname $ do
        Parse.skipSpaces
        _ <- Parse.string "as"
        Parse.skipSpaces
        parse
    return (ModuleReexport mpkgname origname newname)
-- ---------------------------------------------------------------------------
-- The Executable type
-- | An executable stanza: its name, the path of the @Main@ module, and
-- how it is built.
data Executable = Executable {
    exeName    :: String,
    modulePath :: FilePath,
    buildInfo  :: BuildInfo
  }
  deriving (Generic, Show, Read, Eq, Typeable, Data)
instance Binary Executable
instance Monoid Executable where
  -- generic mempty: every field is its own mempty
  mempty = gmempty
  mappend = (Semi.<>)
-- | Field-wise merge; two different non-empty names are an error.
instance Semigroup Executable where
  a <> b = Executable{
    exeName    = combine' exeName,
    modulePath = combine modulePath,
    buildInfo  = combine buildInfo
  }
    where combine field = field a `mappend` field b
          combine' field = case (field a, field b) of
                      ("","") -> ""
                      ("", x) -> x
                      (x, "") -> x
                      (x, y) -> error $ "Ambiguous values for executable field: '"
                                  ++ x ++ "' and '" ++ y ++ "'"
emptyExecutable :: Executable
emptyExecutable = mempty
-- | True when at least one executable stanza of the package is
-- buildable.
hasExes :: PackageDescription -> Bool
hasExes = any (buildable . buildInfo) . executables
-- | Run the given action once for every buildable 'Executable' of the
-- package (in declaration order).
withExe :: PackageDescription -> (Executable -> IO ()) -> IO ()
withExe pkg_descr f =
    mapM_ f (filter (buildable . buildInfo) (executables pkg_descr))
-- | The (non-main) module names of an executable.
exeModules :: Executable -> [ModuleName]
exeModules = otherModules . buildInfo
-- ---------------------------------------------------------------------------
-- The TestSuite type
-- | A \"test-suite\" stanza in a cabal file.
--
data TestSuite = TestSuite {
    testName      :: String,
    testInterface :: TestSuiteInterface,
    testBuildInfo :: BuildInfo,
    testEnabled   :: Bool
    -- TODO: By having a 'testEnabled' field in the PackageDescription, we
    -- are mixing build status information (i.e., arguments to 'configure')
    -- with static package description information. This is undesirable, but
    -- a better solution is waiting on the next overhaul to the
    -- GenericPackageDescription -> PackageDescription resolution process.
  }
  deriving (Generic, Show, Read, Eq, Typeable, Data)
instance Binary TestSuite
-- | The test suite interfaces that are currently defined. Each test suite must
-- specify which interface it supports.
--
-- More interfaces may be defined in future, either new revisions or totally
-- new interfaces.
--
data TestSuiteInterface =
     -- | Test interface \"exitcode-stdio-1.0\". The test-suite takes the form
     -- of an executable. It returns a zero exit code for success, non-zero for
     -- failure. The stdout and stderr channels may be logged. It takes no
     -- command line parameters and nothing on stdin.
     --
     TestSuiteExeV10 Version FilePath
     -- | Test interface \"detailed-0.9\". The test-suite takes the form of a
     -- library containing a designated module that exports \"tests :: [Test]\".
     --
   | TestSuiteLibV09 Version ModuleName
     -- | A test suite that does not conform to one of the above interfaces for
     -- the given reason (e.g. unknown test type).
     --
   | TestSuiteUnsupported TestType
   deriving (Eq, Generic, Read, Show, Typeable, Data)
instance Binary TestSuiteInterface
-- | The identity is a nameless, disabled suite with an unsupported
-- interface.
instance Monoid TestSuite where
  mempty = TestSuite {
    testName      = mempty,
    testInterface = mempty,
    testBuildInfo = mempty,
    testEnabled   = False
  }
  mappend = (Semi.<>)
-- | Field-wise merge of two 'TestSuite's.  Names must agree or one must
-- be empty; a suite is enabled if either operand is enabled.
instance Semigroup TestSuite where
  a <> b = TestSuite {
    testName      = combine' testName,
    testInterface = combine  testInterface,
    testBuildInfo = combine  testBuildInfo,
    -- enabling either side enables the merged suite
    testEnabled   = testEnabled a || testEnabled b
  }
    where combine field = field a `mappend` field b
          -- Two different non-empty names cannot be merged.  NB: the '$'
          -- after 'error' is essential; without it the (++) applies to the
          -- result of 'error' and the offending values are never shown
          -- (cf. the Library and Executable instances above).
          combine' f = case (f a, f b) of
                    ("", x) -> x
                    (x, "") -> x
                    (x, y)  -> error $ "Ambiguous values for test field: '"
                                   ++ x ++ "' and '" ++ y ++ "'"
instance Monoid TestSuiteInterface where
  mempty = TestSuiteUnsupported (TestTypeUnknown mempty (Version [] []))
  mappend = (Semi.<>)
-- | Right-biased, except an unsupported interface never overrides a
-- supported one.
instance Semigroup TestSuiteInterface where
  a <> (TestSuiteUnsupported _) = a
  _ <> b                        = b
emptyTestSuite :: TestSuite
emptyTestSuite = mempty
-- | True when at least one test-suite stanza of the package is
-- buildable.
hasTests :: PackageDescription -> Bool
hasTests pkg = any (buildable . testBuildInfo) (testSuites pkg)
-- | All test suites of the package that are marked enabled.
enabledTests :: PackageDescription -> [TestSuite]
enabledTests pkg = [ t | t <- testSuites pkg, testEnabled t ]
-- | Run the given action once for every enabled, buildable 'TestSuite'
-- of the package (in declaration order).
withTest :: PackageDescription -> (TestSuite -> IO ()) -> IO ()
withTest pkg_descr f =
    sequence_ [ f t | t <- enabledTests pkg_descr
                    , buildable (testBuildInfo t) ]
-- | Get all the module names from a test suite.
testModules :: TestSuite -> [ModuleName]
testModules test = (case testInterface test of
                     -- a detailed-0.9 suite designates one extra module
                     TestSuiteLibV09 _ m -> [m]
                     _                   -> [])
                ++ otherModules (testBuildInfo test)
-- | The \"test-type\" field in the test suite stanza.
--
data TestType = TestTypeExe Version     -- ^ \"type: exitcode-stdio-x.y\"
              | TestTypeLib Version     -- ^ \"type: detailed-x.y\"
              | TestTypeUnknown String Version -- ^ Some unknown test type e.g. \"type: foo\"
    deriving (Generic, Show, Read, Eq, Typeable, Data)
instance Binary TestType
-- | The test types understood by this version of Cabal.
knownTestTypes :: [TestType]
knownTestTypes = [ TestTypeExe (Version [1,0] [])
                 , TestTypeLib (Version [0,9] []) ]
-- | Parse a @name-version@ string such as @exitcode-stdio-1.0@,
-- building the result from the version and the lower-cased name.
stdParse :: Text ver => (ver -> String -> res) -> Parse.ReadP r res
stdParse f = do
  cs <- Parse.sepBy1 component (Parse.char '-')
  _ <- Parse.char '-'
  ver <- parse
  let name = intercalate "-" cs
  return $! f ver (lowercase name)
  where
    component = do
      cs <- Parse.munch1 Char.isAlphaNum
      if all Char.isDigit cs then Parse.pfail else return cs
    -- each component must contain an alphabetic character, to avoid
    -- ambiguity in identifiers like foo-1 (the 1 is the version number).
instance Text TestType where
  disp (TestTypeExe ver)          = text "exitcode-stdio-" <> disp ver
  disp (TestTypeLib ver)          = text "detailed-" <> disp ver
  disp (TestTypeUnknown name ver) = text name <> char '-' <> disp ver
  parse = stdParse $ \ver name -> case name of
    "exitcode-stdio" -> TestTypeExe ver
    "detailed"       -> TestTypeLib ver
    _                -> TestTypeUnknown name ver
-- | Recover the \"test-type\" corresponding to a suite's interface.
testType :: TestSuite -> TestType
testType ts =
    case testInterface ts of
      TestSuiteExeV10 v _      -> TestTypeExe v
      TestSuiteLibV09 v _      -> TestTypeLib v
      TestSuiteUnsupported tty -> tty
-- ---------------------------------------------------------------------------
-- The Benchmark type
-- | A \"benchmark\" stanza in a cabal file.
--
data Benchmark = Benchmark {
    benchmarkName      :: String,
    benchmarkInterface :: BenchmarkInterface,
    benchmarkBuildInfo :: BuildInfo,
    benchmarkEnabled   :: Bool
    -- TODO: See TODO for 'testEnabled'.
  }
  deriving (Generic, Show, Read, Eq, Typeable, Data)
instance Binary Benchmark
-- | The benchmark interfaces that are currently defined. Each
-- benchmark must specify which interface it supports.
--
-- More interfaces may be defined in future, either new revisions or
-- totally new interfaces.
--
data BenchmarkInterface =
     -- | Benchmark interface \"exitcode-stdio-1.0\". The benchmark
     -- takes the form of an executable. It returns a zero exit code
     -- for success, non-zero for failure. The stdout and stderr
     -- channels may be logged. It takes no command line parameters
     -- and nothing on stdin.
     --
     BenchmarkExeV10 Version FilePath
     -- | A benchmark that does not conform to one of the above
     -- interfaces for the given reason (e.g. unknown benchmark type).
     --
   | BenchmarkUnsupported BenchmarkType
   deriving (Eq, Generic, Read, Show, Typeable, Data)
instance Binary BenchmarkInterface
-- | The identity is a nameless, disabled benchmark with an unsupported
-- interface.
instance Monoid Benchmark where
  mempty = Benchmark {
    benchmarkName      = mempty,
    benchmarkInterface = mempty,
    benchmarkBuildInfo = mempty,
    benchmarkEnabled   = False
  }
  mappend = (Semi.<>)
-- | Field-wise merge of two 'Benchmark's.  Names must agree or one must
-- be empty; a benchmark is enabled if either operand is enabled.
instance Semigroup Benchmark where
  a <> b = Benchmark {
    benchmarkName      = combine' benchmarkName,
    benchmarkInterface = combine  benchmarkInterface,
    benchmarkBuildInfo = combine  benchmarkBuildInfo,
    -- enabling either side enables the merged benchmark
    benchmarkEnabled   = benchmarkEnabled a || benchmarkEnabled b
  }
    where combine field = field a `mappend` field b
          -- Two different non-empty names cannot be merged.  NB: the '$'
          -- after 'error' is essential; without it the (++) applies to the
          -- result of 'error' and the offending values are never shown
          -- (cf. the Library and Executable instances above).
          combine' f = case (f a, f b) of
                    ("", x) -> x
                    (x, "") -> x
                    (x, y)  -> error $ "Ambiguous values for benchmark field: '"
                                   ++ x ++ "' and '" ++ y ++ "'"
instance Monoid BenchmarkInterface where
  mempty = BenchmarkUnsupported (BenchmarkTypeUnknown mempty (Version [] []))
  mappend = (Semi.<>)
-- | Right-biased, except an unsupported interface never overrides a
-- supported one.
instance Semigroup BenchmarkInterface where
  a <> (BenchmarkUnsupported _) = a
  _ <> b                        = b
emptyBenchmark :: Benchmark
emptyBenchmark = mempty
-- | True when at least one benchmark stanza of the package is
-- buildable.
hasBenchmarks :: PackageDescription -> Bool
hasBenchmarks pkg = any (buildable . benchmarkBuildInfo) (benchmarks pkg)
-- | All benchmarks of the package that are marked enabled.
enabledBenchmarks :: PackageDescription -> [Benchmark]
enabledBenchmarks pkg = [ b | b <- benchmarks pkg, benchmarkEnabled b ]
-- | Run the given action once for every enabled, buildable 'Benchmark'
-- of the package (in declaration order).
withBenchmark :: PackageDescription -> (Benchmark -> IO ()) -> IO ()
withBenchmark pkg_descr f =
    sequence_ [ f b | b <- enabledBenchmarks pkg_descr
                    , buildable (benchmarkBuildInfo b) ]
-- | The module names of a benchmark.
benchmarkModules :: Benchmark -> [ModuleName]
benchmarkModules = otherModules . benchmarkBuildInfo
-- | The \"benchmark-type\" field in the benchmark stanza.
--
data BenchmarkType = BenchmarkTypeExe Version
                     -- ^ \"type: exitcode-stdio-x.y\"
                   | BenchmarkTypeUnknown String Version
                     -- ^ Some unknown benchmark type e.g. \"type: foo\"
    deriving (Generic, Show, Read, Eq, Typeable, Data)
instance Binary BenchmarkType
-- | The benchmark types understood by this version of Cabal.
knownBenchmarkTypes :: [BenchmarkType]
knownBenchmarkTypes = [ BenchmarkTypeExe (Version [1,0] []) ]
instance Text BenchmarkType where
  disp (BenchmarkTypeExe ver)          = text "exitcode-stdio-" <> disp ver
  disp (BenchmarkTypeUnknown name ver) = text name <> char '-' <> disp ver
  parse = stdParse $ \ver name -> case name of
    "exitcode-stdio" -> BenchmarkTypeExe ver
    _                -> BenchmarkTypeUnknown name ver
-- | Recover the \"benchmark-type\" corresponding to a benchmark's
-- interface.
benchmarkType :: Benchmark -> BenchmarkType
benchmarkType benchmark = case benchmarkInterface benchmark of
  BenchmarkExeV10 ver _              -> BenchmarkTypeExe ver
  BenchmarkUnsupported benchmarktype -> benchmarktype
-- ---------------------------------------------------------------------------
-- The BuildInfo type
-- Consider refactoring into executable and library versions.
-- | Build information shared by every component kind: sources, build
-- tools, compiler\/linker options, language extensions and dependencies.
data BuildInfo = BuildInfo {
    buildable         :: Bool,      -- ^ component is buildable here
    buildTools        :: [Dependency], -- ^ tools needed to build this bit
    cppOptions        :: [String],  -- ^ options for pre-processing Haskell code
    ccOptions         :: [String],  -- ^ options for C compiler
    ldOptions         :: [String],  -- ^ options for linker
    pkgconfigDepends  :: [Dependency], -- ^ pkg-config packages that are used
    frameworks        :: [String], -- ^support frameworks for Mac OS X
    extraFrameworkDirs:: [String], -- ^ extra locations to find frameworks.
    cSources          :: [FilePath],
    jsSources         :: [FilePath],
    hsSourceDirs      :: [FilePath], -- ^ where to look for the Haskell module hierarchy
    otherModules      :: [ModuleName], -- ^ non-exposed or non-main modules
    defaultLanguage   :: Maybe Language,-- ^ language used when not explicitly specified
    otherLanguages    :: [Language], -- ^ other languages used within the package
    defaultExtensions :: [Extension], -- ^ language extensions used by all modules
    otherExtensions   :: [Extension], -- ^ other language extensions used within the package
    oldExtensions     :: [Extension], -- ^ the old extensions field, treated same as 'defaultExtensions'
    extraLibs         :: [String], -- ^ what libraries to link with when compiling a program that uses your package
    extraGHCiLibs     :: [String], -- ^ if present, overrides extraLibs when package is loaded with GHCi.
    extraLibDirs      :: [String],
    includeDirs       :: [FilePath], -- ^directories to find .h files
    includes          :: [FilePath], -- ^ The .h files to be found in includeDirs
    installIncludes   :: [FilePath], -- ^ .h files to install with the package
    options           :: [(CompilerFlavor,[String])],
    profOptions       :: [(CompilerFlavor,[String])],
    sharedOptions     :: [(CompilerFlavor,[String])],
    customFieldsBI    :: [(String,String)], -- ^Custom fields starting
                                            -- with x-, stored in a
                                            -- simple assoc-list.
    targetBuildDepends :: [Dependency], -- ^ Dependencies specific to a library or executable target
    targetBuildRenaming :: Map PackageName ModuleRenaming
  }
  deriving (Generic, Show, Read, Eq, Typeable, Data)
instance Binary BuildInfo
-- | The identity is buildable, with every other field empty.
instance Monoid BuildInfo where
  mempty = BuildInfo {
    buildable           = True,
    buildTools          = [],
    cppOptions          = [],
    ccOptions           = [],
    ldOptions           = [],
    pkgconfigDepends    = [],
    frameworks          = [],
    extraFrameworkDirs  = [],
    cSources            = [],
    jsSources           = [],
    hsSourceDirs        = [],
    otherModules        = [],
    defaultLanguage     = Nothing,
    otherLanguages      = [],
    defaultExtensions   = [],
    otherExtensions     = [],
    oldExtensions       = [],
    extraLibs           = [],
    extraGHCiLibs       = [],
    extraLibDirs        = [],
    includeDirs         = [],
    includes            = [],
    installIncludes     = [],
    options             = [],
    profOptions         = [],
    sharedOptions       = [],
    customFieldsBI      = [],
    targetBuildDepends  = [],
    targetBuildRenaming = Map.empty
  }
  mappend = (Semi.<>)
-- | Field-wise merge: 'buildable' is AND-ed, list fields are appended
-- (some de-duplicated), maps are unioned.
instance Semigroup BuildInfo where
  a <> b = BuildInfo {
    buildable           = buildable a && buildable b,
    buildTools          = combine    buildTools,
    cppOptions          = combine    cppOptions,
    ccOptions           = combine    ccOptions,
    ldOptions           = combine    ldOptions,
    pkgconfigDepends    = combine    pkgconfigDepends,
    frameworks          = combineNub frameworks,
    extraFrameworkDirs  = combineNub extraFrameworkDirs,
    cSources            = combineNub cSources,
    jsSources           = combineNub jsSources,
    hsSourceDirs        = combineNub hsSourceDirs,
    otherModules        = combineNub otherModules,
    defaultLanguage     = combineMby defaultLanguage,
    otherLanguages      = combineNub otherLanguages,
    defaultExtensions   = combineNub defaultExtensions,
    otherExtensions     = combineNub otherExtensions,
    oldExtensions       = combineNub oldExtensions,
    extraLibs           = combine    extraLibs,
    extraGHCiLibs       = combine    extraGHCiLibs,
    extraLibDirs        = combineNub extraLibDirs,
    includeDirs         = combineNub includeDirs,
    includes            = combineNub includes,
    installIncludes     = combineNub installIncludes,
    options             = combine    options,
    profOptions         = combine    profOptions,
    sharedOptions       = combine    sharedOptions,
    customFieldsBI      = combine    customFieldsBI,
    targetBuildDepends  = combineNub targetBuildDepends,
    targetBuildRenaming = combineMap targetBuildRenaming
  }
    where
      combine    field = field a `mappend` field b
      combineNub field = nub (combine field)         -- de-duplicate, keep order
      combineMby field = field b `mplus` field a     -- first Just wins, biased to b
      combineMap field = Map.unionWith mappend (field a) (field b)
emptyBuildInfo :: BuildInfo
emptyBuildInfo = mempty
-- | The 'BuildInfo' for the library (if there is one and it's buildable), and
-- all buildable executables, test suites and benchmarks. Useful for gathering
-- dependencies.
--
-- Note: test suites and benchmarks are included only when enabled.
allBuildInfo :: PackageDescription -> [BuildInfo]
allBuildInfo pkg_descr = [ bi | lib <- libraries pkg_descr
                              , let bi = libBuildInfo lib
                              , buildable bi ]
                      ++ [ bi | exe <- executables pkg_descr
                              , let bi = buildInfo exe
                              , buildable bi ]
                      ++ [ bi | tst <- testSuites pkg_descr
                              , let bi = testBuildInfo tst
                              , buildable bi
                              , testEnabled tst ]
                      ++ [ bi | tst <- benchmarks pkg_descr
                              , let bi = benchmarkBuildInfo tst
                              , buildable bi
                              , benchmarkEnabled tst ]
--FIXME: many of the places where this is used, we actually want to look at
--       unbuildable bits too, probably need separate functions
-- | Every 'Language' used by this component: the default language (if
-- any) followed by the other languages.
allLanguages :: BuildInfo -> [Language]
allLanguages bi =
    case defaultLanguage bi of
      Nothing   -> others
      Just lang -> lang : others
  where
    others = otherLanguages bi
-- | Every 'Extension' used somewhere by this component: the ones used
-- by all modules followed by the other extensions.
allExtensions :: BuildInfo -> [Extension]
allExtensions bi = used ++ other
  where
    used  = usedExtensions bi
    other = otherExtensions bi
-- | The extensions applied to all modules of this component: the legacy
-- @extensions@ field followed by @default-extensions@.
usedExtensions :: BuildInfo -> [Extension]
usedExtensions bi = old ++ def
  where
    old = oldExtensions bi
    def = defaultExtensions bi
-- Libraries live in a separate namespace, so must distinguish
-- | A component name qualified by the kind of component.
data ComponentName = CLibName   String
                   | CExeName   String
                   | CTestName  String
                   | CBenchName String
                   deriving (Eq, Generic, Ord, Read, Show)
instance Binary ComponentName
-- | The name of the public library, which coincides with the package
-- name.
defaultLibName :: PackageIdentifier -> ComponentName
defaultLibName pid = CLibName (display (pkgName pid))
-- | Extra 'BuildInfo' supplied by hooks, keyed by component name.
type HookedBuildInfo = [(ComponentName, BuildInfo)]
emptyHookedBuildInfo :: HookedBuildInfo
emptyHookedBuildInfo = []
-- |Select options for a particular Haskell compiler.
hcOptions :: CompilerFlavor -> BuildInfo -> [String]
hcOptions = lookupHcOptions options
hcProfOptions :: CompilerFlavor -> BuildInfo -> [String]
hcProfOptions = lookupHcOptions profOptions
hcSharedOptions :: CompilerFlavor -> BuildInfo -> [String]
hcSharedOptions = lookupHcOptions sharedOptions
-- | Collect the option strings recorded for the given compiler flavour.
lookupHcOptions :: (BuildInfo -> [(CompilerFlavor,[String])])
                -> CompilerFlavor -> BuildInfo -> [String]
lookupHcOptions f hc bi = [ opt | (hc',opts) <- f bi
                          , hc' == hc
                          , opt <- opts ]
-- ------------------------------------------------------------
-- * Source repos
-- ------------------------------------------------------------
-- | Information about the source revision control system for a package.
--
-- When specifying a repo it is useful to know the meaning or intention of the
-- information as doing so enables automation. There are two obvious common
-- purposes: one is to find the repo for the latest development version, the
-- other is to find the repo for this specific release. The 'RepoKind'
-- specifies which one we mean (or another custom one).
--
-- A package can specify one or the other kind or both. Most will specify just
-- a head repo but some may want to specify a repo to reconstruct the sources
-- for this package release.
--
-- The required information is the 'RepoType' which tells us if it's using
-- 'Darcs', 'Git' for example. The 'repoLocation' and other details are
-- interpreted according to the repo type.
--
data SourceRepo = SourceRepo {
  -- | The kind of repo. This field is required.
  repoKind     :: RepoKind,
  -- | The type of the source repository system for this repo, eg 'Darcs' or
  -- 'Git'. This field is required.
  repoType     :: Maybe RepoType,
  -- | The location of the repository. For most 'RepoType's this is a URL.
  -- This field is required.
  repoLocation :: Maybe String,
  -- | 'CVS' can put multiple \"modules\" on one server and requires a
  -- module name in addition to the location to identify a particular repo.
  -- Logically this is part of the location but unfortunately has to be
  -- specified separately. This field is required for the 'CVS' 'RepoType' and
  -- should not be given otherwise.
  repoModule   :: Maybe String,
  -- | The name or identifier of the branch, if any. Many source control
  -- systems have the notion of multiple branches in a repo that exist in the
  -- same location. For example 'Git' and 'CVS' use this while systems like
  -- 'Darcs' use different locations for different branches. This field is
  -- optional but should be used if necessary to identify the sources,
  -- especially for the 'RepoThis' repo kind.
  repoBranch   :: Maybe String,
  -- | The tag identify a particular state of the repository. This should be
  -- given for the 'RepoThis' repo kind and not for 'RepoHead' kind.
  --
  repoTag      :: Maybe String,
  -- | Some repositories contain multiple projects in different subdirectories
  -- This field specifies the subdirectory where this packages sources can be
  -- found, eg the subdirectory containing the @.cabal@ file. It is interpreted
  -- relative to the root of the repository. This field is optional. If not
  -- given the default is \".\" ie no subdirectory.
  repoSubdir   :: Maybe FilePath
  }
  deriving (Eq, Generic, Read, Show, Typeable, Data)
instance Binary SourceRepo
-- | What this repo info is for, what it represents.
--
data RepoKind =
    -- | The repository for the \"head\" or development version of the project.
    -- This repo is where we should track the latest development activity or
    -- the usual repo people should get to contribute patches.
    RepoHead
    -- | The repository containing the sources for this exact package version
    -- or release. For this kind of repo a tag should be given to give enough
    -- information to re-create the exact sources.
  | RepoThis
    -- | Some other, unrecognised repo kind.
  | RepoKindUnknown String
  deriving (Eq, Generic, Ord, Read, Show, Typeable, Data)
instance Binary RepoKind
-- | An enumeration of common source control systems. The fields used in the
-- 'SourceRepo' depend on the type of repo. The tools and methods used to
-- obtain and track the repo depend on the repo type.
--
data RepoType = Darcs | Git | SVN | CVS
              | Mercurial | GnuArch | Bazaar | Monotone
              | OtherRepoType String
  deriving (Eq, Generic, Ord, Read, Show, Typeable, Data)
instance Binary RepoType
-- | Every repo type Cabal knows by name (see also 'repoTypeAliases').
knownRepoTypes :: [RepoType]
knownRepoTypes = [Darcs, Git, SVN, CVS
                 ,Mercurial, GnuArch, Bazaar, Monotone]
-- | Alternative (lower-case) names accepted for a repository type.
repoTypeAliases :: RepoType -> [String]
repoTypeAliases rt = case rt of
    Bazaar    -> ["bzr"]
    Mercurial -> ["hg"]
    GnuArch   -> ["arch"]
    _         -> []
-- | Rendered and parsed as the lower-case kind name; unknown kinds
-- round-trip verbatim.
instance Text RepoKind where
  disp RepoHead                = Disp.text "head"
  disp RepoThis                = Disp.text "this"
  disp (RepoKindUnknown other) = Disp.text other
  parse = do
    name <- ident
    return $ case lowercase name of
      "head" -> RepoHead
      "this" -> RepoThis
      _      -> RepoKindUnknown name
instance Text RepoType where
  disp (OtherRepoType other) = Disp.text other
  disp other                 = Disp.text (lowercase (show other))
  parse = fmap classifyRepoType ident
-- | Map a string (case-insensitively, including aliases such as
-- \"hg\") to a known 'RepoType', or wrap it in 'OtherRepoType'.
classifyRepoType :: String -> RepoType
classifyRepoType s =
  fromMaybe (OtherRepoType s) $ lookup (lowercase s) repoTypeMap
  where
    repoTypeMap = [ (name, repoType')
                  | repoType' <- knownRepoTypes
                  , name <- display repoType' : repoTypeAliases repoType' ]
-- | Lex an identifier: one or more alphanumeric, underscore or dash
-- characters.
ident :: Parse.ReadP r String
ident = Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-')
-- | Map every character of a 'String' to lower case.
lowercase :: String -> String
lowercase = fmap Char.toLower
-- ------------------------------------------------------------
-- * Utils
-- ------------------------------------------------------------
-- | Merge hook-supplied 'BuildInfo' into the matching components of a
-- 'PackageDescription', matching on the qualified component name.
updatePackageDescription :: HookedBuildInfo -> PackageDescription -> PackageDescription
updatePackageDescription hooked_bis p
  = p{ executables = updateMany (CExeName   . exeName)       updateExecutable (executables p)
     , libraries   = updateMany (CLibName   . libName)       updateLibrary    (libraries   p)
     , benchmarks  = updateMany (CBenchName . benchmarkName) updateBenchmark  (benchmarks  p)
     , testSuites  = updateMany (CTestName  . testName)      updateTestSuite  (testSuites  p)
     }
    where
      updateMany :: (a -> ComponentName) -- ^ get 'ComponentName' from @a@
                 -> (BuildInfo -> a -> a) -- ^ @updateExecutable@, @updateLibrary@, etc
                 -> [a]          -- ^list of components to update
                 -> [a]          -- ^list with updated components
      updateMany name update cs' = foldr (updateOne name update) cs' hooked_bis
      updateOne :: (a -> ComponentName) -- ^ get 'ComponentName' from @a@
                -> (BuildInfo -> a -> a) -- ^ @updateExecutable@, @updateLibrary@, etc
                -> (ComponentName, BuildInfo) -- ^(name, new buildinfo)
                -> [a]           -- ^list of components to update
                -> [a]           -- ^list with name component updated
      updateOne _ _ _ [] = []
      updateOne name_sel update hooked_bi'@(name,bi) (c:cs)
        | name_sel c == name ||
          -- Special case: an empty name means "please update the BuildInfo for
          -- the public library, i.e. the one with the same name as the
          -- package." See 'parseHookedBuildInfo'.
          name == CLibName "" && name_sel c == defaultLibName (package p)
        = update bi c : cs
        | otherwise = c : updateOne name_sel update hooked_bi' cs
      -- the hook's BuildInfo is combined with the existing one via 'mappend'
      updateExecutable bi exe  = exe{buildInfo = bi `mappend` buildInfo exe}
      updateLibrary    bi lib  = lib{libBuildInfo = bi `mappend` libBuildInfo lib}
      updateBenchmark  bi ben  = ben{benchmarkBuildInfo = bi `mappend` benchmarkBuildInfo ben}
      updateTestSuite  bi test = test{testBuildInfo = bi `mappend` testBuildInfo test}
-- ---------------------------------------------------------------------------
-- The GenericPackageDescription type
-- | A package description together with its configuration flags and the
-- still-unresolved conditional trees for every component kind.
data GenericPackageDescription =
    GenericPackageDescription {
        packageDescription :: PackageDescription,
        genPackageFlags    :: [Flag],
        condLibraries   :: [(String, CondTree ConfVar [Dependency] Library)],
        condExecutables :: [(String, CondTree ConfVar [Dependency] Executable)],
        condTestSuites  :: [(String, CondTree ConfVar [Dependency] TestSuite)],
        condBenchmarks  :: [(String, CondTree ConfVar [Dependency] Benchmark)]
    }
    deriving (Show, Eq, Typeable, Data, Generic)
instance Package GenericPackageDescription where
  packageId = packageId . packageDescription
instance Binary GenericPackageDescription
-- | A flag can represent a feature to be included, or a way of linking
-- a target against its dependencies, or in fact whatever you can think of.
data Flag = MkFlag
    { flagName        :: FlagName
    , flagDescription :: String
    , flagDefault     :: Bool
    , flagManual      :: Bool
    }
    deriving (Show, Eq, Typeable, Data, Generic)
instance Binary Flag
-- | A 'FlagName' is the name of a user-defined configuration flag
newtype FlagName = FlagName String
    deriving (Eq, Generic, Ord, Show, Read, Typeable, Data)
instance Binary FlagName
-- | A 'FlagAssignment' is a total or partial mapping of 'FlagName's to
-- 'Bool' flag values. It represents the flags chosen by the user or
-- discovered during configuration. For example @--flags=foo --flags=-bar@
-- becomes @[("foo", True), ("bar", False)]@
--
type FlagAssignment = [(FlagName, Bool)]
-- | A @ConfVar@ represents the variable type used.
data ConfVar = OS OS                            -- ^ operating system test
             | Arch Arch                        -- ^ architecture test
             | Flag FlagName                    -- ^ user-defined flag
             | Impl CompilerFlavor VersionRange -- ^ compiler\/version test
    deriving (Eq, Show, Typeable, Data, Generic)
instance Binary ConfVar
-- | A boolean expression parameterized over the variable type used.
data Condition c = Var c
                 | Lit Bool
                 | CNot (Condition c)
                 | COr (Condition c) (Condition c)
                 | CAnd (Condition c) (Condition c)
    deriving (Show, Eq, Typeable, Data, Generic)
-- | Boolean negation of a 'Condition', folding literals and collapsing
-- double negation.
cNot :: Condition a -> Condition a
cNot cond = case cond of
    Lit b  -> Lit (not b)
    CNot c -> c
    other  -> CNot other
-- | Boolean AND of two 'Condition' values, with constant folding:
-- @Lit False@ annihilates and @Lit True@ is the identity.
cAnd :: Condition a -> Condition a -> Condition a
cAnd x y = case (x, y) of
    (Lit False, _) -> Lit False
    (_, Lit False) -> Lit False
    (Lit True,  c) -> c
    (c, Lit True)  -> c
    _              -> CAnd x y
-- | Boolean OR of two 'Condition' values, with constant folding and the
-- complement law @c OR (not c) = True@ (hence the 'Eq' constraint).
cOr :: Eq v => Condition v -> Condition v -> Condition v
cOr x y = case (x, y) of
    (Lit True,  _)       -> Lit True
    (_, Lit True)        -> Lit True
    (Lit False, c)       -> c
    (c, Lit False)       -> c
    (c, CNot d) | c == d -> Lit True
    (CNot c, d) | c == d -> Lit True
    _                    -> COr x y
-- | Mapping applies the function to every variable in the condition.
instance Functor Condition where
  f `fmap` Var c    = Var (f c)
  _ `fmap` Lit c    = Lit c
  f `fmap` CNot c   = CNot (fmap f c)
  f `fmap` COr c d  = COr (fmap f c) (fmap f d)
  f `fmap` CAnd c d = CAnd (fmap f c) (fmap f d)
instance Foldable Condition where
  f `foldMap` Var c    = f c
  _ `foldMap` Lit _    = mempty
  f `foldMap` CNot c   = Fold.foldMap f c
  f `foldMap` COr c d  = foldMap f c `mappend` foldMap f d
  f `foldMap` CAnd c d = foldMap f c `mappend` foldMap f d
instance Traversable Condition where
  f `traverse` Var c    = Var `fmap` f c
  _ `traverse` Lit c    = pure $ Lit c
  f `traverse` CNot c   = CNot `fmap` Trav.traverse f c
  f `traverse` COr c d  = COr `fmap` traverse f c <*> traverse f d
  f `traverse` CAnd c d = CAnd `fmap` traverse f c <*> traverse f d
instance Applicative Condition where
  pure  = Var
  (<*>) = ap
-- | '>>=' substitutes a whole condition for each variable.
instance Monad Condition where
  return = AP.pure
  -- Terminating cases
  (>>=) (Lit x) _ = Lit x
  (>>=) (Var x) f = f x
  -- Recursing cases
  (>>=) (CNot  x  ) f = CNot (x >>= f)
  (>>=) (COr   x y) f = COr  (x >>= f) (y >>= f)
  (>>=) (CAnd  x y) f = CAnd (x >>= f) (y >>= f)
-- | The disjunction monoid: 'mempty' is @Lit False@ and '<>' is 'COr'.
instance Monoid (Condition a) where
  mempty = Lit False
  mappend = (Semi.<>)
instance Semigroup (Condition a) where
  (<>) = COr
instance Alternative Condition where
  empty = mempty
  (<|>) = mappend
instance MonadPlus Condition where
  mzero = mempty
  mplus = mappend
instance Binary c => Binary (Condition c)
-- | A tree of conditional branches: unconditional data plus constraints,
-- and a list of (condition, then-branch, optional else-branch) triples.
data CondTree v c a = CondNode
    { condTreeData        :: a
    , condTreeConstraints :: c
    , condTreeComponents  :: [( Condition v
                              , CondTree v c a
                              , Maybe (CondTree v c a))]
    }
    deriving (Show, Eq, Typeable, Data, Generic)
instance (Binary v, Binary c, Binary a) => Binary (CondTree v c a)
| thomie/cabal | Cabal/Distribution/PackageDescription.hs | bsd-3-clause | 50,919 | 0 | 16 | 14,486 | 9,947 | 5,527 | 4,420 | 830 | 3 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Turkish locale, tr-TR) declaring the
     TOC, index, full-text search and favourites views for this help set. -->
<helpset version="2.0" xml:lang="tr-TR">
  <title>Göster | ZAP Uzantıları</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>İçindekiler</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>İçerik</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Arama</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favoriler</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset> | kingthorin/zap-extensions | addOns/reveal/src/main/javahelp/org/zaproxy/zap/extension/reveal/resources/help_tr_TR/helpset_tr_TR.hs | apache-2.0 | 978 | 78 | 66 | 158 | 423 | 213 | 210 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
module CmmExpr
( CmmExpr(..), cmmExprType, cmmExprWidth, maybeInvertCmmExpr
, CmmReg(..), cmmRegType
, CmmLit(..), cmmLitType
, LocalReg(..), localRegType
, GlobalReg(..), isArgReg, globalRegType, spReg, hpReg, spLimReg, nodeReg, node, baseReg
, VGcPtr(..), vgcFlag -- Temporary!
, DefinerOfRegs, UserOfRegs
, foldRegsDefd, foldRegsUsed, filterRegsUsed
, foldLocalRegsDefd, foldLocalRegsUsed
, RegSet, LocalRegSet, GlobalRegSet
, emptyRegSet, elemRegSet, extendRegSet, deleteFromRegSet, mkRegSet
, plusRegSet, minusRegSet, timesRegSet, sizeRegSet, nullRegSet
, regSetToList
, regUsedIn
, Area(..)
, module CmmMachOp
, module CmmType
)
where
#include "HsVersions.h"
import CmmType
import CmmMachOp
import BlockId
import CLabel
import DynFlags
import Unique
import Outputable (panic)
import Data.Set (Set)
import qualified Data.Set as Set
-----------------------------------------------------------------------------
-- CmmExpr
-- An expression. Expressions have no side effects.
-----------------------------------------------------------------------------
-- | A Cmm expression.  Per the section header above, expressions are
-- pure: evaluating one has no side effects.
data CmmExpr
  = CmmLit CmmLit               -- Literal
  | CmmLoad !CmmExpr !CmmType   -- Read memory location
  | CmmReg !CmmReg              -- Contents of register
  | CmmMachOp MachOp [CmmExpr]  -- Machine operation (+, -, *, etc.)
  | CmmStackSlot Area {-# UNPACK #-} !Int
                                -- addressing expression of a stack slot
                                -- See Note [CmmStackSlot aliasing]
  | CmmRegOff !CmmReg Int
        -- CmmRegOff reg i
        --        ** is shorthand only, meaning **
        -- CmmMachOp (MO_Add rep) [x, CmmLit (CmmInt (fromIntegral i) rep)]
        --      where rep = typeWidth (cmmRegType reg)
-- Hand-written (not derived) so that comparison ignores the 'CmmType'
-- field of 'CmmLoad' — see the wildcard in that clause.
instance Eq CmmExpr where       -- Equality ignores the types
  CmmLit l1          == CmmLit l2          = l1==l2
  CmmLoad e1 _       == CmmLoad e2 _       = e1==e2
  CmmReg r1          == CmmReg r2          = r1==r2
  CmmRegOff r1 i1    == CmmRegOff r2 i2    = r1==r2 && i1==i2
  CmmMachOp op1 es1  == CmmMachOp op2 es2  = op1==op2 && es1==es2
  CmmStackSlot a1 i1 == CmmStackSlot a2 i2 = a1==a2 && i1==i2
  _e1                == _e2                = False
-- | A register: either a local virtual register or a global STG register.
data CmmReg
  = CmmLocal  {-# UNPACK #-} !LocalReg
  | CmmGlobal GlobalReg
  deriving( Eq, Ord )

-- | A stack area is either the stack slot where a variable is spilled
-- or the stack space where function arguments and results are passed.
data Area
  = Old            -- See Note [Old Area]
  | Young {-# UNPACK #-} !BlockId  -- Invariant: must be a continuation BlockId
                                   -- See Note [Continuation BlockId] in CmmNode.
  deriving (Eq, Ord)
{- Note [Old Area]
~~~~~~~~~~~~~~~~~~
There is a single call area 'Old', allocated at the extreme old
end of the stack frame (ie just younger than the return address)
which holds:
* incoming (overflow) parameters,
* outgoing (overflow) parameter to tail calls,
* outgoing (overflow) result values
* the update frame (if any)
Its size is the max of all these requirements. On entry, the stack
pointer will point to the youngest incoming parameter, which is not
necessarily at the young end of the Old area.
End of note -}
{- Note [CmmStackSlot aliasing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When do two CmmStackSlots alias?
- T[old+N] aliases with U[young(L)+M] for all T, U, L, N and M
- T[old+N] aliases with U[old+M] only if the areas actually overlap
Or more informally, different Areas may overlap with each other.
An alternative semantics, that we previously had, was that different
Areas do not overlap. The problem that led to redefining the
semantics of stack areas is described below.
e.g. if we had
x = Sp[old + 8]
y = Sp[old + 16]
Sp[young(L) + 8] = L
Sp[young(L) + 16] = y
Sp[young(L) + 24] = x
call f() returns to L
if areas semantically do not overlap, then we might optimise this to
Sp[young(L) + 8] = L
Sp[young(L) + 16] = Sp[old + 8]
Sp[young(L) + 24] = Sp[old + 16]
call f() returns to L
and now young(L) cannot be allocated at the same place as old, and we
are doomed to use more stack.
- old+8 conflicts with young(L)+8
- old+16 conflicts with young(L)+16 and young(L)+8
so young(L)+8 == old+24 and we get
Sp[-8] = L
Sp[-16] = Sp[8]
Sp[-24] = Sp[0]
Sp -= 24
call f() returns to L
However, if areas are defined to be "possibly overlapping" in the
semantics, then we cannot commute any loads/stores of old with
young(L), and we will be able to re-use both old+8 and old+16 for
young(L).
x = Sp[8]
y = Sp[0]
Sp[8] = L
Sp[0] = y
Sp[-8] = x
Sp = Sp - 8
call f() returns to L
Now, the assignments of y go away,
x = Sp[8]
Sp[8] = L
Sp[-8] = x
Sp = Sp - 8
call f() returns to L
-}
-- | A literal Cmm value.
data CmmLit
  = CmmInt !Integer  Width
        -- Interpretation: the 2's complement representation of the value
        -- is truncated to the specified size.  This is easier than trying
        -- to keep the value within range, because we don't know whether
        -- it will be used as a signed or unsigned value (the CmmType doesn't
        -- distinguish between signed & unsigned).
  | CmmFloat Rational Width
  | CmmVec [CmmLit]                       -- Vector literal
  | CmmLabel    CLabel                    -- Address of label
  | CmmLabelOff CLabel Int                -- Address of label + byte offset

        -- Due to limitations in the C backend, the following
        -- MUST ONLY be used inside the info table indicated by label2
        -- (label2 must be the info label), and label1 must be an
        -- SRT, a slow entrypoint or a large bitmap (see the Mangler)
        -- Don't use it at all unless tablesNextToCode.
        -- It is also used inside the NCG during when generating
        -- position-independent code.
  | CmmLabelDiffOff CLabel CLabel Int   -- label1 - label2 + offset

  | CmmBlock {-# UNPACK #-} !BlockId    -- Code label
        -- Invariant: must be a continuation BlockId
        -- See Note [Continuation BlockId] in CmmNode.

  | CmmHighStackMark -- A late-bound constant that stands for the max
                     -- #bytes of stack space used during a procedure.
                     -- During the stack-layout pass, CmmHighStackMark
                     -- is replaced by a CmmInt for the actual number
                     -- of bytes used
  deriving Eq
-- | The 'CmmType' of an expression.
cmmExprType :: DynFlags -> CmmExpr -> CmmType
cmmExprType dflags (CmmLit lit)        = cmmLitType dflags lit
cmmExprType _      (CmmLoad _ rep)     = rep
cmmExprType dflags (CmmReg reg)        = cmmRegType dflags reg
cmmExprType dflags (CmmMachOp op args) = machOpResultType dflags op (map (cmmExprType dflags) args)
cmmExprType dflags (CmmRegOff reg _)   = cmmRegType dflags reg
cmmExprType dflags (CmmStackSlot _ _)  = bWord dflags -- an address
-- Careful though: what is stored at the stack slot may be bigger than
-- an address
-- | The 'CmmType' of a literal.
cmmLitType :: DynFlags -> CmmLit -> CmmType
cmmLitType _      (CmmInt _ width)     = cmmBits  width
cmmLitType _      (CmmFloat _ width)   = cmmFloat width
cmmLitType _      (CmmVec [])          = panic "cmmLitType: CmmVec []"
cmmLitType dflags (CmmVec (l:ls))
  -- Every element of a vector literal must share one element type.
  | all ((`cmmEqType` elemTy) . cmmLitType dflags) ls
              = cmmVec (1 + length ls) elemTy
  | otherwise = panic "cmmLitType: CmmVec"
  where elemTy = cmmLitType dflags l
cmmLitType dflags (CmmLabel lbl)       = cmmLabelType dflags lbl
cmmLitType dflags (CmmLabelOff lbl _)  = cmmLabelType dflags lbl
cmmLitType dflags (CmmLabelDiffOff {}) = bWord dflags
cmmLitType dflags (CmmBlock _)         = bWord dflags
cmmLitType dflags CmmHighStackMark     = bWord dflags
-- | The 'CmmType' of a label's address: a GC pointer for GC-pointer
-- labels, otherwise a plain machine word.
cmmLabelType :: DynFlags -> CLabel -> CmmType
cmmLabelType dflags lbl =
  if isGcPtrLabel lbl then gcWord dflags else bWord dflags

-- | Width of an expression's type.
cmmExprWidth :: DynFlags -> CmmExpr -> Width
cmmExprWidth dflags = typeWidth . cmmExprType dflags
--------
--- Negation for conditional branches

-- | Invert a comparison expression, if it is one that
-- 'maybeInvertComparison' knows how to invert; 'Nothing' otherwise.
maybeInvertCmmExpr :: CmmExpr -> Maybe CmmExpr
maybeInvertCmmExpr (CmmMachOp op args) =
  fmap (\op' -> CmmMachOp op' args) (maybeInvertComparison op)
maybeInvertCmmExpr _ = Nothing
-----------------------------------------------------------------------------
-- Local registers
-----------------------------------------------------------------------------

-- | A local (virtual) register, identified by its 'Unique'.
data LocalReg
  = LocalReg {-# UNPACK #-} !Unique CmmType
    -- ^ Parameters:
    --   1. Identifier
    --   2. Type

-- Eq/Ord/Uniquable all look only at the 'Unique'; the 'CmmType' field
-- is ignored for identity purposes.
instance Eq LocalReg where
  (LocalReg u1 _) == (LocalReg u2 _) = u1 == u2

instance Ord LocalReg where
  compare (LocalReg u1 _) (LocalReg u2 _) = compare u1 u2

instance Uniquable LocalReg where
  getUnique (LocalReg uniq _) = uniq

-- | The type held in a register (local or global).
cmmRegType :: DynFlags -> CmmReg -> CmmType
cmmRegType _      (CmmLocal  reg) = localRegType reg
cmmRegType dflags (CmmGlobal reg) = globalRegType dflags reg

-- | The type held in a local register.
localRegType :: LocalReg -> CmmType
localRegType (LocalReg _ rep) = rep
-----------------------------------------------------------------------------
-- Register-use information for expressions and other types
-----------------------------------------------------------------------------

-- | Sets of registers
--
-- These are used for dataflow facts.  A common operation is taking the
-- union of two RegSets and then asking whether the union is the same as
-- one of the inputs.  UniqSet isn't good here, because sizeUniqSet is
-- O(n) whereas Set.size is O(1), so we use ordinary Sets.
type RegSet r     = Set r
type LocalRegSet  = RegSet LocalReg
type GlobalRegSet = RegSet GlobalReg

emptyRegSet             :: RegSet r
nullRegSet              :: RegSet r -> Bool
elemRegSet              :: Ord r => r -> RegSet r -> Bool
extendRegSet            :: Ord r => RegSet r -> r -> RegSet r
deleteFromRegSet        :: Ord r => RegSet r -> r -> RegSet r
mkRegSet                :: Ord r => [r] -> RegSet r
minusRegSet, plusRegSet, timesRegSet :: Ord r => RegSet r -> RegSet r -> RegSet r
sizeRegSet              :: RegSet r -> Int
regSetToList            :: RegSet r -> [r]

-- All of these are thin wrappers over the corresponding 'Data.Set'
-- operations; note the argument order of extend/delete (set first).
emptyRegSet            = Set.empty
nullRegSet             = Set.null
elemRegSet r set       = Set.member r set
extendRegSet set r     = Set.insert r set
deleteFromRegSet set r = Set.delete r set
mkRegSet xs            = Set.fromList xs
minusRegSet a b        = Set.difference a b
plusRegSet a b         = Set.union a b
timesRegSet a b        = Set.intersection a b
sizeRegSet set         = Set.size set
regSetToList set       = Set.toList set
-- | Things whose evaluation reads registers of type @r@.
class Ord r => UserOfRegs r a where
  foldRegsUsed :: DynFlags -> (b -> r -> b) -> b -> a -> b

-- | 'foldRegsUsed' specialised to 'LocalReg'.
foldLocalRegsUsed :: UserOfRegs LocalReg a
                  => DynFlags -> (b -> LocalReg -> b) -> b -> a -> b
foldLocalRegsUsed = foldRegsUsed

-- | Things whose execution writes (defines) registers of type @r@.
class Ord r => DefinerOfRegs r a where
  foldRegsDefd :: DynFlags -> (b -> r -> b) -> b -> a -> b

-- | 'foldRegsDefd' specialised to 'LocalReg'.
foldLocalRegsDefd :: DefinerOfRegs LocalReg a
                  => DynFlags -> (b -> LocalReg -> b) -> b -> a -> b
foldLocalRegsDefd = foldRegsDefd

-- | Collect the registers used by @e@ that satisfy the predicate.
filterRegsUsed :: UserOfRegs r e => DynFlags -> (r -> Bool) -> e -> RegSet r
filterRegsUsed dflags p e =
    foldRegsUsed dflags
                 (\regs r -> if p r then extendRegSet regs r else regs)
                 emptyRegSet e
-- Base cases: how a single 'CmmReg' uses/defines local and global
-- registers.  Each instance folds over just the matching constructor.
instance UserOfRegs LocalReg CmmReg where
    foldRegsUsed _ f z (CmmLocal reg) = f z reg
    foldRegsUsed _ _ z (CmmGlobal _)  = z

instance DefinerOfRegs LocalReg CmmReg where
    foldRegsDefd _ f z (CmmLocal reg) = f z reg
    foldRegsDefd _ _ z (CmmGlobal _)  = z

instance UserOfRegs GlobalReg CmmReg where
    foldRegsUsed _ _ z (CmmLocal _)    = z
    foldRegsUsed _ f z (CmmGlobal reg) = f z reg

instance DefinerOfRegs GlobalReg CmmReg where
    foldRegsDefd _ _ z (CmmLocal _)    = z
    foldRegsDefd _ f z (CmmGlobal reg) = f z reg

-- A bare register trivially uses/defines itself.
instance Ord r => UserOfRegs r r where
    foldRegsUsed _ f z r = f z r

instance Ord r => DefinerOfRegs r r where
    foldRegsDefd _ f z r = f z r

instance Ord r => UserOfRegs r (RegSet r) where
    foldRegsUsed _ f = Set.fold (flip f)

-- Structural case: an expression uses whatever its sub-parts use.
instance (Ord r, UserOfRegs r CmmReg) => UserOfRegs r CmmExpr where
  -- The (Ord r) in the context is necessary here
  -- See Note [Recursive superclasses] in TcInstDcls
  foldRegsUsed dflags f z e = expr z e
    where expr z (CmmLit _)          = z
          expr z (CmmLoad addr _)    = foldRegsUsed dflags f z addr
          expr z (CmmReg r)          = foldRegsUsed dflags f z r
          expr z (CmmMachOp _ exprs) = foldRegsUsed dflags f z exprs
          expr z (CmmRegOff r _)     = foldRegsUsed dflags f z r
          expr z (CmmStackSlot _ _)  = z

-- Lifting through Maybe and lists.
instance UserOfRegs r a => UserOfRegs r (Maybe a) where
  foldRegsUsed dflags f z (Just x) = foldRegsUsed dflags f z x
  foldRegsUsed _      _ z Nothing  = z

instance UserOfRegs r a => UserOfRegs r [a] where
  foldRegsUsed _ _ set [] = set
  foldRegsUsed dflags f set (x:xs) = foldRegsUsed dflags f (foldRegsUsed dflags f set x) xs

instance DefinerOfRegs r a => DefinerOfRegs r [a] where
  foldRegsDefd _ _ set [] = set
  foldRegsDefd dflags f set (x:xs) = foldRegsDefd dflags f (foldRegsDefd dflags f set x) xs

instance DefinerOfRegs r a => DefinerOfRegs r (Maybe a) where
  foldRegsDefd _      _ set Nothing  = set
  foldRegsDefd dflags f set (Just x) = foldRegsDefd dflags f set x
-----------------------------------------------------------------------------
-- Another reg utility

-- | Does the expression read the given register (directly or in any
-- sub-expression)?  Stack slots and literals never read registers.
regUsedIn :: CmmReg -> CmmExpr -> Bool
regUsedIn _   (CmmLit _)         = False
regUsedIn reg (CmmLoad e _)      = regUsedIn reg e
regUsedIn reg (CmmReg reg')      = reg == reg'
regUsedIn reg (CmmRegOff reg' _) = reg == reg'
regUsedIn reg (CmmMachOp _ es)   = any (regUsedIn reg) es
regUsedIn _   (CmmStackSlot _ _) = False
-----------------------------------------------------------------------------
-- Global STG registers
-----------------------------------------------------------------------------

-- | Whether a value is a GC-managed pointer.
data VGcPtr = VGcPtr | VNonGcPtr deriving( Eq, Show )
        -- TEMPORARY!!!

-----------------------------------------------------------------------------
-- Global STG registers
-----------------------------------------------------------------------------

-- | Classify a 'CmmType' as GC pointer or not.
vgcFlag :: CmmType -> VGcPtr
vgcFlag ty = if isGcPtrType ty then VGcPtr else VNonGcPtr
-- | The global STG machine registers.
data GlobalReg
  -- Argument and return registers
  = VanillaReg                  -- pointers, unboxed ints and chars
        {-# UNPACK #-} !Int     -- its number
        VGcPtr

  | FloatReg            -- single-precision floating-point registers
        {-# UNPACK #-} !Int     -- its number

  | DoubleReg           -- double-precision floating-point registers
        {-# UNPACK #-} !Int     -- its number

  | LongReg             -- long int registers (64-bit, really)
        {-# UNPACK #-} !Int     -- its number

  | XmmReg                      -- 128-bit SIMD vector register
        {-# UNPACK #-} !Int     -- its number

  | YmmReg                      -- 256-bit SIMD vector register
        {-# UNPACK #-} !Int     -- its number

  | ZmmReg                      -- 512-bit SIMD vector register
        {-# UNPACK #-} !Int     -- its number

  -- STG registers
  | Sp                  -- Stack ptr; points to last occupied stack location.
  | SpLim               -- Stack limit
  | Hp                  -- Heap ptr; points to last occupied heap location.
  | HpLim               -- Heap limit register
  | CCCS                -- Current cost-centre stack
  | CurrentTSO          -- pointer to current thread's TSO
  | CurrentNursery      -- pointer to allocation area
  | HpAlloc             -- allocation count for heap check failure

  -- We keep the address of some commonly-called
  -- functions in the register table, to keep code
  -- size down:
  | EagerBlackholeInfo  -- stg_EAGER_BLACKHOLE_info
  | GCEnter1            -- stg_gc_enter_1
  | GCFun               -- stg_gc_fun

  -- Base offset for the register table, used for accessing registers
  -- which do not have real registers assigned to them.  This register
  -- will only appear after we have expanded GlobalReg into memory accesses
  -- (where necessary) in the native code generator.
  | BaseReg

  -- Base Register for PIC (position-independent code) calculations
  -- Only used inside the native code generator.  Its exact meaning differs
  -- from platform to platform (see module PositionIndependentCode).
  | PicBaseReg

  deriving( Show )
-- Hand-written (not derived) because two 'VanillaReg's with the same
-- number compare equal even when their 'VGcPtr' flags differ.
instance Eq GlobalReg where
   VanillaReg i _ == VanillaReg j _ = i==j -- Ignore type when seeking clashes
   FloatReg i == FloatReg j = i==j
   DoubleReg i == DoubleReg j = i==j
   LongReg i == LongReg j = i==j
   XmmReg i == XmmReg j = i==j
   YmmReg i == YmmReg j = i==j
   ZmmReg i == ZmmReg j = i==j
   Sp == Sp = True
   SpLim == SpLim = True
   Hp == Hp = True
   HpLim == HpLim = True
   CCCS == CCCS = True
   CurrentTSO == CurrentTSO = True
   CurrentNursery == CurrentNursery = True
   HpAlloc == HpAlloc = True
   EagerBlackholeInfo == EagerBlackholeInfo = True
   GCEnter1 == GCEnter1 = True
   GCFun == GCFun = True
   BaseReg == BaseReg = True
   PicBaseReg == PicBaseReg = True
   _r1 == _r2 = False
-- Hand-written total order; like 'Eq' it ignores the 'VGcPtr' flag on
-- 'VanillaReg'.
--
-- NOTE(review): in the catch-all clauses below, the LT/GT pair for
-- 'EagerBlackholeInfo' comes *after* the 'BaseReg' pair, so the induced
-- order places EagerBlackholeInfo between BaseReg and PicBaseReg rather
-- than at its declaration position.  It is still a lawful, consistent
-- total order; this only matters if something assumes declaration order.
instance Ord GlobalReg where
   compare (VanillaReg i _) (VanillaReg j _) = compare i j
     -- Ignore type when seeking clashes
   compare (FloatReg i)  (FloatReg  j) = compare i j
   compare (DoubleReg i) (DoubleReg j) = compare i j
   compare (LongReg i)   (LongReg   j) = compare i j
   compare (XmmReg i)    (XmmReg    j) = compare i j
   compare (YmmReg i)    (YmmReg    j) = compare i j
   compare (ZmmReg i)    (ZmmReg    j) = compare i j
   compare Sp Sp = EQ
   compare SpLim SpLim = EQ
   compare Hp Hp = EQ
   compare HpLim HpLim = EQ
   compare CCCS CCCS = EQ
   compare CurrentTSO CurrentTSO = EQ
   compare CurrentNursery CurrentNursery = EQ
   compare HpAlloc HpAlloc = EQ
   compare EagerBlackholeInfo EagerBlackholeInfo = EQ
   compare GCEnter1 GCEnter1 = EQ
   compare GCFun GCFun = EQ
   compare BaseReg BaseReg = EQ
   compare PicBaseReg PicBaseReg = EQ
   compare (VanillaReg _ _) _ = LT
   compare _ (VanillaReg _ _) = GT
   compare (FloatReg _) _ = LT
   compare _ (FloatReg _) = GT
   compare (DoubleReg _) _ = LT
   compare _ (DoubleReg _) = GT
   compare (LongReg _) _ = LT
   compare _ (LongReg _) = GT
   compare (XmmReg _) _ = LT
   compare _ (XmmReg _) = GT
   compare (YmmReg _) _ = LT
   compare _ (YmmReg _) = GT
   compare (ZmmReg _) _ = LT
   compare _ (ZmmReg _) = GT
   compare Sp _ = LT
   compare _ Sp = GT
   compare SpLim _ = LT
   compare _ SpLim = GT
   compare Hp _ = LT
   compare _ Hp = GT
   compare HpLim _ = LT
   compare _ HpLim = GT
   compare CCCS _ = LT
   compare _ CCCS = GT
   compare CurrentTSO _ = LT
   compare _ CurrentTSO = GT
   compare CurrentNursery _ = LT
   compare _ CurrentNursery = GT
   compare HpAlloc _ = LT
   compare _ HpAlloc = GT
   compare GCEnter1 _ = LT
   compare _ GCEnter1 = GT
   compare GCFun _ = LT
   compare _ GCFun = GT
   compare BaseReg _ = LT
   compare _ BaseReg = GT
   compare EagerBlackholeInfo _ = LT
   compare _ EagerBlackholeInfo = GT
-- convenient aliases for the most commonly used global registers,
-- already wrapped as 'CmmReg's
baseReg, spReg, hpReg, spLimReg, nodeReg :: CmmReg
baseReg = CmmGlobal BaseReg
spReg = CmmGlobal Sp
hpReg = CmmGlobal Hp
spLimReg = CmmGlobal SpLim
nodeReg = CmmGlobal node

-- | The node register: 'VanillaReg' 1, flagged as a GC pointer.
node :: GlobalReg
node = VanillaReg 1 VGcPtr
-- | The 'CmmType' held in each global register.  Vanilla registers are
-- GC words or plain words depending on their 'VGcPtr' flag; SIMD
-- registers are vectors of 32-bit lanes; everything else is a word.
globalRegType :: DynFlags -> GlobalReg -> CmmType
globalRegType dflags (VanillaReg _ VGcPtr)    = gcWord dflags
globalRegType dflags (VanillaReg _ VNonGcPtr) = bWord dflags
globalRegType _      (FloatReg _)             = cmmFloat W32
globalRegType _      (DoubleReg _)            = cmmFloat W64
globalRegType _      (LongReg _)              = cmmBits W64
globalRegType _      (XmmReg _)               = cmmVec 4 (cmmBits W32)
globalRegType _      (YmmReg _)               = cmmVec 8 (cmmBits W32)
globalRegType _      (ZmmReg _)               = cmmVec 16 (cmmBits W32)
globalRegType dflags Hp                       = gcWord dflags
                                              -- The initialiser for all
                                              -- dynamically allocated closures
globalRegType dflags _                        = bWord dflags
-- | Is this one of the argument/return registers (as opposed to an STG
-- machine register)?
isArgReg :: GlobalReg -> Bool
isArgReg reg = case reg of
  VanillaReg {} -> True
  FloatReg {}   -> True
  DoubleReg {}  -> True
  LongReg {}    -> True
  XmmReg {}     -> True
  YmmReg {}     -> True
  ZmmReg {}     -> True
  _             -> False
| green-haskell/ghc | compiler/cmm/CmmExpr.hs | bsd-3-clause | 20,742 | 0 | 12 | 5,698 | 4,651 | 2,414 | 2,237 | 344 | 2 |
{-# LANGUAGE JavaScriptFFI, InterruptibleFFI #-}
{-
Test whether throwing exceptions from JavaScript gives the expected result
-}
module Main where
import Control.Concurrent
import qualified Control.Exception as Ex
import System.IO
import GHCJS.Prim
foreign import javascript unsafe
"throw 'unsafe exception';" js_unsafeExcep :: IO ()
foreign import javascript safe
"throw 'safe exception';" js_safeExcep :: IO ()
foreign import javascript interruptible
"throw 'interruptible exception';" js_interruptibleExcep :: IO ()
main :: IO ()
main = do
  -- Unbuffered handles so output from the forked threads appears promptly.
  hSetBuffering stdout NoBuffering
  hSetBuffering stderr NoBuffering
  -- Exercise each FFI safety level in turn.
  testHandle "unsafe" js_unsafeExcep
  testHandle "safe" js_safeExcep
  testHandle "interruptible" js_interruptibleExcep
-- | Run the given action on a forked thread twice: once with no
-- exception handler and once wrapped in 'theHandler'.  The
-- 'threadDelay' after each fork gives the thread time to finish
-- (and print) before the next test starts.
testHandle :: String -> IO () -> IO ()
testHandle descr a = do
  putStrLn ("running " ++ descr ++ " no handler")
  forkIO $ do
    putStrLn "before"
    a
    putStrLn "after"
  threadDelay 500000
  putStrLn ("running " ++ descr ++ " handler")
  forkIO $ do
    putStrLn "before"
    a `Ex.catch` theHandler
    putStrLn "after"
  threadDelay 500000
-- | Catch-all handler: report the caught exception on stdout.
theHandler :: Ex.SomeException -> IO ()
theHandler err = putStrLn message
  where message = "got exception: " ++ show err
| beni55/ghcjs | test/ffi/ffiExceptions.hs | mit | 1,274 | 5 | 10 | 296 | 312 | 148 | 164 | 35 | 1 |
module MouseRegion (T, newRegion, coefficients) where
-- | Where the screen-coordinate origin sits.
-- NOTE(review): 'Origin' is not in the module export list, so outside
-- callers cannot construct the first argument of 'newRegion' — confirm
-- whether that is intentional.
data Origin =
    TopLeft
  | BottomLeft
  deriving (Eq, Show)

-- | A rectangular mouse region with a cached centre point.
data T = T {
  origin   :: Origin,
  width    :: Float,
  height   :: Float,
  center_x :: Float,   -- always width / 2 (set by 'newRegion')
  center_y :: Float    -- always height / 2 (set by 'newRegion')
} deriving (Eq, Show)
-- | Build a region of the given size, pre-computing its centre.
newRegion :: Origin -> Float -> Float -> T
newRegion o w h =
  T { origin   = o
    , width    = w
    , height   = h
    , center_x = half w
    , center_y = half h
    }
  where half v = v / 2.0
-- | Map an integer screen coordinate to a pair of coefficients in
-- roughly [-1, 1], measured from the region centre.  The sign of the
-- vertical coefficient depends on where the origin sits.
coefficients :: T -> (Integer, Integer) -> (Float, Float)
coefficients r (sx, sy) = orient (origin r)
  where
    scaled v c extent = ((v - c) / extent) * 2.0
    ox = scaled (fromIntegral sx) (center_x r) (width r)
    oy = scaled (fromIntegral sy) (center_y r) (height r)
    orient TopLeft    = (-ox, -oy)
    orient BottomLeft = (-ox, oy)
| io7m/jcamera | com.io7m.jcamera.documentation/src/main/resources/com/io7m/jcamera/documentation/haskell/MouseRegion.hs | isc | 743 | 0 | 14 | 222 | 316 | 179 | 137 | 28 | 2 |
module FIFO where
import Control.Monad
import Control.Monad.State
import Data.List
import Data.Maybe
-- | A queue as a pair of stacks: @front@ holds elements ready to
-- dequeue (head first), @back@ holds newly enqueued elements in
-- reverse order.
data FIFO a = FIFO [a] [a]
            deriving Show

-- | The empty queue.
empty :: FIFO a
empty = FIFO [] []

-- | True iff the queue holds no elements (both stacks empty).
isEmpty :: FIFO a -> Bool
isEmpty (FIFO [] []) = True
isEmpty _            = False

-- | O(1): push onto the back stack.
enqueue :: a -> FIFO a -> FIFO a
enqueue x (FIFO front back) = FIFO front (x:back)
-- | Remove the head off the queue.  My type's different from yours
-- because I use Maybe to handle the case where somebody tries to
-- dequeue off an empty FIFO.
--
-- When the front stack is exhausted, the back stack is reversed into
-- the front (amortised O(1) per dequeue).
--
-- Rewritten as total top-level equations: the old version used a case
-- alternative literally named @otherwise@ — a pattern *variable* that
-- shadowed 'Prelude.otherwise' and read like a guard — plus a partial
-- local helper @rotate@ that only matched an empty front.
dequeue :: FIFO a -> Maybe (a, FIFO a)
dequeue (FIFO [] [])     = Nothing
dequeue (FIFO (x:f) b)   = Just (x, FIFO f b)
dequeue (FIFO [] back)   = dequeue (FIFO (reverse back) [])
-- | Elements exit the queue in the order they appear in the list.
fromList :: [a] -> FIFO a
fromList xs = FIFO xs []

-- | Elements appear in the result list in the order they exit the queue.
-- ('unfoldr' keeps calling 'dequeue' until it returns Nothing.)
toList :: FIFO a -> [a]
toList = unfoldr dequeue

-- | Enqueue multiple elements.  Elements exit the queue in the order
-- they appear in xs.
add :: [a] -> FIFO a -> FIFO a
add xs q = foldl' (flip enqueue) q xs
-- | Remove n elements from the queue.  My result type is different
-- from yours, again, because I handle the empty FIFO case.  If you
-- try to remove too many elements, you get a bunch of Nothings at
-- the end of your list.
remove :: Int -> FIFO a -> ([Maybe a], FIFO a)
remove n q = runState (removeM n) q

-- | State monad action to dequeue n elements from the state queue.
removeM :: Int -> State (FIFO a) [Maybe a]
removeM n = replicateM n dequeueM

-- | State monad action to dequeue an element from the state queue.
-- On success the remaining queue becomes the new state.
dequeueM :: State (FIFO a) (Maybe a)
dequeueM = do q <- get
              case dequeue q of
                Just (x, q') -> put q' >> return (Just x)
                Nothing -> return Nothing
| NaevaTheCat/KMC-haskell | src/FIFO.hs | mit | 1,914 | 0 | 13 | 497 | 574 | 295 | 279 | 35 | 3 |
module FRP.Helm.Extras
( near
, wrap
, foldp2
)where
import FRP.Helm (foldp, (<~), (~~), Signal)
{-| Check if two values are within a tolerance -}
near :: (Num a, Ord a) => a -> a -> a -> Bool
near tolerance x y = distance <= tolerance
  where distance = abs (x - y)
{-| Wraps the third argument into the half-open range [a, b). -}
-- BUG FIX: the original downward case computed @c - b - a@, i.e. it
-- subtracted @b + a@ instead of the range size @b - a@.  That is only
-- correct when @a == 0@ (e.g. @wrap 2 10 12@ yielded 8 instead of 4).
-- Both recursive cases now shift by exactly the range size (b - a).
-- Precondition (unchanged from the original): a < b, else this loops.
wrap :: (Ord a, Num a) => a -> a -> a -> a
wrap a b c
  | a <= c && c < b = c
  | c <= a          = wrap a b (c + (b - a))  -- shift up by the range size
  | otherwise       = wrap a b (c - (b - a))  -- shift down by the range size
{-| Fold over two signals.  The two signals are paired pointwise
    ('(,)' lifted with Helm's @<~@/@~~@ applicative operators) and the
    pair is folded with 'foldp', uncurrying the step function. -}
foldp2 :: (a -> b1 -> b -> b) -> b -> Signal a -> Signal b1 -> Signal b
foldp2 fn ini s1 s2 = foldp (uncurry fn) ini ((,) <~ s1 ~~ s2)
| niilohlin/helm-extras | src/FRP/Helm/Extras.hs | mit | 635 | 0 | 10 | 188 | 308 | 163 | 145 | 14 | 1 |
module GenM32 where
import Data.Word
import Data.Monoid
import Data.List
import System.IO
import qualified Data.ByteString as B
import Data.ByteString.Lazy.Builder
import Inst
-- | The 8-byte file signature emitted at the start of every M32 binary
-- (see 'toBinary').
osecpuSignatureM32 :: [Word8]
osecpuSignatureM32 = [0x05, 0xE2, 0x00, 0xCF, 0xEE, 0x7F, 0xF1, 0x88]
-- | Convert an 'Int' to a 'Word32'.  'fromIntegral' is exactly the
-- hand-rolled @fromInteger . toInteger@ it replaces (two's-complement
-- truncation for negative or oversized values).
intToWord32 :: Int -> Word32
intToWord32 = fromIntegral
-- | Base value added to registers, labels and opcodes when encoding
-- them as M32 words (the @0x76@ prefix visible in the output).
offsetSignature :: Word32
offsetSignature = 0x76000000
-- | Operands that can be encoded as a single M32 word.
class EncodeM32 a where
  toM32 :: a -> Word32

-- Registers, bit specs, pointer registers, labels and label options
-- are encoded as their numeric value plus 'offsetSignature';
-- immediates are encoded as their raw value.
-- (Idiom: 'fromIntegral' replaces the hand-rolled
-- @fromInteger . toInteger@ — they are definitionally the same.)
instance EncodeM32 Reg where
  toM32 (Reg r) = fromIntegral r + offsetSignature

instance EncodeM32 BitSpec where
  toM32 (BitSpec r) = fromIntegral r + offsetSignature

instance EncodeM32 Imm where
  toM32 (Imm r) = fromIntegral r

instance EncodeM32 PReg where
  toM32 (PReg p) = fromIntegral p + offsetSignature

instance EncodeM32 Label where
  toM32 (Label l) = fromIntegral l + offsetSignature

instance EncodeM32 LabelOpt where
  toM32 (LabelOpt o) = fromIntegral o + offsetSignature
-- | Tag an opcode/operand word with the M32 signature offset.
-- (Type signature added; every top-level binding should carry one.)
instToM32 :: Word32 -> Word32
instToM32 n = offsetSignature + n

-- | Type-marker word for signed 32-bit integers (type code 0x06).
sInt32Type :: Word32
sInt32Type = instToM32 0x06
-- | Flatten a program to its M32 word stream: each instruction encodes
-- to a fixed word sequence of opcode followed by operands.  Arithmetic
-- and comparison instructions share their layouts via 'genArith' and
-- 'genComp'.
assemble :: Program -> [Word32]
assemble prog = concatMap encode' (instructions prog)
  where
    encode' NOP = [instToM32 0x00]
    encode' (LIMM bit r imm) = [instToM32 0x02,
                                0xFFFFF788,
                                toM32 imm,
                                toM32 r,
                                toM32 bit]
    encode' (LB opt label) = [instToM32 0x01,
                              toM32 label,
                              toM32 opt]
    encode' (PLIMM p label) = [instToM32 0x03,
                               toM32 label,
                               toM32 p]
    encode' (PCP p0 p1) = [instToM32 0x1e,
                           toM32 p1,
                           toM32 p0]
    encode' (CND reg) = [instToM32 0x04, toM32 reg]
    encode' (LMEM0 bit r p) = [instToM32 0x08,
                               toM32 p,
                               sInt32Type,
                               instToM32 0,
                               toM32 r,
                               toM32 bit]
    -- 76000009 r bit p typ 76000000
    encode' (SMEM0 bit r p) = [instToM32 0x09,
                               toM32 r,
                               toM32 bit,
                               toM32 p,
                               sInt32Type,
                               instToM32 0]
    -- 7600000E p1 typ r bit p0
    encode' (PADD bit p0 p1 r) = [instToM32 0x0e,
                                  toM32 p1,
                                  sInt32Type,
                                  toM32 r,
                                  toM32 bit,
                                  toM32 p0]
    encode' (OR  bit r0 r1 r2) = genArith 0x10 bit r0 r1 r2
    encode' (XOR bit r0 r1 r2) = genArith 0x11 bit r0 r1 r2
    encode' (AND bit r0 r1 r2) = genArith 0x12 bit r0 r1 r2
    encode' (SBX bit r0 r1 r2) = genArith 0x13 bit r0 r1 r2
    encode' (ADD bit r0 r1 r2) = genArith 0x14 bit r0 r1 r2
    encode' (SUB bit r0 r1 r2) = genArith 0x15 bit r0 r1 r2
    encode' (MUL bit r0 r1 r2) = genArith 0x16 bit r0 r1 r2
    encode' (SHL bit r0 r1 r2) = genArith 0x18 bit r0 r1 r2
    encode' (SAR bit r0 r1 r2) = genArith 0x19 bit r0 r1 r2
    encode' (DIV bit r0 r1 r2) = genArith 0x1A bit r0 r1 r2
    encode' (MOD bit r0 r1 r2) = genArith 0x1B bit r0 r1 r2
    encode' (CMPE   bit0 bit1 r0 r1 r2) = genComp 0x20 bit0 bit1 r0 r1 r2
    encode' (CMPNE  bit0 bit1 r0 r1 r2) = genComp 0x21 bit0 bit1 r0 r1 r2
    encode' (CMPL   bit0 bit1 r0 r1 r2) = genComp 0x22 bit0 bit1 r0 r1 r2
    encode' (CMPGE  bit0 bit1 r0 r1 r2) = genComp 0x23 bit0 bit1 r0 r1 r2
    encode' (CMPLE  bit0 bit1 r0 r1 r2) = genComp 0x24 bit0 bit1 r0 r1 r2
    encode' (CMPG   bit0 bit1 r0 r1 r2) = genComp 0x25 bit0 bit1 r0 r1 r2
    encode' (TSTZ   bit0 bit1 r0 r1 r2) = genComp 0x26 bit0 bit1 r0 r1 r2
    encode' (TSTNZ  bit0 bit1 r0 r1 r2) = genComp 0x27 bit0 bit1 r0 r1 r2
    encode' BREAK = [0x760000FE, 0x760001FF, 0x76000000]
    -- DATA: opcode, element type, element count, then the raw words.
    encode' (DATA ws) = [instToM32 0x2e,
                         sInt32Type,
                         instToM32 (genericLength ws)
                        ] ++ ws
    -- Shared layout: opcode r1 r2 r0 bit
    genArith code bit r0 r1 r2 = [instToM32 code,
                                  toM32 r1,
                                  toM32 r2,
                                  toM32 r0,
                                  toM32 bit]
    -- Shared layout: opcode r1 r2 bit1 r0 bit0
    genComp code bit0 bit1 r0 r1 r2 = [instToM32 code,
                                       toM32 r1,
                                       toM32 r2,
                                       toM32 bit1,
                                       toM32 r0,
                                       toM32 bit0]
-- NOTE(review): 'offset' duplicates 'offsetSignature' and is not used
-- anywhere in this chunk — confirm it can be removed (the module
-- exports everything, so a caller may still rely on it).
offset :: Word32
offset = 0x76000000

-- | Serialise: the 8-byte file signature followed by each word in
-- big-endian order.
toBinary :: [Word32] -> Builder
toBinary w32s = mconcat (map word8 osecpuSignatureM32 ++ map word32BE w32s)

-- | Assemble the program and write the binary to the given handle.
hAssembleOut :: Handle -> Program -> IO ()
hAssembleOut h p = hPutBuilder h (toBinary (assemble p))
| ryna4c2e/chage | GenM32.hs | mit | 5,259 | 0 | 13 | 2,316 | 1,600 | 808 | 792 | 107 | 30 |
module Data.Integer.Concurrent where
import Control.Applicative
import Control.Concurrent.MVar as MVar
import Control.Concurrent.MVar.Extensions as MVarExt
-- | Create a counter holding the given initial value.
newAtomicInteger :: Integral a => a -> IO (MVar a)
newAtomicInteger = MVar.newMVar

-- | Atomically add @change@; 'fst' of the 'transformMVar' result is
-- returned (presumably the value from *before* the update — confirm
-- against "Control.Concurrent.MVar.Extensions").
getAndAdd :: Integral a => MVar a -> a -> IO a
getAndAdd mvar change = fmap fst (transformMVar mvar (change +))

getAndIncrement :: Integral a => MVar a -> IO a
getAndIncrement mvar = getAndAdd mvar 1

getAndDecrement :: Integral a => MVar a -> IO a
getAndDecrement mvar = getAndAdd mvar (-1)

-- | Atomically add @change@; 'snd' of the 'transformMVar' result is
-- returned (presumably the value from *after* the update).
addAndGet :: Integral a => MVar a -> a -> IO a
addAndGet mvar change = fmap snd (transformMVar mvar (change +))

incrementAndGet :: Integral a => MVar a -> IO a
incrementAndGet mvar = addAndGet mvar 1

decrementAndGet :: Integral a => MVar a -> IO a
decrementAndGet mvar = addAndGet mvar (-1)
| stevedonnelly/haskell | code/Data/Integer/Concurrent.hs | mit | 838 | 0 | 11 | 139 | 343 | 179 | 164 | 18 | 1 |
module Naming5 where
-- Exercises name resolution under shadowing: the local 'const' in the
-- where clause shadows Prelude's 'const' and returns its *second*
-- argument, so 'inner a' evaluates to 'a' (consistent with its
-- signature).  NOTE(review): this module appears to be a
-- type-checker/naming test fixture — the odd shadowing looks
-- intentional, so do not "clean it up".
outer b = b
  where
    inner :: a -> a
    inner a = const b a
    const _ b = b
| Lemmih/haskell-tc | tests/Naming5.hs | mit | 101 | 0 | 7 | 36 | 46 | 24 | 22 | 5 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
module Database.Persist.Class.DeleteCascade
( DeleteCascade (..)
, deleteCascadeWhere
) where
import Database.Persist.Class.PersistStore
import Database.Persist.Class.PersistQuery
import Database.Persist.Class.PersistEntity
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
-- | Entities whose deletion must also delete the rows that reference
-- them.  The entity's backend and the monad's backend must coincide
-- (enforced by the equality constraint).
class (PersistStore m, PersistEntity a, PersistEntityBackend a ~ PersistMonadBackend m) => DeleteCascade a m where
    deleteCascade :: Key a -> m ()

-- | Select every key matching the filters and cascade-delete each one,
-- streaming the keys through a conduit ('C.$$' / 'CL.mapM_').
deleteCascadeWhere :: (DeleteCascade a m, PersistQuery m)
                   => [Filter a] -> m ()
deleteCascadeWhere filts = selectKeys filts [] C.$$ CL.mapM_ deleteCascade
| gbwey/persistentold | persistent/Database/Persist/Class/DeleteCascade.hs | mit | 709 | 0 | 9 | 116 | 180 | 102 | 78 | 15 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Rules where
{-
some kind of chess thingy
-}
import Data.Array.IArray
import Data.List (groupBy, sortBy)
import Data.List.Split (splitOn)
import Data.Text (strip)
import Text.Read (readMaybe)
import Data.Tuple (swap)
import Data.Char (ord, chr, toLower)
import qualified Data.Function as Func (on)
import Data.Maybe
import Data.Default
import qualified Data.Map as Map
import Control.Monad.Writer.Lazy
import Debug.Trace (trace)
import Position
import Stringboxer
-- | The board: an array indexed by tile position.
newtype Board = B (Array TPos Tile)

instance Show Board where
  -- Renders the board boxed (via 'box'): first row is rank digits
  -- 1..8, then one row per file labelled A.., tiles grouped and sorted
  -- by the second component of their position.
  show (B board) = box . (:) (' ': ['1'..'8']) . mapWC (concatMap pp) ['A'..]
    . groupsort $ board
    where
      groupsort = groupBy ((==) `Func.on` snd . fst)
        . sortBy (compare `Func.on` snd . fst)
        . assocs
      -- Map with a counter
      mapWC f cs = map (\(a,b) -> a:f b) . zip cs
      -- Empty tiles draw as filled/hollow squares by tile colour;
      -- occupied tiles draw their piece.
      pp ((c, i), tile) = case piece tile of
        None -> case colour tile of
          Black -> "■"
          White -> "□"
        p -> show p
-- | Full game state.
data Chess = Chess {
    board :: Board,
    turn :: Colour,
    flags :: Map.Map (Colour, String) Bool,
    moves :: [Move] -- filo stack; move m, of moves = (m:ms), is always the
                    -- latest move
  }

instance Show Chess where
  show b = "It's " ++ show (turn b) ++ "s turn and the game has so far had "
         ++ show (length $ moves b) ++ " moves.\n" ++ show (board b)

-- White moves first; no moves played, no flags set.
-- NOTE(review): 'newBoard' is defined elsewhere in this file (not
-- visible in this chunk).
instance Default Chess where
  def = Chess {board= newBoard, turn= White, moves= [], flags = Map.empty }
-- Ranks of chess pieces
data Rank = Turd | King | Queen | Rook | Bishop | Knight | Pawn
          deriving (Show, Eq)

-- Colour of chess pieces
data Colour = White | Black deriving (Show, Eq, Ord)

-- A piece: a coloured rank, or 'None' for an empty tile
data Piece = P Colour Rank | None deriving Eq

-- Pieces render as their Unicode chess glyphs.
instance Show Piece where
  -- maybe im needing later
  show (P _ Turd) = "💩"
  show None = " "
  show (P colour rank) = case colour of
    Black -> case rank of
      Knight -> "♞"
      Rook -> "♜"
      Bishop -> "♝"
      Pawn -> "♟"
      King -> "♚"
      Queen -> "♛"
    White -> case rank of
      Knight -> "♘"
      Rook -> "♖"
      Bishop -> "♗"
      Pawn -> "♙"
      King -> "♔"
      Queen -> "♕"
-- | One square of the board: its own colour, its position, and the
-- piece standing on it ('None' if empty).
data Tile = T {
  colour :: Colour,
  pos :: TPos,
  piece :: Piece
}

-- A tile shows as its piece only.
instance Show Tile where
  show = show . piece
-- | 'show', lower-cased.
shlow :: Show a => a -> String
shlow x = map toLower (show x)
-- | A move: from one position to another.
data Move = M TPos TPos
          deriving Show

-- | Get piece on a certain tile of the board
getPiece :: Board -> TPos -> Piece
getPiece (B tiles) p = piece (tiles ! p)

-- | Predicate for position being on the board
inBoardRange :: TPos -> Bool
inBoardRange (a, b) = onBoard a && onBoard b
  where onBoard i = 1 <= i && i <= 8
{- TODO: make and check the ruleset
Data needed for decision
* Move: Position to another Position
* Rank: Knight | Rook | Bishop | Pawn | King | Queen | Turd
* Colour: White | Black
Legend: Verizontal = Horizontal or vertical
Common rules:
* Pieces, except the Knight, can't generally move across other pieces.
* Pieces can't land on friendly pieces.
* Pieces can't exist outside the board
Rules by rank:
* Knights can move 2 or 1 steps in one verizontal direction
and then 1 or 2 steps steps orthogonal to the steps before
* Knights aren't blocked by allied pieces, but may not land
on a friendly piece
* Rooks can move any steps in a verizontal direction
* Bishops can move any steps in a diagonal direction
* Queens can move as rooks and bishops
* Kings can move as queens but just one step
unless they're doing a castling
* Pawns can move just one step, but diagonally if there's a piece to catch
pawns cannot move forward if another piece is blocking the way.
* Pawns may move two steps forward on the first turn
Checking rules:
* A king is put in check when it may be taken by an opposing piece
the next turn.
* When a king is in check, it may only do moves that removes the threat
by either
** Block the threat
** Remove the threat
** Move to a non-threatened space
* If none of the above is possible, it's a check mate and the opposing player
has won the game.
Castling rules:
* Only a king may perform a castling on a rook
* Pieces affected by the castling may not have moved before
* No pieces may stand between the king and the rook
* The king may not currently be in check, land in check or pass any tile
that would put them in check
__En Passant Rule__
╒═╤═╕ ╒═╤═╕ ╒═╤═╕
│♟│ │ │♟│ │ │♟│ │
├─┼─┤ ├─┼─┤ ├─┼─┤
│ │♟│ => │♙│♟│ => │ │ │
├─┼─┤ ├─┼─┤ ├─┼─┤
│ │ │ │ │ │ │♟│ │
├─┼─┤ ├─┼─┤ ├─┼─┤
│♙│ │ │ │ │ │ │ │
╘═╧═╛ ╘═╧═╛ ╘═╧═╛
En passant:
* A pawn that moved two steps may be a victim of an en passant as
the opposing player may the next turn attack the piece by moving to
the tile between the players two positions. This move is only valid the
turn after the pawn victim moved.
Pawn promotion:
* A pawn reaching the eight rank (the tile row at the end of the board)
may be promoted to another piece. The piece may be converted to a queen,
bishop, rook or knight of the same colour. There is no limit on how many
pieces of a kind there may be.
-}
-- type Step = (Int, Int)
-- type WalkPredicate = TPos -> Step -> Bool

-- | All candidate destinations for the piece standing on @pos@,
-- wrapped as 'Move's and clipped to the board.
--
-- NOTE(review): the walking pieces (bishop/rook/queen) are cut only by
-- 'pred', which checks board bounds — blocking/capture is not enforced
-- for them here.  Castling and check rules for the king are stubbed
-- (the second 'tell' is empty).  Also, 'pawnThatMovedLastTurn' calls
-- @head $ moves s@, which is partial: it crashes if no move has been
-- played yet — confirm and guard before the first turn.
movesOf :: Chess -> TPos -> [Move]
movesOf s pos@(x,y) = wrap $ case getPiece (board s) pos of
  None -> []
  P col rank -> case rank of
    Turd -> [(x',y') | x' <- [1..8], y' <- [1..8]]
    Knight -> let incrs = [1,2,-1,-2]
              in filter (\p -> not $ ownerEq s p col)
                 [(x+x',y+y') | x' <- incrs, y' <- incrs, abs y' /= abs x']
    Bishop -> takeWhileAhead (pred col)
              $ steps (anyway $ Incs [(1, 1)]) pos
    Rook -> takeWhileAhead (pred col)
            $ steps (anyway $ Incs [(1, 0), (0,1)]) pos
    Queen -> takeWhileAhead (pred col)
             $ steps (anyway $ Incs [(1,0), (0,1), (1,1)]) pos
    King ->
      let kingHasMoved = const False
          hasBeenChecked = False
      in execWriter $ do
        -- Basic king movement
        tell [(a', b') | a' <- [x-1..x+1], b' <- [y-1..y+1], (a', b') /= (x, y)]
        -- Castling
        tell []
    Pawn ->
      let (start, inc) = setupPawn col
          enemy = opponent col
          (oppStart, oInc) = setupPawn enemy
          hasEnemy ep = ownerEq s ep enemy
          pawnThatMovedLastTurn p = case head $ moves s of
            M from to@(a,b) | p == to && ownerEq s to enemy
              -> from == (a, oppStart)
            _ -> False
      in execWriter $ do
        -- Basic movement
        tell [(x, y `inc` 1)]
        -- May move one more step if it's in starting position
        tell $ if start /= y then [] else [(x, y `inc` 2)]
        -- May move diagonally if there's an enemy piece there
        tell $ filter hasEnemy [(x + i, y `inc` 1) | i <- [-1, 1]]
        -- Enforcing en passant rules
        tell
          $ map (\(a,b) -> (a, b `inc` 1))
          $ filter pawnThatMovedLastTurn [(x + i, y) | i <- [-1, 1]]
  where
    -- Final wrap of the move check
    wrap = map (M pos) . filter inBoardRange
    -- | takeWhile but with the predicate of the element tupled with its
    -- | lookahead
    takeWhileAhead p xs = map fst $ takeWhile p (zip xs $ tail xs)
    -- | predicate for walking pieces like rook, queen and bishop
    pred col (position, next) = inBoardRange position
    -- Pawn start rank and forward direction per colour.
    setupPawn Black = (2, (+))
    setupPawn White = (7, (-))
-- | Whether the given colour's king is currently in check.
-- TODO(review): not implemented yet — evaluating this will crash.
checked :: Colour -> Board -> Bool
checked = undefined
-- | The colour playing against the given colour.
opponent :: Colour -> Colour
opponent c = case c of
  White -> Black
  Black -> White
-- | The colour of the piece standing on a tile, or 'Nothing' when the
-- position is off the board or the tile is empty.
owner :: Board -> TPos -> Maybe Colour
owner (B b) p
  | inBoardRange p = case piece (b ! p) of
      None  -> Nothing
      P c _ -> Just c
  | otherwise = Nothing
-- | Does the piece at the given position belong to the given colour?
-- Off-board and empty tiles answer 'False'.
ownerEq :: Chess -> TPos -> Colour -> Bool
ownerEq c p col = owner (board c) p == Just col
-- | Moves a piece on the board, should only be done after validMove
-- TODO(review): not implemented yet — evaluating this will crash.
movePiece :: Board -> Move -> Board
movePiece (B board) (M p p') = undefined
-- | Parse a move written as @\"(x,y) to (x,y)\"@.  Any malformed input
-- (wrong word count, bad tuple syntax, non-numeric coordinates) yields
-- 'Nothing'.
strToMove :: String -> Maybe Move
strToMove input = case splitOn " " input of
  [from, _keyword, to] -> M <$> parsePos from <*> parsePos to
  _ -> Nothing
  where
    -- Strip the surrounding parentheses from a tuple literal.
    dropParens = filter (`notElem` "()")
    -- Read a single "(a,b)" coordinate pair.
    parsePos txt = case splitOn "," (dropParens txt) of
      [a, b] -> (,) <$> readMaybe a <*> readMaybe b
      _ -> Nothing
-- | Build the tile for a board position, with its checkerboard colour
-- and the piece that starts the game there.
newTile :: TPos -> Tile
newTile p =
  T { colour = tileColour p
    , pos    = p
    , piece  = startPiece p
    }
  where
    -- Squares whose coordinates differ in parity are black.
    tileColour (ch, i)
      | even ch /= even i = Black
      | otherwise         = White
-- | The piece occupying a position in the opening setup: back ranks on
-- rows 1 and 8, pawns on rows 2 and 7, empty tiles in between.
startPiece :: TPos -> Piece
startPiece (ch, i)
  | i == 1           = P White (lineup ch)
  | i == 2           = P White Pawn
  | i == 7           = P Black Pawn
  | i == 8           = P Black (lineup ch)
  | i >= 3 && i <= 6 = None
  | otherwise        = error $ "Out of boards at " ++ show ch ++ show i
-- Back-rank piece for a given file (1..8).  Intentionally partial:
-- callers only ever pass files of a standard 8-wide board, so there is
-- no catch-all case.
lineup c | c `elem` [1,8] = Rook
         | c `elem` [2,7] = Knight
         | c `elem` [3,6] = Bishop
         | c == 5 = King
         | c == 4 = Queen
-- | The opening board: an 8x8 array of freshly initialised tiles.
newBoard :: Board
newBoard = B (array boardBounds tiles)
  where
    boardBounds = ((1, 1), (8, 8))
    tiles = [ (p, newTile p) | ch <- [1 .. 8], i <- [1 .. 8], let p = (ch, i) ]
| nyson/fess | Rules.hs | mit | 9,921 | 9 | 29 | 3,105 | 2,682 | 1,444 | 1,238 | -1 | -1 |
-- | The simplest possible datatype: one type with a single value.
module Trivial where
-- | A datatype with exactly one nullary constructor.
data Trivial =
  Trivial'
-- There is only one value, so equality is trivially reflexive.
instance Eq Trivial where
Trivial' == Trivial' = True | candu/haskellbook | ch6/trivial.hs | mit | 104 | 0 | 6 | 21 | 30 | 16 | 14 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
module Web.Markury.Action.Util where
import Web.Markury.Model.DB
import Web.Markury.Model.Input
import Control.Monad.IO.Class ( MonadIO )
import Control.Monad.Logger ( NoLoggingT, runNoLoggingT )
import Control.Monad.Trans.Resource ( ResourceT, runResourceT )
import Database.Persist.Sql ( SqlBackend, SqlPersistT, runSqlConn )
import Text.Blaze.Html ( Html )
import Text.Blaze.Html.Renderer.Utf8 ( renderHtml )
import Web.Spock.Safe
-- | Run a persistent SQL action on the connection managed by Spock,
-- discarding log output and releasing resources when done.
runSql :: (HasSpock m, SpockConn m ~ SqlBackend) => SqlPersistT (NoLoggingT (ResourceT IO)) a -> m a
runSql action = runQuery (runResourceT . runNoLoggingT . runSqlConn action)
-- | Send a blaze 'Html' value as the (lazy UTF-8) response body.
renderBlaze :: MonadIO m => Html -> ActionT m a
renderBlaze markup = lazyBytes (renderHtml markup)
| y-taka-23/markury | src/Web/Markury/Action/Util.hs | mit | 772 | 0 | 11 | 101 | 221 | 130 | 91 | 16 | 1 |
module Dotfiles.CommandsSpec where
import Control.Monad ((>=>))
import Data.List (sort)
import Test.Hspec
import Dotfiles
import Dotfiles.Commands
import Dotfiles.Config
import Dotfiles.Utils
import SpecHelper
-- re-read config
-- | Re-read the environment from disk before running a command, so the
-- command always operates on the latest on-disk state.
cmd :: Env -> Command -> Args -> IO ()
cmd env command args = do
  freshEnv <- readEnv (envRoot env)
  runCommand freshEnv command args
-- read names only
-- | Read just the (sorted) dotfile names from the config on disk.
readCfg :: Env -> IO [String]
readCfg env = fmap (sort . dfNames) (readConfig (envCfgPath env))
-- | Fixture dotfile names used throughout this spec; kept sorted so they
-- compare directly against 'readCfg' output.
cfg :: [String]
cfg = sort ["~/first", "~/second", "~/third", "~/fourth", "~/fifth"]
-- | Run a spec action inside a temporary environment whose root has been
-- populated with files named after the entries in 'cfg'.
--
-- Fix: the previous @(createFiles >> return) >=> action@ sequenced
-- 'createFiles' with @(>>)@ in the reader monad for functions, where the
-- first computation's "effect" is simply discarded — so the fixture files
-- were never written.  Sequence the IO explicitly instead.
withPopulatedEnv :: (Env -> IO ()) -> IO ()
withPopulatedEnv action = withEnv (\env -> createFiles env >> action env)
  where
    -- Write each fixture name into a file of the same name under the root.
    createFiles env =
      mapM_ (\fn -> writeFile (normalize (envRoot env) fn) fn) cfg
-- | End-to-end tests for the dotfiles commands, each running inside a
-- fresh populated environment (see 'withPopulatedEnv').  'head'/'tail'
-- on 'cfg' are safe here because 'cfg' is a non-empty constant.
spec :: Spec
spec = do
  describe "Commands" $ do
    around withPopulatedEnv $ do
      -- A fresh env has an empty config but all app paths created.
      it "env init" $ \env -> do
        readCfg env `shouldReturn` []
        exists (envAppDir env) `shouldReturn` True
        exists (envCfgPath env) `shouldReturn` True
        exists (envStorage env) `shouldReturn` True
      it "runs `add` command" $ \env -> do
        cmd env addDotfiles [head cfg]
        readCfg env `shouldReturn` [head cfg]
      -- Forgetting one name and syncing leaves the rest intact.
      it "runs `forget` command" $ \env -> do
        cmd env addDotfiles cfg
        readCfg env `shouldReturn` cfg
        cmd env forgetDotfiles [head cfg]
        cmd env syncDotfiles []
        readCfg env `shouldReturn` tail cfg
      -- Writing the config by hand and syncing must round-trip the names.
      it "runs `sync` command" $ \env -> do
        writeConfig
          (envCfgPath env)
          Config
          { appDir = Just $ denormalize (envRoot env) (envAppDir env)
          , dfNames = cfg
          }
        cmd env syncDotfiles []
        readCfg env `shouldReturn` cfg
   -- it "runs `install` with git repo" $ \env -> do
   --   rm (envAppDir env)
   --   rm (envCfgPath env)
   --   runCommand env install ["https://github.com/ilya-yurtaev/dotfiles"]
   --   exists (envCfgPath env) `shouldReturn` True
| ilya-yurtaev/hdotfiles | tests/Dotfiles/CommandsSpec.hs | mit | 1,999 | 0 | 23 | 530 | 619 | 317 | 302 | 46 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module GhostLang.InterpreterTests
( oneLevelCallNoParamsPattern
, oneLevelCallOneParamPattern
, localScopeOneParamPattern
, twoLevelTwoParamsPattern
, longChainTwoParamsPattern
) where
import Control.Concurrent.STM (newTVarIO, readTVarIO)
import GhostLang.RuntimeState ( Counter (..)
, RuntimeState (..)
, defaultRuntimeState
, emptyCounter
, getProcCalls
, getTotalProcCalls
)
import GhostLang.Interpreter.InstructionSet (InstructionSet)
import GhostLang.InterpreterGenerators (TestInstrSet (..))
import GhostLang.Interpreter (runPattern')
import GhostLang.Types ( Value (..)
, Procedure (..)
, Pattern (..)
, Operation (..)
)
import Test.HUnit
import Text.Parsec.Pos (initialPos)
-- | Specific test case with a procedure call with no parameters.
-- One call to "pr1" containing a 5-iteration loop must register one
-- procedure call, one loop command and five instruction invocations.
oneLevelCallNoParamsPattern :: Assertion
oneLevelCallNoParamsPattern = do
  let p = Pattern (initialPos "") "pa1" 1
          [ Call (Procedure "pr1" []
                  [ Loop (Literal 5)
                    [ Invoke Instr1 ]
                  ]) []
          ]
  counter <- runWithInputCounter p
  1 @=? getProcCalls "pr1" counter
  1 @=? getTotalProcCalls counter
  1 @=? loopCmds counter
  5 @=? instrInvoked counter
-- | Specific test case with a procedure call with one parameter.
oneLevelCallOneParamPattern :: Assertion
oneLevelCallOneParamPattern = do
let p = Pattern (initialPos "") "pa1" 1
[ Call (Procedure "pr1" ["iterations"]
[ Loop (Stored "iterations")
[ Invoke Instr1 ]
]) [ Literal 5 ]
]
counter <- runWithInputCounter p
1 @=? getProcCalls "pr1" counter
1 @=? getTotalProcCalls counter
1 @=? loopCmds counter
5 @=? instrInvoked counter
-- | Specific test case to verify local scopes for each procedure
-- level. Using the same name in different procedured, but different
-- values, shall yield different values.
localScopeOneParamPattern :: Assertion
localScopeOneParamPattern = do
let proc2 = Procedure "pr2" ["iterations"]
[ Loop (Stored "iterations") [ Invoke Instr1 ]
]
proc1 = Procedure "pr1" ["iterations"]
[ Call proc2 [ Literal 10 ]
, Loop (Stored "iterations") [ Invoke Instr2 ]
]
p = Pattern (initialPos "") "pa1" 1
[ Call proc1 [ Literal 5 ]
]
counter <- runWithInputCounter p
1 @=? getProcCalls "pr1" counter
1 @=? getProcCalls "pr2" counter
2 @=? loopCmds counter
-- The number of instructions invoked shall be 15 if the scoping is
-- working correctly.
15 @=? instrInvoked counter
-- | Specific test to verify that more than one parameter is handled
-- correctly.
twoLevelTwoParamsPattern :: Assertion
twoLevelTwoParamsPattern = do
let proc3 = Procedure "pr3" [] []
proc2 = Procedure "pr2" [] []
proc1 = Procedure "pr1" ["iterationsX", "iterationsY"]
[ Loop (Stored "iterationsX") [ Call proc2 [] ]
, Loop (Stored "iterationsY") [ Call proc3 [] ]
]
p = Pattern (initialPos "") "pa1" 1
[ Call proc1 [ Literal 5, Literal 10 ]
] :: Pattern TestInstrSet
counter <- runWithInputCounter p
1 @=? getProcCalls "pr1" counter
5 @=? getProcCalls "pr2" counter
10 @=? getProcCalls "pr3" counter
2 @=? loopCmds counter
-- | Specific test to verify a long chain of procedure calls and that
-- argument values are propagated as expected.
longChainTwoParamsPattern :: Assertion
longChainTwoParamsPattern = do
let proc6 = Procedure "pr6" [] []
proc5 = Procedure "pr5" [] []
proc4 = Procedure "pr4" ["iterationsX", "iterationsD"]
[ Loop (Stored "iterationsX") [ Call proc5 [] ]
, Loop (Stored "iterationsD") [ Call proc6 [] ]
]
proc3 = Procedure "pr3" ["iterationsX", "iterationsC"]
[ Call proc4 [ Stored "iterationsX", Stored "iterationsC" ]
]
proc2 = Procedure "pr2" ["iterationsX", "iterationsB"]
[ Call proc3 [ Stored "iterationsX", Stored "iterationsB"]
]
proc1 = Procedure "pr1" ["iterationsX", "iterationsA"]
[ Call proc2 [ Stored "iterationsX", Stored "iterationsA"]
]
p = Pattern (initialPos "") "pa1" 1
[ Call proc1 [ Literal 5, Literal 10 ]
] :: Pattern TestInstrSet
counter <- runWithInputCounter p
1 @=? getProcCalls "pr1" counter
1 @=? getProcCalls "pr2" counter
1 @=? getProcCalls "pr3" counter
1 @=? getProcCalls "pr4" counter
5 @=? getProcCalls "pr5" counter
10 @=? getProcCalls "pr6" counter
2 @=? loopCmds counter
-- | Execute a pattern against a fresh runtime state carrying a single,
-- initially-empty counter, and return the counter's final value.
runWithInputCounter :: InstructionSet a => Pattern a -> IO Counter
runWithInputCounter pat = do
  counterVar <- newTVarIO emptyCounter
  baseState  <- defaultRuntimeState
  runPattern' pat $ baseState { counters = [counterVar] }
  readTVarIO counterVar
| kosmoskatten/ghost-lang | ghost-lang/test/GhostLang/InterpreterTests.hs | mit | 5,241 | 0 | 18 | 1,597 | 1,267 | 639 | 628 | 106 | 1 |
module Y2016.M08.D31.Exercise where
import Crypto.Hash
import qualified Data.ByteString.Lazy.Char8 as BL
{--
We'll look at Merkle trees from the tweet we saw yesterday on @1HaskellADay
from Carlos Galdino @carlosgaldino.
http://blog.carlosgaldino.com/merkle-trees.html
Also, Merkle trees are used in the Blockchain (which BitCoin uses), so here's
an article on that.
http://chimera.labs.oreilly.com/books/1234000001802/ch07.html#merkle_trees
We're not going to declare and construct Merkle trees today, what we are
going to do is to get warmed up with hashing functions, specifically the
SHA256 hashing function.
Define a function that takes a string and hashes it to an SHA256 digest:
--}
-- Hash any Show-able value via its string rendering.
-- Left 'undefined' on purpose: completing it is the exercise.
hashShow :: Show a => a -> Digest SHA256
hashShow = undefined
-- actually, hashShow works on anything that has a String-representation.
-- Hash the following strings:
-- Sample inputs for 'hashShow' (note the second contains non-ASCII π).
hashme :: [String]
hashme = ["I like to move it, move it!", "π is 3.14159265358979323846..."]
-- what are the SHA256 hashes of the above strings?
{--
So, here's the thing. Bitcoin doubly hashes the string, or, more correctly,
it hashes the exchange in the block, then it hashes that hash. Let's do the
same thing:
--}
-- Hash a digest again (SHA256 of a SHA256), as Bitcoin does for blocks.
-- Left 'undefined' on purpose: completing it is the exercise.
hashhash :: Digest SHA256 -> Digest SHA256
hashhash = undefined
-- note that the hash of a hash is a hash. join function on monad, anyone?
-- So: hash the above strings, then hash the hashes. Verify that the hash
-- of a hash is not the original hash.
{--
So, here's the other thing. The Merkle tree's data nodes (leaf nodes) hash the
data. Great, but nodes that contain (two) child nodes contain no data but
do contain a hash that is the hash of the concatenation of the hashes of its
(two) child nodes. Let's do that. Concatenate two hashes and then hashhash the
resulting String ... result.
--}
-- Combine two child digests as a Merkle interior node does: concatenate
-- them, then double-hash the result.
-- Left 'undefined' on purpose: completing it is the exercise.
childrenHash :: Digest SHA256 -> Digest SHA256 -> Digest SHA256
childrenHash = undefined
-- Take the two hashed hashes from the above strings, concatenate them, then
-- hash hash that concatenation (of the two hash hashes). What is your result?
| geophf/1HaskellADay | exercises/HAD/Y2016/M08/D31/Exercise.hs | mit | 2,039 | 0 | 7 | 345 | 124 | 75 | 49 | 11 | 1 |
------------------------------------------------------------------------------
module Network.HTTP.Media.Tests (tests) where
import Control.Monad (join, replicateM, (>=>))
import Data.Foldable (foldlM)
import Data.Function (on)
import Data.List (nubBy)
import Data.Map (empty)
import Data.Monoid ((<>))
import Data.Word (Word16)
import Test.QuickCheck
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.QuickCheck (testProperty)
import Network.HTTP.Media hiding (parameters,
subType)
import Network.HTTP.Media.Gen (padString)
import Network.HTTP.Media.MediaType.Gen
import Network.HTTP.Media.MediaType.Internal
import Network.HTTP.Media.Quality
------------------------------------------------------------------------------
tests :: [TestTree]
tests =
[ testParse
, testMatchAccept
, testMapAccept
, testMatchContent
, testMapContent
, testMatchQuality
, testMapQuality
]
------------------------------------------------------------------------------
-- | Properties for 'parseQuality': rendering a list of media types (with
-- or without q-values and extension parameters, padded with arbitrary
-- whitespace) and parsing it back must round-trip.
testParse :: TestTree
testParse = testGroup "parseQuality"
    [ testProperty "Without quality" $ do
        media <- medias
        rendered <- padConcat (return . renderHeader) media
        return $ parseQuality rendered === Just (map maxQuality media)
    , testProperty "With quality" $ do
        media <- qualities
        rendered <- padConcat padQuality media
        return $ parseQuality rendered === Just media
    , testProperty "With extensions" $ do
        media <- qualities
        rendered <- padConcat (padQuality >=> padExtensions) media
        return $ parseQuality rendered === Just media
    ]
  where
    medias = listOf1 genMediaType
    -- Attach a random q-value in [0, 1000] to each generated media type.
    qualities = medias >>= mapM (flip fmap (choose (0, 1000)) . Quality)
    -- Render each element and join them with (whitespace-padded) commas.
    -- 'head'/'tail' are safe: 'listOf1' guarantees a non-empty list.
    padConcat f l = flip (foldlM (padComma f)) (tail l) =<< f (head l)
    padComma f a b = pad a <$> padString "," <*> f b
    -- Render "media;q=value" with padded separators.
    padQuality qMedia = do
        semi <- padString ";"
        let d = renderHeader (qualityData qMedia)
            v = showQ (qualityValue qMedia)
        return $ d <> semi <> "q=" <> v
    -- Append randomly generated extension parameters after the q-value.
    padExtensions s = genParameters >>= fmap (s <>) . renderParameters
    pad a s b = a <> s <> b
------------------------------------------------------------------------------
-- | 'matchAccept' must satisfy the shared matching laws, fed a rendered
-- Accept header (see 'testMatch').
testMatchAccept :: TestTree
testMatchAccept = testMatch "Accept" matchAccept renderHeader
------------------------------------------------------------------------------
-- | 'mapAccept' must satisfy the shared mapping laws, fed a rendered
-- Accept header (see 'testMap').
testMapAccept :: TestTree
testMapAccept = testMap "Accept" mapAccept renderHeader
------------------------------------------------------------------------------
testMatchContent :: TestTree
testMatchContent = testGroup "matchContent"
[ testProperty "Matches" $ do
media <- genMediaType
return $ matchContent [media] (renderHeader media) === Just media
, testProperty "Nothing" $ do
content <- genMediaType
parsers <- filter (not . matches content) <$> genServer
return $ matchContent parsers (renderHeader content) === Nothing
, testProperty "Against */*" $ do
media <- genMediaType
return $
matchContent [anything] (renderHeader media) === Just anything
, testProperty "Against type/*" $ do
media <- genMediaType
let sub = subStarOf media
return $ matchContent [sub] (renderHeader media) === Just sub
]
------------------------------------------------------------------------------
testMapContent :: TestTree
testMapContent = testGroup "mapContent"
[ testProperty "Matches" $ do
media <- genMediaType
return $ mapContent [(media, ())] (renderHeader media) === Just ()
, testProperty "Nothing" $ do
content <- genMediaType
parsers <- join zip . filter (not . matches content) <$> genServer
return $ mapContent parsers (renderHeader content) === Nothing
]
------------------------------------------------------------------------------
testMatchQuality :: TestTree
testMatchQuality = testMatch "Quality" matchQuality id
------------------------------------------------------------------------------
testMapQuality :: TestTree
testMapQuality = testMap "Quality" mapQuality id
------------------------------------------------------------------------------
testMatch
:: String
-> ([MediaType] -> a -> Maybe MediaType)
-> ([Quality MediaType] -> a)
-> TestTree
testMatch name match qToI = testGroup ("match" ++ name)
[ testProperty "Most specific" $ do
media <- genConcreteMediaType
let client = qToI $ map maxQuality
[ MediaType "*" "*" empty
, media { subType = "*" }
, media { parameters = empty }
, media
]
return $ match [media] client === Just media
, testProperty "Nothing" $ do
client <- listOf1 genConcreteMediaType
server <- filter (not . flip any client . matches) <$> genServer
return $ match server (qToI $ map maxQuality client) === Nothing
, testProperty "Left biased" $ do
server <- genNubServer
let client = qToI $ map maxQuality server
return $ match server client === Just (head server)
, testProperty "Against */*" $ do
server <- genNubServer
let stars = "*/*" :: MediaType
return $ match server (qToI [maxQuality stars]) ===
Just (head server)
, testProperty "Against type/*" $ do
server <- genNubServer
let client = qToI [maxQuality (subStarOf $ head server)]
return $ match server client === Just (head server)
, testQuality match qToI
]
------------------------------------------------------------------------------
testQuality
:: ([MediaType] -> a -> Maybe MediaType)
-> ([Quality MediaType] -> a)
-> TestTree
testQuality match qToI = testGroup "Quality"
[ testProperty "Highest quality" $ do
server <- genServer
qs <- replicateM (length server) $ choose (1, 1000)
let client = zipWith Quality server qs
qmax v q = if qualityValue q > qualityValue v then q else v
return $ match server (qToI client) ===
Just (qualityData $ foldr1 qmax client)
, testProperty "Most specific quality" $ do
(a, b) <- genMatchingPair
c <- genDiffMediaType a
let client = qToI [quality a "0.5", maxQuality b, maxQuality c]
return $ match [a, c] client === Just c
, testQ0 match qToI
]
------------------------------------------------------------------------------
testQ0
:: ([MediaType] -> a -> Maybe MediaType)
-> ([Quality MediaType] -> a)
-> TestTree
testQ0 match qToI = testGroup "q=0"
[ testProperty "Does not choose a q=0" $ do
server <- genConcreteMediaType
return $ match [server] (qToI [minQuality server]) === Nothing
, testProperty "Does not choose any q=0" $ do
server <- genServer
return $ match server (qToI $ map minQuality server) === Nothing
, testProperty "Does not choose q=0 with less specific type" $ do
(a, b) <- genMatchingPair
let client = qToI [minQuality a, maxQuality b]
return $ match [a] client === Nothing
, testProperty "Does choose type with q=0 on less specific type" $ do
(a, b) <- genMatchingPair
let client = qToI [minQuality b, maxQuality a]
return $ match [a] client === Just a
, testProperty "Does not choose q=0 when followed by same type" $ do
server <- genConcreteMediaType
let client = qToI [minQuality server, maxQuality server]
return $ match [server] client === Nothing
, testProperty "Does not choose q=0 when preceded by same type" $ do
server <- genConcreteMediaType
let client = qToI [maxQuality server, minQuality server]
return $ match [server] client === Nothing
]
------------------------------------------------------------------------------
testMap
:: String
-> ([(MediaType, MediaType)] -> a -> Maybe MediaType)
-> ([Quality MediaType] -> a)
-> TestTree
testMap name mapf qToI = testGroup ("map" ++ name)
[ testProperty "Matches" $ do
server <- genServer
qs <- replicateM (length server) $ choose (1, 1000 :: Word16)
let client = zipWith Quality server qs
qmax q v = if qualityValue q >= qualityValue v then q else v
zipped = zip server server
return $ mapf zipped (qToI client) ===
Just (qualityData $ foldr1 qmax client)
, testProperty "Nothing" $ do
(server, client) <- genServerAndClient
let zipped = zip server $ repeat "*/*"
return $ mapf zipped (qToI $ map maxQuality client) === Nothing
]
------------------------------------------------------------------------------
-- | An arbitrary non-empty list of concrete (wildcard-free) media types,
-- standing in for a server's offerings.
genServer :: Gen [MediaType]
genServer = listOf1 genConcreteMediaType
------------------------------------------------------------------------------
-- | Like 'genServer', but with duplicates (compared ignoring parameters)
-- removed, so matches against the head are unambiguous.
genNubServer :: Gen [MediaType]
genNubServer = fmap (nubBy ((==) `on` stripParams)) genServer
------------------------------------------------------------------------------
-- | Generate a server list together with a client list guaranteed not to
-- be matched by any of the server's media types.
genServerAndClient :: Gen ([MediaType], [MediaType])
genServerAndClient = do
    server     <- genServer
    candidates <- listOf1 (genDiffMediaTypesWith genConcreteMediaType server)
    let unmatched c = not (any (\s -> s `matches` c) server)
    return (server, filter unmatched candidates)
| zmthy/http-media | test/Network/HTTP/Media/Tests.hs | mit | 9,839 | 0 | 18 | 2,643 | 2,624 | 1,310 | 1,314 | 190 | 2 |
module Problems where
{-|
This module is used to generate the Main module.
This is useful because of the many Problem modules
that need to be included.
It's also not possible at the moment[1] to use TemplateHaskell for this.
[1]: https://ghc.haskell.org/trac/ghc/ticket/1475
|-}
import Data.Function (on)
import qualified Data.Char as Char
import qualified Data.List as List
import qualified System.Directory as Directory
import Text.Regex.Posix as Posix
-- | Discover the @ProblemNN.hs@ modules under @src/@ and return their
-- module names sorted by problem number.
problems :: IO [String]
problems = do
  dirs <- Directory.listDirectory "src/"
  let hasForm = (=~ "Problem[0-9]+\\.hs")
      cutSuffix = List.takeWhile (/= '.')
      wanted = cutSuffix <$> filter hasForm dirs
      -- Safe use of 'read': names matched the regex, so they contain digits.
      getNumber = read . filter Char.isNumber :: String -> Int
  -- 'sortOn' memoises the sort key, replacing the manual
  -- zip/sortBy/snd decorate-sort-undecorate dance.
  return (List.sortOn getNumber wanted)
-- | Render the full text of the generated @Main@ module: one qualified
-- import per problem module plus a @problems@ list pairing a label with
-- each module's @main@.
mainModule :: IO String
mainModule = do
  ps <- problems
  let mkImport = \p -> concat ["import qualified ", p, " as ", p]
      -- Each entry renders as ("ProblemNN: ", ProblemNN.main).
      mkPEntry = \p -> concat ["(\"", p, ": \", ", p ,".main)"]
      imports = fmap mkImport ps
      pEntries = List.intercalate ",\n " $ fmap mkPEntry ps
      pList = ["problems :: [(String, IO ())]",
               "problems = [" `mappend` pEntries `mappend` "]"]
  -- Assemble header, imports and the runner into one source file.
  return . unlines $ concat [
    ["module Main where",
     "",
     "import Control.Monad (forM_)"],
    imports,
    [""],
    pList,
    ["",
     "main = forM_ problems $ \\(desc, p) -> putStr desc >> p"]
    ]
main = writeFile "src/Main.hs" =<< mainModule
| runjak/projectEuler | src/Problems.hs | mit | 1,534 | 0 | 14 | 373 | 374 | 211 | 163 | 34 | 1 |
-----------------------------------------------------------------------------
--
-- Module : Language.TypeScript.Pretty
-- Copyright : (c) DICOM Grid Inc. 2013
-- License : MIT
--
-- Maintainer : Phillip Freeman <paf31@cantab.net>
-- Stability : experimental
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Language.TypeScript.Pretty (
renderDeclarationSourceFile
) where
import Language.TypeScript.Types
import Text.PrettyPrint
renderDeclarationSourceFile :: [DeclarationElement] -> String
renderDeclarationSourceFile = render . declarationSourceFile
declarationSourceFile :: [DeclarationElement] -> Doc
declarationSourceFile = vcat . map declarationElement
exported :: Exported -> Doc
exported _ = text "export"
-- | Render a value when present; produce the empty document otherwise.
renderMaybe :: (a -> Doc) -> Maybe a -> Doc
renderMaybe = maybe empty
stringLiteral :: String -> Doc
stringLiteral = doubleQuotes . text
-- | Render one top-level declaration element of a .d.ts file.
declarationElement :: DeclarationElement -> Doc
-- [export] interface ...
declarationElement (InterfaceDeclaration _ e i) =
  renderMaybe exported e
  <+> interface i
-- [export] import name = EntityName
declarationElement (ImportDeclaration e name entityName) =
  renderMaybe exported e
  <+> text "import"
  <+> text name
  <+> char '='
  <+> renderEntityName entityName
-- export = name
declarationElement (ExportDeclaration name) =
  exported Exported
  <+> text "="
  <+> text name
-- [export] import name = require "module"
-- NOTE(review): emits @require "mod"@ rather than @require("mod")@;
-- the latter is the TypeScript form — confirm whether the output is
-- meant to be re-parseable.
declarationElement (ExternalImportDeclaration e name imp) =
  renderMaybe exported e
  <+> text "import"
  <+> text name
  <+> char '='
  <+> text "require"
  <+> stringLiteral imp
-- [export] declare <ambient declaration>
declarationElement (AmbientDeclaration _ e a) =
  renderMaybe exported e
  <+> text "declare"
  <+> renderAmbientDeclaration a
renderAmbientDeclaration :: Ambient -> Doc
renderAmbientDeclaration (AmbientVariableDeclaration _ name ty) =
text "var"
<+> text name
<+> renderMaybe typeAnnotation ty
<+> semi
renderAmbientDeclaration (AmbientFunctionDeclaration _ name plrt) =
text "function"
<+> text name
<+> parameterListAndReturnType plrt
<+> semi
renderAmbientDeclaration (AmbientClassDeclaration _ name ps exts imps els) =
text "class"
<+> text name
<+> renderMaybe typeParameters ps
<+> renderMaybe extendsClause exts
<+> renderMaybe implementsClause imps
<+> braces (sepEndBy semi (renderAmbientClassBodyElement . snd ) els)
renderAmbientDeclaration (AmbientInterfaceDeclaration i) = interface i
renderAmbientDeclaration (AmbientEnumDeclaration _ name members) =
text "enum" <+> text name <+> braces (sepEndBy comma enumMember members)
where
enumMember (name, val) = text name <+> renderMaybe (\n -> char '=' <+> integer n) val
renderAmbientDeclaration (AmbientModuleDeclaration _ name ds) =
text "module"
<+> sepBy dot text name
<+> braces (vcat (map renderAmbientDeclaration ds))
renderAmbientDeclaration (AmbientExternalModuleDeclaration _ name es) =
text "module"
<+> stringLiteral name
<+> braces (vcat (map renderAmbientExternalModuleElement es))
renderAmbientExternalModuleElement :: AmbientExternalModuleElement -> Doc
renderAmbientExternalModuleElement (AmbientModuleElement a) = renderAmbientDeclaration a
renderAmbientExternalModuleElement (ExportAssignment name) =
text "export"
<+> char '='
<+> text name
<+> semi
renderAmbientExternalModuleElement (AmbientModuleExternalImportDeclaration e name imp) =
renderMaybe exported e
<+> text "import"
<+> text name
<+> char '='
<+> text "require"
<+> stringLiteral imp
renderAmbientClassBodyElement :: AmbientClassBodyElement -> Doc
renderAmbientClassBodyElement (AmbientConstructorDeclaration ps) =
text "constructor"
<+> parameterList ps
<+> semi
renderAmbientClassBodyElement (AmbientMemberDeclaration p s prop (Left ty)) =
renderMaybe publicOrPrivate p
<+> renderMaybe static s
<+> propertyName prop
<+> renderMaybe typeAnnotation ty
renderAmbientClassBodyElement (AmbientMemberDeclaration p s prop (Right ps)) =
renderMaybe publicOrPrivate p
<+> renderMaybe static s
<+> propertyName prop
<+> parameterListAndReturnType ps
renderAmbientClassBodyElement (AmbientIndexSignature i) = renderIndexSignature i
renderIndexSignature :: IndexSignature -> Doc
renderIndexSignature (IndexSignature s sn ty) =
text s
<+> colon
<+> stringOrNumber sn
<+> typeAnnotation ty
dot :: Doc
dot = char '.'
-- | Render each element followed by the separator, joined with spaces.
sepEndBy :: Doc -> (a -> Doc) -> [a] -> Doc
sepEndBy s f as = hsep [ f e <+> s | e <- as ]
renderEntityName :: EntityName -> Doc
renderEntityName (EntityName Nothing e) = text e
renderEntityName (EntityName (Just (ModuleName es)) e) = hcat (punctuate dot (map text es)) <> text e
interface :: Interface -> Doc
interface (Interface _ name ps exts ty) =
text "interface"
<+> text name
<+> renderMaybe typeParameters ps
<+> renderMaybe extendsClause exts
<+> objectType ty
extendsClause :: [TypeRef] -> Doc
extendsClause rs = text "extends" <+> classOrInterfaceTypeList rs
implementsClause :: [TypeRef] -> Doc
implementsClause rs = text "implements" <+> classOrInterfaceTypeList rs
-- | Render elements with the separator punctuated between them.
sepBy :: Doc -> (a -> Doc) -> [a] -> Doc
sepBy s f = hsep . punctuate s . map f
commaSep :: (a -> Doc) -> [a] -> Doc
commaSep = sepBy comma
classOrInterfaceTypeList :: [TypeRef] -> Doc
classOrInterfaceTypeList = commaSep typeRef
objectType :: TypeBody -> Doc
objectType = braces . typeBody
typeBody :: TypeBody -> Doc
typeBody (TypeBody ms) = hcat . map (\(_, m) -> typeMember m <+> semi) $ ms
typeMember :: TypeMember -> Doc
typeMember (MethodSignature name opt plrt) =
propertyName name
<+> renderMaybe optional opt
<+> parameterListAndReturnType plrt
typeMember (PropertySignature name opt ty) =
propertyName name
<+> renderMaybe optional opt
<+> renderMaybe typeAnnotation ty
typeMember (CallSignature plrt) = parameterListAndReturnType plrt
typeMember (ConstructSignature tyArgs pl ty) =
text "new"
<+> renderMaybe typeParameters tyArgs
<+> parens (parameterList pl)
<+> renderMaybe typeAnnotation ty
typeMember (TypeIndexSignature i) = renderIndexSignature i
propertyName :: String -> Doc
propertyName = text
typeAnnotation :: Type -> Doc
typeAnnotation t = colon <+> _type t
parameterListAndReturnType :: ParameterListAndReturnType -> Doc
parameterListAndReturnType (ParameterListAndReturnType ps pl ty) =
renderMaybe typeParameters ps
<+> parens (parameterList pl)
<+> renderMaybe typeAnnotation ty
parameterList :: [Parameter] -> Doc
parameterList = commaSep parameter
optional :: Optional -> Doc
optional _ = char '?'
parameter :: Parameter -> Doc
parameter (RequiredOrOptionalParameter pop name opt ty) =
renderMaybe publicOrPrivate pop
<+> text name
<+> renderMaybe optional opt
<+> renderMaybe typeAnnotation ty
parameter (RestParameter name ty) =
text "..."
<+> text name
<+> renderMaybe typeAnnotation ty
static :: Static -> Doc
static _ = text "static"
-- | Render a member visibility modifier.
publicOrPrivate :: PublicOrPrivate -> Doc
publicOrPrivate v = case v of
  Public  -> text "public"
  Private -> text "private"
stringOrNumber :: StringOrNumber -> Doc
stringOrNumber String = text "string"
stringOrNumber Number = text "number"
typeParameters :: [TypeParameter] -> Doc
typeParameters ps = char '<' <+> commaSep typeParameter ps <+> char '>'
typeParameter :: TypeParameter -> Doc
typeParameter (TypeParameter name ext) =
text name
<+> renderMaybe (\t -> text "extends" <+> _type t) ext
_type :: Type -> Doc
_type (ArrayType t) = _type t <+> text "[]"
_type (Predefined p) = predefinedType p
_type (TypeReference r) = typeRef r
_type (ObjectType o) = objectType o
_type (FunctionType ps pl ret) =
renderMaybe typeParameters ps
<+> parens (parameterList pl)
<+> text "=>"
<+> _type ret
_type (ConstructorType ps pl ret) =
text "new"
<+> renderMaybe typeParameters ps
<+> parens (parameterList pl)
<+> text "=>"
<+> _type ret
typeRef :: TypeRef -> Doc
typeRef (TypeRef n as) =
typeName n
<+> renderMaybe typeArguments as
-- | Render one of TypeScript's built-in primitive type keywords.
predefinedType :: PredefinedType -> Doc
predefinedType t = case t of
  AnyType     -> text "any"
  NumberType  -> text "number"
  BooleanType -> text "boolean"
  StringType  -> text "string"
  VoidType    -> text "void"
typeName :: TypeName -> Doc
typeName (TypeName Nothing t) = text t
typeName (TypeName (Just (ModuleName ts)) t) = sepBy dot text ts <+> text t
typeArguments :: [Type] -> Doc
typeArguments ts = char '<' <+> commaSep _type ts <+> char '>'
| paf31/language-typescript | src/Language/TypeScript/Pretty.hs | mit | 8,354 | 0 | 12 | 1,351 | 2,524 | 1,231 | 1,293 | 217 | 1 |
import Types
import Typechecker
-- | Entry point: currently a no-op placeholder.
main :: IO ()
main = return ()
| 5outh/Molecule | Main.hs | gpl-2.0 | 64 | 0 | 6 | 13 | 27 | 14 | 13 | 4 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, OverloadedStrings #-}
module Sound.Tidal.UI where
import Prelude hiding ((<*), (*>))
import Data.Char (digitToInt, isDigit, ord)
import Data.Bits (testBit, Bits, xor, shiftL, shiftR)
-- import System.Random (randoms, mkStdGen)
-- import Control.Monad.ST
-- import Control.Monad.Primitive (PrimState, PrimMonad)
-- import qualified Data.Vector as V
-- import Data.Word (Word32)
import Data.Ratio ((%))
import Data.List (sort, sortOn, findIndices, elemIndex, groupBy, transpose, intercalate, findIndex)
import Data.Maybe (isJust, fromJust, fromMaybe, mapMaybe)
import qualified Data.Text as T
import qualified Data.Map.Strict as Map
import Data.Bool (bool)
import Sound.Tidal.Bjorklund (bjorklund)
import Sound.Tidal.Core
import qualified Sound.Tidal.Params as P
import Sound.Tidal.Pattern
import Sound.Tidal.Utils
------------------------------------------------------------------------
-- * UI
-- | Randomisation
-- cf. George Marsaglia (2003). "Xorshift RNGs". Journal of Statistical Software 8:14.
-- https://www.jstatsoft.org/article/view/v008i14
-- | One step of Marsaglia's xorshift pseudo-random number generator:
-- three successive shift-and-xor mixes of the state word.
xorwise :: Int -> Int
xorwise seed = mix 5 shiftL (mix 17 shiftR (mix 13 shiftL seed))
  where
    -- xor a word with a shifted copy of itself
    mix n shift w = shift w n `xor` w
-- stretch 300 cycles over the range of [0,2**29 == 536870912) then apply the xorshift algorithm
-- | Turn a time value into an integer seed: stretch 300 cycles over the
-- range [0, 2^29 == 536870912), then apply one round of xorshift.
timeToIntSeed :: RealFrac a => a -> Int
-- The inline annotation on 'properFraction' pins its integral component to
-- Int; only the fractional part (in (-1,1)) is kept for scaling.
timeToIntSeed = xorwise . truncate . (* 536870912) . snd . (properFraction :: (RealFrac a => a -> (Int,a))) . (/ 300)
-- | Map an integer seed onto the unit interval [0,1) by reducing it
-- modulo 2^29 (536870912) and scaling down.
intSeedToRand :: Fractional a => Int -> a
intSeedToRand seed = realToFrac (seed `mod` 536870912) / 536870912
-- | Derive a pseudo-random value in [0,1) from a point in time.
timeToRand :: (RealFrac a, Fractional b) => a -> b
timeToRand t = intSeedToRand (timeToIntSeed t)
-- | Derive a list of @n@ pseudo-random values in [0,1) from a point in time.
timeToRands :: (RealFrac a, Fractional b) => a -> Int -> [b]
timeToRands t = timeToRands' (timeToIntSeed t)
-- | Produce @n@ pseudo-random values by repeatedly stepping the xorshift
-- generator from the given seed; a non-positive @n@ yields an empty list.
timeToRands' :: Fractional a => Int -> Int -> [a]
timeToRands' seed n = take n (map intSeedToRand (iterate xorwise seed))
{-|
`rand` generates a continuous pattern of (pseudo-)random numbers between `0` and `1`.
@
sound "bd*8" # pan rand
@
pans bass drums randomly
@
sound "sn sn ~ sn" # gain rand
@
makes the snares randomly loud and quiet.
Numbers coming from this pattern are 'seeded' by time. So if you reset
time (via `cps (-1)`, then `cps 1.1` or whatever cps you want to
restart with) the random pattern will emit the exact same _random_
numbers again.
In cases where you need two different random patterns, you can shift
one of them around to change the time from which the _random_ pattern
is read, note the difference:
@
jux (# gain rand) $ sound "sn sn ~ sn" # gain rand
@
and with the juxed version shifted backwards for 1024 cycles:
@
jux (# ((1024 <~) $ gain rand)) $ sound "sn sn ~ sn" # gain rand
@
-}
rand :: Fractional a => Pattern a
rand = Pattern evts
  where
    -- One analog event spanning the queried arc, whose value is derived
    -- from the arc's midpoint time.
    evts (State a@(Arc s e) _) =
      [Event (Context []) Nothing a (realToFrac (timeToRand ((e + s) / 2) :: Double))]
{- | Just like `rand` but for whole numbers, `irand n` generates a pattern of (pseudo-) random whole numbers between `0` to `n-1` inclusive. Notably used to pick a random
samples from a folder:
@
d1 $ segment 4 $ n (irand 5) # sound "drum"
@
-}
-- | Like 'rand' but for whole numbers: @irand i@ generates a pattern of
-- (pseudo-)random integers between 0 and i-1 inclusive.
irand :: Num a => Int -> Pattern a
irand i = (\x -> fromIntegral (floor (x * fromIntegral i :: Double) :: Int)) <$> rand
{- | 1D Perlin (smooth) noise, works like rand but smoothly moves between random
values each cycle. `perlinWith` takes a pattern as the RNG's "input" instead
of automatically using the cycle count.
@
d1 $ s "arpy*32" # cutoff (perlinWith (saw * 4) * 2000)
@
will generate a smooth random pattern for the cutoff frequency which will
repeat every cycle (because the saw does)
The `perlin` function uses the cycle count as input and can be used much like @rand@.
-}
-- | 1D Perlin (smooth) noise driven by an arbitrary input pattern: the
-- random values at the integer lattice points either side of the input are
-- blended using a smootherstep easing curve.
perlinWith :: Pattern Double -> Pattern Double
perlinWith p = interp <$> (p-pa) <*> (timeToRand <$> pa) <*> (timeToRand <$> pb) where
  -- Lattice points below (pa) and above (pb) the input value.
  pa = (fromIntegral :: Int -> Double) . floor <$> p
  pb = (fromIntegral :: Int -> Double) . (+1) . floor <$> p
  -- Smooth interpolation between the random values at the two lattice points.
  interp x a b = a + smootherStep x * (b-a)
  -- Perlin's "smootherstep" curve: 6x^5 - 15x^4 + 10x^3.
  smootherStep x = 6.0 * x**5 - 15.0 * x**4 + 10.0 * x**3
-- | 1D Perlin noise using the cycle count as input; usable much like
-- 'rand' but varying smoothly.
perlin :: Pattern Double
perlin = perlinWith (sig fromRational)
{- `perlin2With` is Perlin noise with a 2-dimensional input. This can be
useful for more control over how the randomness repeats (or doesn't).
@
d1
$ s "[supersaw:-12*32]"
# lpf (rangex 60 5000 $ perlin2With (cosine*2) (sine*2))
# lpq 0.3
@
will generate a smooth random cutoff pattern that repeats every cycle without
any reversals or discontinuities (because the 2D path is a circle).
`perlin2` only needs one input because it uses the cycle count as the
second input.
-}
-- | 2D Perlin noise over two input patterns. Gradient angles at the four
-- surrounding lattice corners are derived from 'timeToRand'; the corner dot
-- products are blended with the smootherstep curve and the result rescaled
-- from [-1,1] to [0,1].
perlin2With :: Pattern Double -> Pattern Double -> Pattern Double
perlin2With x y = (/2) . (+1) $ interp2 <$> xfrac <*> yfrac <*> dota <*> dotb <*> dotc <*> dotd where
  -- Floor (fl) and ceiling (ce) lattice coordinates of the inputs.
  fl = fmap ((fromIntegral :: Int -> Double) . floor)
  ce = fmap ((fromIntegral :: Int -> Double) . (+1) . floor)
  xfrac = x - fl x
  yfrac = y - fl y
  -- Pseudo-random gradient angle for a lattice corner (a, b).
  randAngle a b = 2 * pi * timeToRand (a + 0.0001 * b)
  pcos x' y' = cos $ randAngle <$> x' <*> y'
  psin x' y' = sin $ randAngle <$> x' <*> y'
  -- Dot products of each corner's gradient with the offset to the input.
  dota = pcos (fl x) (fl y) * xfrac + psin (fl x) (fl y) * yfrac
  dotb = pcos (ce x) (fl y) * (xfrac - 1) + psin (ce x) (fl y) * yfrac
  dotc = pcos (fl x) (ce y) * xfrac + psin (fl x) (ce y) * (yfrac - 1)
  dotd = pcos (ce x) (ce y) * (xfrac - 1) + psin (ce x) (ce y) * (yfrac - 1)
  -- Bilinear interpolation of the four dot products, eased by smootherstep.
  interp2 x' y' a b c d = (1.0 - s x') * (1.0 - s y') * a  + s x' * (1.0 - s y') * b
                          + (1.0 - s x') * s y' * c + s x' * s y' * d
  s x' = 6.0 * x'**5 - 15.0 * x'**4 + 10.0 * x'**3
-- | 2D Perlin noise with the cycle count as the second input
-- (see 'perlin2With').
perlin2 :: Pattern Double -> Pattern Double
perlin2 = perlin2With (sig fromRational)
{- | Randomly picks an element from the given list
@
sound "superpiano(3,8)" # note (choose ["a", "e", "g", "c"])
@
plays a melody randomly choosing one of the four notes \"a\", \"e\", \"g\", \"c\".
-}
-- | Randomly picks an element from the given list, continuously, seeded
-- by time via 'rand'.
choose :: [a] -> Pattern a
choose = chooseBy rand
-- | Pick elements of a list using the given pattern of numbers in [0,1)
-- as the source of randomness; an empty list yields silence.
chooseBy :: Pattern Double -> [a] -> Pattern a
chooseBy _ [] = silence
chooseBy f xs = fmap ((xs !!!) . floor) (range 0 (fromIntegral (length xs)) f)
{- | Like @choose@, but works on an a list of tuples of values and weights
@
sound "superpiano(3,8)" # note (wchoose [("a",1), ("e",0.5), ("g",2), ("c",1)])
@
In the above example, the "a" and "c" notes are twice as likely to
play as the "e" note, and half as likely to play as the "g" note.
-}
-- | Like 'choose', but picks from a list of (value, weight) pairs; values
-- are selected with probability proportional to their weight.
wchoose :: [(a,Double)] -> Pattern a
wchoose = wchooseBy rand
-- | Weighted choice driven by the given pattern of numbers in [0,1).
-- Each value is picked with probability proportional to its weight.
wchooseBy :: Pattern Double -> [(a,Double)] -> Pattern a
wchooseBy pat pairs = match <$> pat
  where
    -- Pick the first value whose cumulative weight exceeds r*total.
    -- Clamp to the last value instead of crashing ('head' of an empty
    -- index list) when floating-point rounding leaves no cumulative
    -- weight strictly above r*total.
    match r = values !! fromMaybe (length values - 1) (findIndex (> (r*total)) cweights)
    -- Running (cumulative) weights.
    cweights = scanl1 (+) (map snd pairs)
    values = map fst pairs
    total = sum $ map snd pairs
{- |
Similar to `degrade` `degradeBy` allows you to control the percentage of events that
are removed. For example, to remove events 90% of the time:
@
d1 $ slow 2 $ degradeBy 0.9 $ sound "[[[feel:5*8,feel*3] feel:3*8], feel*4]"
# accelerate "-6"
# speed "2"
@
-}
-- | Like 'degrade', but with control over the proportion of events removed,
-- e.g. @degradeBy 0.9@ removes events 90% of the time.
degradeBy :: Pattern Double -> Pattern a -> Pattern a
degradeBy = tParam _degradeBy

_degradeBy :: Double -> Pattern a -> Pattern a
_degradeBy = _degradeByUsing rand

-- Useful for manipulating random stream, e.g. to change 'seed'
_degradeByUsing :: Pattern Double -> Double -> Pattern a -> Pattern a
-- Pair each event with a random number and keep those strictly above x.
_degradeByUsing prand x p = fmap fst $ filterValues ((> x) . snd) $ (,) <$> p <* prand

-- | The complement of 'degradeBy': keeps exactly the events that
-- 'degradeBy' (driven by the same 'rand' stream) would remove.
unDegradeBy :: Pattern Double -> Pattern a -> Pattern a
unDegradeBy = tParam _unDegradeBy

_unDegradeBy :: Double -> Pattern a -> Pattern a
_unDegradeBy x p = fmap fst $ filterValues ((<= x) . snd) $ (,) <$> p <* rand

-- | Like 'degradeBy', but the amount pattern is slowed over @i@ cycles
-- while the random stream repeats every cycle ('fastRepeatCycles').
degradeOverBy :: Int -> Pattern Double -> Pattern a -> Pattern a
degradeOverBy i tx p = unwrap $ (\x -> fmap fst $ filterValues ((> x) . snd) $ (,) <$> p <* fastRepeatCycles i rand) <$> slow (fromIntegral i) tx
{- | Use @sometimesBy@ to apply a given function "sometimes". For example, the
following code results in `density 2` being applied about 25% of the time:
@
d1 $ sometimesBy 0.25 (density 2) $ sound "bd*8"
@
There are some aliases as well:
@
sometimes = sometimesBy 0.5
often = sometimesBy 0.75
rarely = sometimesBy 0.25
almostNever = sometimesBy 0.1
almostAlways = sometimesBy 0.9
@
-}
-- | Apply @f@ to events with probability @x@. The complementary random
-- selections are overlaid, so every event appears exactly once -- either
-- transformed or untouched.
sometimesBy :: Pattern Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
sometimesBy x f p = overlay kept transformed
  where
    kept        = degradeBy x p
    transformed = unDegradeBy x (f p)
-- | @sometimes@ is an alias for sometimesBy 0.5.
sometimes :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
sometimes = sometimesBy 0.5

-- | @often@ is an alias for sometimesBy 0.75.
often :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
often = sometimesBy 0.75

-- | @rarely@ is an alias for sometimesBy 0.25.
rarely :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
rarely = sometimesBy 0.25

-- | @almostNever@ is an alias for sometimesBy 0.1
almostNever :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
almostNever = sometimesBy 0.1

-- | @almostAlways@ is an alias for sometimesBy 0.9
almostAlways :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
almostAlways = sometimesBy 0.9

-- | @never@ ignores the transformation and returns the pattern unchanged.
never :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
never = flip const

-- | @always@ applies the transformation to every event.
always :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
always = id
{- | @someCyclesBy@ is a cycle-by-cycle version of @sometimesBy@. It has a
`someCycles = someCyclesBy 0.5` alias -}
someCyclesBy :: Pattern Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
someCyclesBy pd f pat = innerJoin $ (\d -> _someCyclesBy d f pat) <$> pd

_someCyclesBy :: Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
-- The decision is seeded by the cycle number, so each whole cycle is
-- either transformed or left alone.
_someCyclesBy x = when test
  where test c = timeToRand (fromIntegral c :: Double) < x

-- | Alias of 'someCyclesBy'.
somecyclesBy :: Pattern Double -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
somecyclesBy = someCyclesBy

-- | @someCycles = someCyclesBy 0.5@: apply the function on roughly half
-- of the cycles.
someCycles :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
someCycles = someCyclesBy 0.5

-- | Alias of 'someCycles'.
somecycles :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
somecycles = someCycles
{- | `degrade` randomly removes events from a pattern 50% of the time:
@
d1 $ slow 2 $ degrade $ sound "[[[feel:5*8,feel*3] feel:3*8], feel*4]"
# accelerate "-6"
# speed "2"
@
The shorthand syntax for `degrade` is a question mark: `?`. Using `?`
will allow you to randomly remove events from a portion of a pattern:
@
d1 $ slow 2 $ sound "bd ~ sn bd ~ bd? [sn bd?] ~"
@
You can also use `?` to randomly remove events from entire sub-patterns:
@
d1 $ slow 2 $ sound "[[[feel:5*8,feel*3] feel:3*8]?, feel*4]"
@
-}
-- Shorthand for @_degradeBy 0.5@ (the mini-notation @?@).
degrade :: Pattern a -> Pattern a
degrade = _degradeBy 0.5
{- | `brak` is a function from patterns of any type to a pattern of the
same type.
Make a pattern sound a bit like a breakbeat
Example:
@
d1 $ sound (brak "bd sn kurt")
@
-}
-- | Make a pattern sound a bit like a breakbeat: on odd cycles, squash the
-- pattern into the first half of the cycle (followed by silence) and shift
-- it right by a quarter cycle.
brak :: Pattern a -> Pattern a
brak = when oddCycle reshape
  where
    oddCycle c = c `mod` 2 == 1
    reshape p = (1 % 4) `rotR` fastcat [p, silence]
{- | Divides a pattern into a given number of subdivisions, plays the subdivisions
in order, but increments the starting subdivision each cycle. The pattern
wraps to the first subdivision after the last subdivision is played.
Example:
@
d1 $ iter 4 $ sound "bd hh sn cp"
@
This will produce the following over four cycles:
@
bd hh sn cp
hh sn cp bd
sn cp bd hh
cp bd hh sn
@
There is also `iter'`, which shifts the pattern in the opposite direction.
-}
-- | Divide a pattern into @n@ subdivisions and increment the starting
-- subdivision each cycle, wrapping round after the last one.
iter :: Pattern Int -> Pattern c -> Pattern c
iter = tParam _iter

_iter :: Int -> Pattern a -> Pattern a
-- One left-rotation of i/n per cycle, cycling through all n rotations.
_iter n p = slowcat $ map (\i -> (fromIntegral i % fromIntegral n) `rotL` p) [0 .. (n-1)]

-- | @iter'@ is the same as @iter@, but decrements the starting
-- subdivision instead of incrementing it.
iter' :: Pattern Int -> Pattern c -> Pattern c
iter' = tParam _iter'

_iter' :: Int -> Pattern a -> Pattern a
_iter' n p = slowcat $ map (\i -> (fromIntegral i % fromIntegral n) `rotR` p) [0 .. (n-1)]
-- | @palindrome p@ applies @rev@ to @p@ every other cycle, so that
-- the pattern alternates between forwards and backwards.
-- | Alternate the pattern between forwards and backwards, cycle by cycle.
palindrome :: Pattern a -> Pattern a
palindrome pat = slowAppend pat (rev pat)
-- | Composing patterns
{- | The function @seqP@ allows you to define when
a sound within a list starts and ends. The code below contains three
separate patterns in a `stack`, but each has different start times
(zero cycles, eight cycles, and sixteen cycles, respectively). All
patterns stop after 128 cycles:
@
d1 $ seqP [
(0, 128, sound "bd bd*2"),
(8, 128, sound "hh*2 [sn cp] cp future*4"),
(16, 128, sound (samples "arpy*8" (run 16)))
]
@
-}
-- | Stack several patterns, each with its own start and end time (in
-- cycles); each is shifted to begin at its start time and only plays
-- within its window.
seqP :: [(Time, Time, Pattern a)] -> Pattern a
seqP ps = stack (map activate ps)
  where
    activate (s, e, p) = playFor s e (sam s `rotR` p)
-- | Degrades a pattern over the given time.
fadeOut :: Time -> Pattern a -> Pattern a
-- The degradation amount follows envL, slowed over 'dur' cycles.
fadeOut dur p = innerJoin $ (`_degradeBy` p) <$> _slow dur envL

-- | Alternate version to @fadeOut@ where you can provide the time from which the fade starts
fadeOutFrom :: Time -> Time -> Pattern a -> Pattern a
fadeOutFrom from dur p = innerJoin $ (`_degradeBy` p) <$> (from `rotR` _slow dur envL)

-- | 'Undegrades' a pattern over the given time.
fadeIn :: Time -> Pattern a -> Pattern a
-- Same mechanism as 'fadeOut' but driven by envLR instead of envL.
fadeIn dur p = innerJoin $ (`_degradeBy` p) <$> _slow dur envLR

-- | Alternate version to @fadeIn@ where you can provide the time from
-- which the fade in starts
fadeInFrom :: Time -> Time -> Pattern a -> Pattern a
fadeInFrom from dur p = innerJoin $ (`_degradeBy` p) <$> (from `rotR` _slow dur envLR)
{- | The 'spread' function allows you to take a pattern transformation
which takes a parameter, such as `slow`, and provide several
parameters which are switched between. In other words it 'spreads' a
function across several values.
Taking a simple high hat loop as an example:
@
d1 $ sound "ho ho:2 ho:3 hc"
@
We can slow it down by different amounts, such as by a half:
@
d1 $ slow 2 $ sound "ho ho:2 ho:3 hc"
@
Or by four thirds (i.e. speeding it up by a third; `4%3` means four over
three):
@
d1 $ slow (4%3) $ sound "ho ho:2 ho:3 hc"
@
But if we use `spread`, we can make a pattern which alternates between
the two speeds:
@
d1 $ spread slow [2,4%3] $ sound "ho ho:2 ho:3 hc"
@
Note that if you pass ($) as the function to spread values over, you
can put functions as the list of values. For example:
@
d1 $ spread ($) [density 2, rev, slow 2, striate 3, (# speed "0.8")]
$ sound "[bd*2 [~ bd]] [sn future]*2 cp jvbass*4"
@
Above, the pattern will have these transforms applied to it, one at a time, per cycle:
* cycle 1: `density 2` - pattern will increase in speed
* cycle 2: `rev` - pattern will be reversed
* cycle 3: `slow 2` - pattern will decrease in speed
* cycle 4: `striate 3` - pattern will be granualized
* cycle 5: `(# speed "0.8")` - pattern samples will be played back more slowly
After `(# speed "0.8")`, the transforms will repeat and start at `density 2` again.
-}
-- | 'Spread' a parameterised function over several parameter values, one
-- value per cycle, concatenated with 'slowcat'.
spread :: (a -> t -> Pattern b) -> [a] -> t -> Pattern b
spread f vs p = slowcat [f v p | v <- vs]
-- | @slowspread@ is an alias for 'spread' (one value per cycle, in
-- contrast to 'fastspread').
slowspread :: (a -> t -> Pattern b) -> [a] -> t -> Pattern b
slowspread = spread
{- | @fastspread@ works the same as @spread@, but the result is squashed into a single cycle. If you gave four values to @spread@, then the result would seem to speed up by a factor of four. Compare these two:
d1 $ spread chop [4,64,32,16] $ sound "ho ho:2 ho:3 hc"
d1 $ fastspread chop [4,64,32,16] $ sound "ho ho:2 ho:3 hc"
There is also @slowspread@, which is an alias of @spread@.
-}
-- | Like 'spread', but all results are squashed into a single cycle
-- ('fastcat' instead of 'slowcat').
fastspread :: (a -> t -> Pattern b) -> [a] -> t -> Pattern b
fastspread f vs p = fastcat [f v p | v <- vs]
{- | There's a version of this function, `spread'` (pronounced "spread prime"), which takes a *pattern* of parameters, instead of a list:
@
d1 $ spread' slow "2 4%3" $ sound "ho ho:2 ho:3 hc"
@
This is quite a messy area of Tidal - due to a slight difference of
implementation this sounds completely different! One advantage of
using `spread'` though is that you can provide polyphonic parameters, e.g.:
@
d1 $ spread' slow "[2 4%3, 3]" $ sound "ho ho:2 ho:3 hc"
@
-}
-- | Like 'spread', but takes the parameters from a pattern (or any monadic
-- value) rather than a list.
spread' :: Monad m => (a -> b -> m c) -> m a -> b -> m c
spread' f vpat pat = do
  v <- vpat
  f v pat
{- | `spreadChoose f xs p` is similar to `slowspread` but picks values from
`xs` at random, rather than cycling through them in order. It has a
shorter alias `spreadr`.
-}
-- | Like 'slowspread', but the parameter value is picked at random once
-- per cycle rather than cycled through in order.
spreadChoose :: (t -> t1 -> Pattern b) -> [t] -> t1 -> Pattern b
spreadChoose f vs p = _segment 1 (choose vs) >>= \v -> f v p

-- | Shorter alias for 'spreadChoose'.
spreadr :: (t -> t1 -> Pattern b) -> [t] -> t1 -> Pattern b
spreadr = spreadChoose
{-| Decide whether to apply one or another function depending on the result of a test function that is passed the current cycle as a number.
@
d1 $ ifp ((== 0).(flip mod 2))
(striate 4)
(# coarse "24 48") $
sound "hh hc"
@
This will apply `striate 4` for every _even_ cycle and apply `# coarse "24 48"` for every _odd_.
Detail: As you can see the test function is arbitrary and does not rely on anything tidal specific. In fact it uses only plain haskell functionality, that is: it calculates the modulo of 2 of the current cycle which is either 0 (for even cycles) or 1. It then compares this value against 0 and returns the result, which is either `True` or `False`. This is what the `ifp` signature's first part signifies `(Int -> Bool)`, a function that takes a whole number and returns either `True` or `False`.
-}
-- | Apply @f1@ when the test of the current cycle number succeeds,
-- otherwise @f2@ (see the extended notes above).
ifp :: (Int -> Bool) -> (Pattern a -> Pattern a) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
ifp test f1 f2 p = splitQueries $ p {query = q}
  -- Queries are split (via splitQueries) so the branch is chosen from the
  -- cycle number at each query's start.
  where q a | test (floor $ start $ arc a) = query (f1 p) a
            | otherwise = query (f2 p) a
-- | @wedge t p p'@ combines patterns @p@ and @p'@ by squashing the
-- @p@ into the portion of each cycle given by @t@, and @p'@ into the
-- remainder of each cycle.
-- | @wedge t p p'@ squashes @p@ into the first @t@ of each cycle and @p'@
-- into the remainder; @t@ of 0 or 1 short-circuits to a single pattern.
wedge :: Time -> Pattern a -> Pattern a -> Pattern a
wedge t p p'
  | t == 0    = p'
  | t == 1    = p
  | otherwise = overlay (_fastGap (1/t) p) (t `rotR` _fastGap (1/(1-t)) p')
{- | @whenmod@ has a similar form and behavior to `every`, but requires an
additional number. Applies the function to the pattern, when the
remainder of the current loop number divided by the first parameter,
is greater or equal than the second parameter.
For example the following makes every other block of four loops twice
as dense:
@
d1 $ whenmod 8 4 (density 2) (sound "bd sn kurt")
@
-}
-- | Apply the function on cycles where the cycle number modulo @a@ is at
-- least @b@ (e.g. @whenmod 8 4@ hits the second half of every 8 cycles).
whenmod :: Int -> Int -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
whenmod a b = Sound.Tidal.Core.when test
  where
    test t = t `mod` a >= b
{- |
@
superimpose f p = stack [p, f p]
@
`superimpose` plays a modified version of a pattern at the same time as the original pattern,
resulting in two patterns being played at the same time.
@
d1 $ superimpose (density 2) $ sound "bd sn [cp ht] hh"
d1 $ superimpose ((# speed "2") . (0.125 <~)) $ sound "bd sn cp hh"
@
-}
-- | Play a transformed version of the pattern at the same time as the
-- original, stacking the two.
superimpose :: (Pattern a -> Pattern a) -> Pattern a -> Pattern a
superimpose transform pat = stack [pat, transform pat]
{- | @trunc@ truncates a pattern so that only a fraction of the pattern is played.
The following example plays only the first quarter of the pattern:
@
d1 $ trunc 0.25 $ sound "bd sn*2 cp hh*4 arpy bd*2 cp bd*2"
@
-}
-- | Truncate a pattern so that only the first fraction @t@ of each cycle
-- is played (followed by silence).
trunc :: Pattern Time -> Pattern a -> Pattern a
trunc = tParam _trunc

_trunc :: Time -> Pattern a -> Pattern a
_trunc t p = compress (0, t) (zoomArc (Arc 0 t) p)
{- | @linger@ is similar to `trunc` but the truncated part of the pattern loops until the end of the cycle
@
d1 $ linger 0.25 $ sound "bd sn*2 cp hh*4 arpy bd*2 cp bd*2"
@
-}
-- | Like 'trunc', but the truncated part of the pattern loops until the
-- end of the cycle.
linger :: Pattern Time -> Pattern a -> Pattern a
linger = tParam _linger

_linger :: Time -> Pattern a -> Pattern a
_linger n p = _fast (1/n) (zoomArc (Arc 0 n) p)
{- |
Use `within` to apply a function to only a part of a pattern. For example, to
apply `density 2` to only the first half of a pattern:
@
d1 $ within (0, 0.5) (density 2) $ sound "bd*2 sn lt mt hh hh hh hh"
@
Or, to apply `(# speed "0.5") to only the last quarter of a pattern:
@
d1 $ within (0.75, 1) (# speed "0.5") $ sound "bd*2 sn lt mt hh hh hh hh"
@
-}
-- | Apply a function only to the part of each cycle between @s@ and @e@;
-- the rest of the cycle plays unmodified.
within :: (Time, Time) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
within (s, e) f p =
  stack [ filterWhen inside (f p)
        , filterWhen (not . inside) p
        ]
  where
    inside t = cyclePos t >= s && cyclePos t < e
-- | Like 'within', but takes the timespan as an 'Arc' rather than a pair.
withinArc :: Arc -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
withinArc (Arc s e) = within (s, e)
{- |
For many cases, @within'@ will function exactly as within.
The difference between the two occurs when applying functions that change the timing of notes such as 'fast' or '<~'.
within first applies the function to all notes in the cycle, then keeps the results in the specified interval, and then combines it with the old cycle (an "apply split combine" paradigm).
within' first keeps notes in the specified interval, then applies the function to these notes, and then combines it with the old cycle (a "split apply combine" paradigm).
For example, whereas using the standard version of within
@
d1 $ within (0, 0.25) (fast 2) $ sound "bd hh cp sd"
@
sounds like:
@
d1 $ sound "[bd hh] hh cp sd"
@
using this alternative version, within'
@
d1 $ within' (0, 0.25) (fast 2) $ sound "bd hh cp sd"
@
sounds like:
@
d1 $ sound "[bd bd] hh cp sd"
@
-}
-- | Like 'within', but zooms into the timespan before applying the
-- function and compresses the result back ("split, apply, combine"), so
-- time-shifting functions stay inside the window.
within' :: (Time, Time) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
within' a@(s, e) f p =
  stack [ filterWhen inside (compress a (f (zoom a p)))
        , filterWhen (not . inside) p
        ]
  where
    inside t = cyclePos t >= s && cyclePos t < e
-- | Reverse the part of the pattern that falls within the given timespan.
revArc :: (Time, Time) -> Pattern a -> Pattern a
revArc a = within a rev
{- | You can use the @e@ function to apply a Euclidean algorithm over a
complex pattern, although the structure of that pattern will be lost:
@
d1 $ e 3 8 $ sound "bd*2 [sn cp]"
@
In the above, three sounds are picked from the pattern on the right according
to the structure given by the `e 3 8`. It ends up picking two `bd` sounds, a
`cp` and missing the `sn` entirely.
These types of sequences use "Bjorklund's algorithm", which wasn't made for
music but for an application in nuclear physics, which is exciting. More
exciting still is that it is very similar in structure to the one of the first
known algorithms written in Euclid's book of elements in 300 BC. You can read
more about this in the paper
[The Euclidean Algorithm Generates Traditional Musical Rhythms](http://cgm.cs.mcgill.ca/~godfried/publications/banff.pdf)
by Toussaint. Some examples from this paper are included below,
including rotation in some cases.
@
- (2,5) : A thirteenth century Persian rhythm called Khafif-e-ramal.
- (3,4) : The archetypal pattern of the Cumbia from Colombia, as well as a Calypso rhythm from Trinidad.
- (3,5,2) : Another thirteenth century Persian rhythm by the name of Khafif-e-ramal, as well as a Rumanian folk-dance rhythm.
- (3,7) : A Ruchenitza rhythm used in a Bulgarian folk-dance.
- (3,8) : The Cuban tresillo pattern.
- (4,7) : Another Ruchenitza Bulgarian folk-dance rhythm.
- (4,9) : The Aksak rhythm of Turkey.
- (4,11) : The metric pattern used by Frank Zappa in his piece titled Outside Now.
- (5,6) : Yields the York-Samai pattern, a popular Arab rhythm.
- (5,7) : The Nawakhat pattern, another popular Arab rhythm.
- (5,8) : The Cuban cinquillo pattern.
- (5,9) : A popular Arab rhythm called Agsag-Samai.
- (5,11) : The metric pattern used by Moussorgsky in Pictures at an Exhibition.
- (5,12) : The Venda clapping pattern of a South African children’s song.
- (5,16) : The Bossa-Nova rhythm necklace of Brazil.
- (7,8) : A typical rhythm played on the Bendir (frame drum).
- (7,12) : A common West African bell pattern.
- (7,16,14) : A Samba rhythm necklace from Brazil.
- (9,16) : A rhythm necklace used in the Central African Republic.
- (11,24,14) : A rhythm necklace of the Aka Pygmies of Central Africa.
- (13,24,5) : Another rhythm necklace of the Aka Pygmies of the upper Sangha.
@
-}
-- | Apply a Euclidean rhythm: @euclid n k p@ plays @p@ on @n@ onsets
-- distributed as evenly as possible over @k@ steps (Bjorklund's algorithm).
euclid :: Pattern Int -> Pattern Int -> Pattern a -> Pattern a
euclid = tParam2 _euclid

_euclid :: Int -> Int -> Pattern a -> Pattern a
_euclid n k a = fastcat [bool silence a on | on <- bjorklund (n, k)]
-- _euclid :: Int -> Int -> Pattern a -> Pattern a
-- _euclid n k p = flip const <$> filterValues (== True) (fastFromList $ bjorklund (n,k)) <*> p
{- | `euclidfull n k pa pb` stacks @e n k pa@ with @einv n k pb@ -}
-- | @euclidFull n k pa pb@ stacks @euclid n k pa@ with @euclidInv n k pb@,
-- so @pa@ plays on the Euclidean onsets and @pb@ fills the gaps.
euclidFull :: Pattern Int -> Pattern Int -> Pattern a -> Pattern a -> Pattern a
--euclidFull pn pk pa pb = innerJoin $ (\n k -> _euclidFull n k pa pb) <$> pn <*> pk
euclidFull n k pa pb = stack [ euclid n k pa, euclidInv n k pb ]

-- | The Euclidean rhythm (n,k) as a pattern of booleans, one per step.
_euclidBool :: Int -> Int -> Pattern Bool
_euclidBool n k = fastFromList $ bjorklund (n,k)
{-_euclidFull :: Int -> Int -> Pattern a -> Pattern a -> Pattern a
_euclidFull n k p p' = pickbool <$> _euclidBool n k <*> p <*> p'
where pickbool True a _ = a
pickbool False _ b = b
-}
-- euclid' :: Pattern Int -> Pattern Int -> Pattern a -> Pattern a
-- euclid' = tParam2 _euclidq'
-- | Like '_euclid', but spelt with an explicit per-step case.
_euclid' :: Int -> Int -> Pattern a -> Pattern a
_euclid' n k p = fastcat (map step (bjorklund (n, k)))
  where
    -- An onset plays the pattern; a rest is silent.
    step True  = p
    step False = silence
-- | Like 'euclid', with an extra parameter rotating the rhythm left by a
-- number of steps.
euclidOff :: Pattern Int -> Pattern Int -> Pattern Int -> Pattern a -> Pattern a
euclidOff = tParam3 _euclidOff

-- | Short alias for 'euclidOff'.
eoff :: Pattern Int -> Pattern Int -> Pattern Int -> Pattern a -> Pattern a
eoff = euclidOff

_euclidOff :: Int -> Int -> Int -> Pattern a -> Pattern a
-- Zero steps would mean dividing by zero below; yield silence instead.
_euclidOff _ 0 _ _ = silence
_euclidOff n k s p = (rotL $ fromIntegral s%fromIntegral k) (_euclid n k p)

-- | Boolean variant of 'euclidOff': offsets the (n,k) rhythm and combines
-- it with an incoming boolean pattern.
euclidOffBool :: Pattern Int -> Pattern Int -> Pattern Int -> Pattern Bool -> Pattern Bool
euclidOffBool = tParam3 _euclidOffBool

_euclidOffBool :: Int -> Int -> Int -> Pattern Bool -> Pattern Bool
_euclidOffBool _ 0 _ _ = silence
-- A True in the incoming pattern keeps the Euclidean step; a False inverts it.
_euclidOffBool n k s p = ((fromIntegral s % fromIntegral k) `rotL`) ((\a b -> if b then a else not a) <$> _euclidBool n k <*> p)
-- | Layer several Euclidean rhythms, distributing the onsets of each layer
-- over the onsets of the previous one.
distrib :: [Pattern Int] -> Pattern a -> Pattern a
distrib ps p = do p' <- sequence ps
                  _distrib p' p

_distrib :: [Int] -> Pattern a -> Pattern a
-- NOTE(review): 'last xs' makes this partial for an empty list -- confirm
-- callers always supply at least one level.
_distrib xs p = boolsToPat (foldr distrib' (replicate (last xs) True) (reverse $ layers xs)) p
  where
    -- Distribute one boolean layer over the True slots of the next.
    distrib' :: [Bool] -> [Bool] -> [Bool]
    distrib' [] _ = []
    distrib' (_:a) [] = False : distrib' a []
    distrib' (True:a) (x:b) = x : distrib' a b
    distrib' (False:a) b = False : distrib' a b
    -- A Bjorklund pattern for each adjacent pair of levels.
    layers = map bjorklund . (zip<*>tail)
    -- Use the boolean list as the structure for the value pattern.
    boolsToPat a b' = flip const <$> filterValues (== True) (fastFromList a) <*> b'
{- | `euclidInv` fills in the blanks left by `e`
-
@e 3 8 "x"@ -> @"x ~ ~ x ~ ~ x ~"@
@euclidInv 3 8 "x"@ -> @"~ x x ~ x x ~ x"@
-}
-- | The inverse of 'euclid': plays the pattern on the rests of the
-- Euclidean rhythm instead of the onsets.
euclidInv :: Pattern Int -> Pattern Int -> Pattern a -> Pattern a
euclidInv = tParam2 _euclidInv

_euclidInv :: Int -> Int -> Pattern a -> Pattern a
--_euclidInv n k p = flip const <$> filterValues (== False) (fastFromList $ bjorklund (n,k)) <*> p
_euclidInv n k a = fastcat $ fmap (bool a silence) $ bjorklund (n,k)
-- | @index sz indexpat pat@ scrubs through @pat@: each value of @indexpat@
-- (expected in [0,1]) picks the start of a window of length @sz@ to zoom into.
index :: Real b => b -> Pattern b -> Pattern c -> Pattern c
index sz indexpat pat =
  spread' (zoom' $ toRational sz) (toRational . (*(1-sz)) <$> indexpat) pat
  where
    -- Zoom into the window [s, s+tSz).
    zoom' tSz s = zoomArc (Arc s (s+tSz))
{-
-- | @prrw f rot (blen, vlen) beatPattern valuePattern@: pattern rotate/replace.
prrw :: (a -> b -> c) -> Int -> (Time, Time) -> Pattern a -> Pattern b -> Pattern c
prrw f rot (blen, vlen) beatPattern valuePattern =
let
ecompare (_,e1,_) (_,e2,_) = compare (fst e1) (fst e2)
beats = sortBy ecompare $ arc beatPattern (0, blen)
values = fmap thd' . sortBy ecompare $ arc valuePattern (0, vlen)
cycles = blen * (fromIntegral $ lcm (length beats) (length values) `div` (length beats))
in
_slow cycles $ stack $ zipWith
(\( _, (start, end), v') v -> (start `rotR`) $ densityGap (1 / (end - start)) $ pure (f v' v))
(sortBy ecompare $ arc (_fast cycles $ beatPattern) (0, blen))
(drop (rot `mod` length values) $ cycle values)
-- | @prr rot (blen, vlen) beatPattern valuePattern@: pattern rotate/replace.
prr :: Int -> (Time, Time) -> Pattern String -> Pattern b -> Pattern b
prr = prrw $ flip const
{-|
@preplace (blen, plen) beats values@ combines the timing of @beats@ with the values
of @values@. Other ways of saying this are:
* sequential convolution
* @values@ quantized to @beats@.
Examples:
@
d1 $ sound $ preplace (1,1) "x [~ x] x x" "bd sn"
d1 $ sound $ preplace (1,1) "x(3,8)" "bd sn"
d1 $ sound $ "x(3,8)" <~> "bd sn"
d1 $ sound "[jvbass jvbass:5]*3" |+| (shape $ "1 1 1 1 1" <~> "0.2 0.9")
@
It is assumed the pattern fits into a single cycle. This works well with
pattern literals, but not always with patterns defined elsewhere. In those cases
use @preplace@ and provide desired pattern lengths:
@
let p = slow 2 $ "x x x"
d1 $ sound $ preplace (2,1) p "bd sn"
@
-}
preplace :: (Time, Time) -> Pattern String -> Pattern b -> Pattern b
preplace = preplaceWith $ flip const
-- | @prep@ is an alias for preplace.
prep :: (Time, Time) -> Pattern String -> Pattern b -> Pattern b
prep = preplace
preplace1 :: Pattern String -> Pattern b -> Pattern b
preplace1 = preplace (1, 1)
preplaceWith :: (a -> b -> c) -> (Time, Time) -> Pattern a -> Pattern b -> Pattern c
preplaceWith f (blen, plen) = prrw f 0 (blen, plen)
prw :: (a -> b -> c) -> (Time, Time) -> Pattern a -> Pattern b -> Pattern c
prw = preplaceWith
preplaceWith1 :: (a -> b -> c) -> Pattern a -> Pattern b -> Pattern c
preplaceWith1 f = prrw f 0 (1, 1)
prw1 :: (a -> b -> c) -> Pattern a -> Pattern b -> Pattern c
prw1 = preplaceWith1
(<~>) :: Pattern String -> Pattern b -> Pattern b
(<~>) = preplace (1, 1)
-- | @protate len rot p@ rotates pattern @p@ by @rot@ beats to the left.
-- @len@: length of the pattern, in cycles.
-- Example: @d1 $ every 4 (protate 2 (-1)) $ slow 2 $ sound "bd hh hh hh"@
protate :: Time -> Int -> Pattern a -> Pattern a
protate len rot p = prrw (flip const) rot (len, len) p p
prot :: Time -> Int -> Pattern a -> Pattern a
prot = protate
prot1 :: Int -> Pattern a -> Pattern a
prot1 = protate 1
{-| The @<<~@ operator rotates a unit pattern to the left, similar to @<~@,
but by events rather than linear time. The timing of the pattern remains constant:
@
d1 $ (1 <<~) $ sound "bd ~ sn hh"
-- will become
d1 $ sound "sn ~ hh bd"
@ -}
(<<~) :: Int -> Pattern a -> Pattern a
(<<~) = protate 1
-- | @~>>@ is like @<<~@ but for shifting to the right.
(~>>) :: Int -> Pattern a -> Pattern a
(~>>) = (<<~) . (0-)
-- | @pequal cycles p1 p2@: quickly test if @p1@ and @p2@ are the same.
pequal :: Ord a => Time -> Pattern a -> Pattern a -> Bool
pequal cycles p1 p2 = (sort $ arc p1 (0, cycles)) == (sort $ arc p2 (0, cycles))
-}
-- | @rot n p@ rotates the values in a pattern @p@ by @n@ beats to the left.
-- Example: @d1 $ every 4 (rot 2) $ slow 2 $ sound "bd hh hh hh"@
-- | @rot n p@ rotates the values in a pattern @p@ by @n@ beats to the left,
-- keeping the original event structure (timing) intact.
-- Example: @d1 $ every 4 (rot 2) $ slow 2 $ sound "bd hh hh hh"@
rot :: Ord a => Pattern Int -> Pattern a -> Pattern a
rot = tParam _rot

-- Calculates a whole cycle, rotates it, then constrains events to the original query arc
_rot :: Ord a => Int -> Pattern a -> Pattern a
_rot i pat = splitQueries $ pat {query = \st -> f st (query pat (st {arc = wholeCycle (arc st)}))}
  where -- TODO maybe events with the same arc (part+whole) should be
        -- grouped together in the rotation?
        -- Defragment, sort, rotate the values, then clip to the queried arc.
        f st es = constrainEvents (arc st) $ shiftValues $ sort $ defragParts es
        -- Rotate values left (i >= 0) or right (i < 0) through the sorted events.
        shiftValues es | i >= 0 =
                         zipWith (\e s -> e {value = s}) es
                                 (drop i $ cycle $ map value es)
                       | otherwise =
                         zipWith (\e s -> e{value = s}) es
                                 (drop (length es - abs i) $ cycle $ map value es)
        -- The full cycle containing the start of the given arc.
        wholeCycle (Arc s _) = Arc (sam s) (nextSam s)
        constrainEvents :: Arc -> [Event a] -> [Event a]
        constrainEvents a es = mapMaybe (constrainEvent a) es
        -- Clip an event's part to the arc; drop it if there is no overlap.
        constrainEvent :: Arc -> Event a -> Maybe (Event a)
        constrainEvent a e =
          do
            p' <- subArc (part e) a
            return e {part = p'}
-- | @segment n p@: 'samples' the pattern @p@ at a rate of @n@
-- events per cycle. Useful for turning a continuous pattern into a
-- discrete one.
segment :: Pattern Time -> Pattern a -> Pattern a
segment = tParam _segment

_segment :: Time -> Pattern a -> Pattern a
-- The n-per-cycle pattern of 'id' supplies the structure; '<*' fills in
-- values sampled from p.
_segment n p = _fast n (pure id) <* p

-- | @discretise@: the old (deprecated) name for 'segment'
discretise :: Pattern Time -> Pattern a -> Pattern a
discretise = segment
-- | @randcat ps@: does a @slowcat@ on the list of patterns @ps@ but
-- randomises the order in which they are played.
-- | Does a @slowcat@ on the list of patterns, but with a random rotation
-- chosen once per cycle, randomising the order in which they are played.
randcat :: [Pattern a] -> Pattern a
randcat ps = spread' rotL (_segment 1 $ (%1) . fromIntegral <$> (irand (length ps) :: Pattern Int)) (slowcat ps)

-- | Like 'randcat', but each pattern carries a weight determining how
-- likely it is to be chosen each cycle.
wrandcat :: [(Pattern a, Double)] -> Pattern a
wrandcat ps = unwrap $ wchooseBy (segment 1 rand) ps
-- @fromNote p@: converts a pattern of human-readable pitch names
-- into pitch numbers. For example, @"cs2"@ will be parsed as C Sharp
-- in the 2nd octave with the result of @11@, and @"b-3"@ as
-- @-25@. Pitches can be decorated using:
--
-- * s = Sharp, a half-step above (@"gs-1"@)
-- * f = Flat, a half-step below (@"gf-1"@)
-- * n = Natural, no decoration (@"g-1" and "gn-1"@ are equivalent)
-- * ss = Double sharp, a whole step above (@"gss-1"@)
-- * ff = Double flat, a whole step below (@"gff-1"@)
--
-- Note that TidalCycles now assumes that middle C is represented by
-- the value 0, rather than the previous value of 60. This function
-- is similar to previously available functions @tom@ and @toMIDI@,
-- but the default octave is now 0 rather than 5.
{-
definition moved to Parse.hs ..
toMIDI :: Pattern String -> Pattern Int
toMIDI p = fromJust <$> (filterValues (isJust) (noteLookup <$> p))
where
noteLookup :: String -> Maybe Int
noteLookup [] = Nothing
noteLookup s | not (last s `elem` ['0' .. '9']) = noteLookup (s ++ "0")
| not (isLetter (s !! 1)) = noteLookup((head s):'n':(tail s))
| otherwise = parse s
parse x = (\a b c -> a+b+c) <$> pc x <*> sym x <*> Just(12*digitToInt (last x))
pc x = lookup (head x) [('c',0),('d',2),('e',4),('f',5),('g',7),('a',9),('b',11)]
sym x = lookup (init (tail x)) [("s",1),("f",-1),("n",0),("ss",2),("ff",-2)]
-}
-- @tom p@: Alias for @toMIDI@.
-- tom = toMIDI
{- | The `fit` function takes a pattern of integer numbers, which are used to select values from the given list. What makes this a bit strange is that only a given number of values are selected each cycle. For example:

@
d1 $ sound (fit 3 ["bd", "sn", "arpy", "arpy:1", "casio"] "0 [~ 1] 2 1")
@

The above fits three samples into the pattern, i.e. for the first cycle this will be `"bd"`, `"sn"` and `"arpy"`, giving the result `"bd [~ sn] arpy sn"` (note that we start counting at zero, so that `0` picks the first value). The following cycle the *next* three values in the list will be picked, i.e. `"arpy:1"`, `"casio"` and `"bd"`, giving the pattern `"arpy:1 [~ casio] bd casio"` (note that the list wraps round here).
-}
fit :: Int -> [a] -> Pattern Int -> Pattern a
fit perCycle xs p = (xs !!!) <$> (p {query = map (\e -> fmap (+ pos e) e) . query p})
  where -- Offset each index by @perCycle@ for every elapsed cycle, so a
        -- fresh "window" of the list is selected each cycle; (!!!) wraps
        -- indexing around the end of the list.
        pos e = perCycle * floor (start $ part e)

-- | @permstep nSteps things p@: enumerates the ways of distributing
-- @nSteps@ repetitions over the elements of @things@ (orderings sorted
-- by how evenly they spread the steps), uses @p@ (a 0..1 value, sampled
-- once per cycle) to pick one such distribution, and plays each element
-- repeated the corresponding number of times within the cycle.
permstep :: RealFrac b => Int -> [a] -> Pattern b -> Pattern a
permstep nSteps things p = unwrap $ (\n -> fastFromList $ concatMap (\x -> replicate (fst x) (snd x)) $ zip (ps !! floor (n * fromIntegral (length ps - 1))) things) <$> _segment 1 p
  where ps = permsort (length things) nSteps
        -- How far a composition deviates from a perfectly even spread.
        deviance avg xs = sum $ map (abs . (avg-) . fromIntegral) xs
        permsort n total = map fst $ sortOn snd $ map (\x -> (x,deviance (fromIntegral total / (fromIntegral n :: Double)) x)) $ perms n total
        -- All ways of writing @total@ as an ordered sum of @n@ positive parts.
        perms 0 _ = []
        perms 1 n = [[n]]
        perms n total = concatMap (\x -> map (x:) $ perms (n-1) (total-x)) [1 .. (total-(n-1))]
-- | @struct a b@: structures pattern @b@ in terms of the pattern of
-- boolean values @a@. Only @True@ values in the boolean pattern are
-- used.
-- NOTE(review): @<*@ here is presumably Tidal's left-structured
-- application operator (structure from @ps@), not the Prelude's —
-- verify against Sound.Tidal.Pattern before refactoring.
struct :: Pattern Bool -> Pattern a -> Pattern a
struct ps pv = filterJust $ (\a b -> if a then Just b else Nothing ) <$> ps <* pv

-- | @substruct a b@: similar to @struct@, but each event in pattern @a@ gets replaced with pattern @b@, compressed to fit the timespan of the event.
substruct :: Pattern String -> Pattern b -> Pattern b
substruct s p = p {query = f}
  where -- Only digital events (those with a 'whole') are used; each one's
        -- whole arc becomes a compressed copy of @p@.
        f st =
          concatMap ((\a' -> queryArc (compressArcTo a' p) a') . fromJust . whole) $ filter isDigital $ (query s st)

-- | @randArcs n@: per cycle, generates a list of @n@ contiguous arcs
-- whose relative durations are randomly chosen from the ratios 1, 2
-- and 3, normalised so they add up to a single cycle.
randArcs :: Int -> Pattern [Arc]
randArcs n =
  do rs <- mapM (\x -> pure (toRational x / toRational n) <~ choose [1 :: Int,2,3]) [0 .. (n-1)]
     let rats = map toRational rs
         total = sum rats
         pairs = pairUp $ accumulate $ map (/total) rats
     return pairs
  where -- Turn a list of accumulated end-points into adjacent arcs,
        -- pinning the first start to 0 and the last end to 1.
        pairUp [] = []
        pairUp xs = Arc 0 (head xs) : pairUp' xs
        pairUp' [] = []
        pairUp' [_] = []
        pairUp' [a, _] = [Arc a 1]
        pairUp' (a:b:xs) = Arc a b: pairUp' (b:xs)

-- TODO - what does this do? Something for @stripe@ ..
-- Queries 'randArcs' for the current cycle and turns each sub-arc into
-- an event carrying its index, clipped to the queried span.
randStruct :: Int -> Pattern Int
randStruct n = splitQueries $ Pattern {query = f}
  where f st = map (\(a,b,c) -> Event (Context []) (Just a) (fromJust b) c) $ filter (\(_,x,_) -> isJust x) as
          where as = map (\(i, Arc s' e') ->
                       (Arc (s' + sam s) (e' + sam s),
                        subArc (Arc s e) (Arc (s' + sam s) (e' + sam s)), i)) $
                     enumerate $ value $ head $
                     queryArc (randArcs n) (Arc (sam s) (nextSam s))
                (Arc s e) = arc st

-- TODO - what does this do?
-- Like 'substruct', but driven by integer events: each event's index
-- selects a rotation of @p@, compressed into the event's arc.
substruct' :: Pattern Int -> Pattern a -> Pattern a
substruct' s p = p {query = \st -> concatMap (f st) (query s st)}
  where f st (Event c (Just a') _ i) = map (\e -> e {context = combineContexts [c, context e]}) $ queryArc (compressArcTo a' (inside (pure $ 1/toRational(length (queryArc s (Arc (sam (start $ arc st)) (nextSam (start $ arc st)))))) (rotR (toRational i)) p)) a'
        -- Ignore analog events (ones without wholes)
        f _ _ = []
-- | @stripe n p@: repeats pattern @p@, @n@ times per cycle. So
-- similar to @fast@, but with random durations. The repetitions will
-- be contiguous (touching, but not overlapping) and the durations
-- will add up to a single cycle. @n@ can be supplied as a pattern of
-- integers.
stripe :: Pattern Int -> Pattern a -> Pattern a
stripe = tParam _stripe

-- | Non-patterned worker for 'stripe'.
_stripe :: Int -> Pattern a -> Pattern a
_stripe = substruct' . randStruct

-- | @slowstripe n p@: The same as @stripe@, but the result is also
-- @n@ times slower, so that the mean average duration of the stripes
-- is exactly one cycle, and every @n@th stripe starts on a cycle
-- boundary (in indian classical terms, the @sam@).
slowstripe :: Pattern Int -> Pattern a -> Pattern a
slowstripe n = slow (toRational <$> n) . stripe n
-- Lindenmayer patterns, these go well with the step sequencer

-- | General rule parser (strings map to strings): takes a ruleset such
-- as @"a:b,cd:ef"@ and returns each rule as a (left-hand side,
-- right-hand side) pair of strings.
parseLMRule :: String -> [(String,String)]
parseLMRule ruleString = breakRule <$> ruleChunks
  where
    -- Comma-separated rule chunks, e.g. ["a:b", "b:ab"].
    ruleChunks = map T.unpack $ T.splitOn (T.pack ",") $ T.pack ruleString
    -- Split one chunk at the position of its ':' separator; the
    -- separator itself is filtered out before splitting.
    breakRule chunk =
      splitAt (fromJust $ elemIndex ':' chunk) (filter (/= ':') chunk)

-- | Specific parser for the step sequencer (single chars map to
-- strings). The ruleset has the form @"a:b,b:ab"@.
parseLMRule' :: String -> [(Char, String)]
parseLMRule' ruleString =
  [ (head lhs, rhs) | (lhs, rhs) <- parseLMRule ruleString ]

{- | returns the `n`th iteration of a [Lindenmayer System](https://en.wikipedia.org/wiki/L-system) with given start sequence.

for example:

@
lindenmayer 1 "a:b,b:ab" "ab" -> "bab"
@
-}
lindenmayer :: Int -> String -> String -> String
lindenmayer _ _ [] = []
lindenmayer 1 rules (sym:rest) =
  -- Symbols without a matching rule are carried through unchanged.
  fromMaybe [sym] (lookup sym (parseLMRule' rules)) ++ lindenmayer 1 rules rest
lindenmayer gens rules start = iterate (lindenmayer 1 rules) start !! gens

{- | @lindenmayerI@ converts the resulting string into a list of integers
with @fromIntegral@ applied (so they can be used seamlessly where floats or
rationals are required) -}
lindenmayerI :: Num b => Int -> String -> String -> [b]
lindenmayerI gens rules start =
  map (fromIntegral . digitToInt) (lindenmayer gens rules start)
{- | @runMarkov n tmat xi seed@ generates a Markov chain (as a list) of length @n@
using the transition matrix @tmat@ starting from initial state @xi@, starting
with random numbers generated from @seed@
Each entry in the chain is the index of state (starting from zero).
Each row of the matrix will be automatically normalized. For example:
@
runMarkov 8 [[2,3], [1,3]] 0 0
@
will produce a two-state chain 8 steps long, from initial state @0@, where the
transition probability from state 0->0 is 2/5, 0->1 is 3/5, 1->0 is 1/4, and
1->1 is 3/4. -}
-- NOTE(review): requires @n >= 1@ — @n == 0@ would index with (-1) and
-- crash. Confirm callers never pass 0.
runMarkov :: Int -> [[Double]] -> Int -> Time -> [Int]
runMarkov n tp xi seed = reverse $ (iterate (markovStep $ renorm) [xi])!! (n-1) where
  -- Prepend the next state, chosen by comparing a pseudo-random number
  -- against the cumulative probabilities of the current state's row.
  markovStep tp' xs = (fromJust $ findIndex (r <=) $ scanl1 (+) (tp'!!(head xs))) : xs where
    r = timeToRand $ seed + (fromIntegral . length) xs / fromIntegral n
  -- Normalise each row of the transition matrix to sum to 1.
  renorm = [ map (/ sum x) x | x <- tp ]

{- @markovPat n xi tp@ generates a one-cycle pattern of @n@ steps in a Markov
chain starting from state @xi@ with transition matrix @tp@. Each row of the
transition matrix is automatically normalized. For example:
@
tidal> markovPat 8 1 [[3,5,2], [4,4,2], [0,1,0]]

(0>⅛)|1
(⅛>¼)|2
(¼>⅜)|1
(⅜>½)|1
(½>⅝)|2
(⅝>¾)|1
(¾>⅞)|1
(⅞>1)|0
@ -}
markovPat :: Pattern Int -> Pattern Int -> [[Double]] -> Pattern Int
markovPat = tParam2 _markovPat

-- | Non-patterned worker for 'markovPat'. The chain is seeded with the
-- cycle start, so each cycle gets a fresh but per-cycle-deterministic chain.
_markovPat :: Int -> Int -> [[Double]] -> Pattern Int
_markovPat n xi tp = splitQueries $ Pattern (\(State a@(Arc s _) _) ->
  queryArc (listToPat $ runMarkov n tp xi (sam s)) a)
{-|
Removes events from second pattern that don't start during an event from first.

Consider this, kind of messy rhythm without any rests.

@
d1 $ sound (slowcat ["sn*8", "[cp*4 bd*4, hc*5]"]) # n (run 8)
@

If we apply a mask to it

@
d1 $ s (mask ("1 1 1 ~ 1 1 ~ 1" :: Pattern Bool)
  (slowcat ["sn*8", "[cp*4 bd*4, bass*5]"] ))
  # n (run 8)
@

Due to the use of `slowcat` here, the same mask is first applied to `"sn*8"` and in the next cycle to `"[cp*4 bd*4, hc*5]"`.

You could achieve the same effect by adding rests within the `slowcat` patterns, but mask allows you to do this more easily. It kind of keeps the rhythmic structure and you can change the used samples independently, e.g.

@
d1 $ s (mask ("1 ~ 1 ~ 1 1 ~ 1")
  (slowcat ["can*8", "[cp*4 sn*4, jvbass*16]"] ))
  # n (run 8)
@
-}
-- Structure comes from @p@; only events coinciding with a @True@ in @b@
-- survive (NOTE(review): @<*@ is presumably Tidal's left-structured
-- application, not the Prelude's — verify against Sound.Tidal.Pattern).
mask :: Pattern Bool -> Pattern a -> Pattern a
mask b p = const <$> p <* (filterValues id b)
{-
mask :: Pattern Bool -> Pattern b -> Pattern b
-- TODO - should that be part or whole?
mask pa pb = pb {query = \st -> concat [filterOns (subArc (arc st) $ part i) (query pb st) | i <- query pa st]}
where filterOns Nothing _ = []
filterOns (Just a) es = filter (onsetIn a) es
-}
-- | TODO: refactor towards union
-- Returns the smallest arc containing all the given arcs (a whole
-- cycle for the empty list).
enclosingArc :: [Arc] -> Arc
enclosingArc [] = Arc 0 1
enclosingArc as = Arc (minimum (map start as)) (maximum (map stop as))

-- | Zooms into the span of the current cycle that the pattern's events
-- actually cover, stretching that span to fill the whole cycle.
stretch :: Pattern a -> Pattern a
-- TODO - should that be whole or part?
stretch p = splitQueries $ p {query = q}
  where q st = query (zoomArc (cycleArc $ enclosingArc $ map wholeOrPart $ query p (st {arc = Arc (sam s) (nextSam s)})) p) st
          where s = start $ arc st
{- | `fit'` is a generalization of `fit`, where the list is instead constructed by using another integer pattern to slice up a given pattern. The first argument is the number of cycles of that latter pattern to use when slicing. It's easier to understand this with a few examples:

@
d1 $ sound (fit' 1 2 "0 1" "1 0" "bd sn")
@

So what does this do? The first `1` just tells it to slice up a single cycle of `"bd sn"`. The `2` tells it to select two values each cycle, just like the first argument to `fit`. The next pattern `"0 1"` is the "from" pattern which tells it how to slice, which in this case means `"0"` maps to `"bd"`, and `"1"` maps to `"sn"`. The next pattern `"1 0"` is the "to" pattern, which tells it how to rearrange those slices. So the final result is the pattern `"sn bd"`.

A more useful example might be something like

@
d1 $ fit' 1 4 (run 4) "[0 3*2 2 1 0 3*2 2 [1*8 ~]]/2" $ chop 4 $ (sound "breaks152" # unit "c")
@

which uses `chop` to break a single sample into individual pieces, which `fit'` then puts into a list (using the `run 4` pattern) and reassembles according to the complicated integer pattern.
-}
fit' :: Pattern Time -> Int -> Pattern Int -> Pattern Int -> Pattern a -> Pattern a
fit' cyc n from to p = squeezeJoin $ fit n mapMasks to
  where -- One masked/stretched slice of @p@ per "from" index.
        mapMasks = [stretch $ mask (const True <$> filterValues (== i) from') p'
                   | i <- [0..n-1]]
        p' = density cyc p
        from' = density cyc from

{-| @chunk n f p@ treats the given pattern @p@ as having @n@ chunks, and applies the function @f@ to one of those sections per cycle, running from left to right.

@
d1 $ chunk 4 (density 4) $ sound "cp sn arpy [mt lt]"
@
-}
chunk :: Int -> (Pattern b -> Pattern b) -> Pattern b -> Pattern b
chunk n f p = cat [withinArc (Arc (i % fromIntegral n) ((i+1) % fromIntegral n)) f p | i <- [0 .. fromIntegral n - 1]]

{-
chunk n f p = do i <- _slow (toRational n) $ run (fromIntegral n)
                 within (i%(fromIntegral n),(i+)1%(fromIntegral n)) f p
-}

-- deprecated (renamed to chunk)
runWith :: Int -> (Pattern b -> Pattern b) -> Pattern b -> Pattern b
runWith = chunk

{-| @chunk'@ works much the same as `chunk`, but runs from right to left.
-}
chunk' :: Integral a => a -> (Pattern b -> Pattern b) -> Pattern b -> Pattern b
chunk' n f p = do i <- _slow (toRational n) $ rev $ run (fromIntegral n)
                  withinArc (Arc (i % fromIntegral n) ((i+)1 % fromIntegral n)) f p

-- deprecated (renamed to chunk')
runWith' :: Integral a => a -> (Pattern b -> Pattern b) -> Pattern b -> Pattern b
runWith' = chunk'
-- | Carries out a transformation 'inside' a cycle: slows the pattern
-- down by @n@, applies @f@, then speeds the result back up by @n@.
inside :: Pattern Time -> (Pattern a1 -> Pattern a) -> Pattern a1 -> Pattern a
inside n f p = density n $ f (slow n p)

-- | The inverse of 'inside': carries out the transformation over @n@
-- cycles rather than within one.
outside :: Pattern Time -> (Pattern a1 -> Pattern a) -> Pattern a1 -> Pattern a
outside n = inside (1/n)

-- | @loopFirst p@ plays the first cycle of @p@ over and over: every
-- query is shifted back to cycle zero, and the resulting events are
-- shifted forward again into the queried cycle.
loopFirst :: Pattern a -> Pattern a
loopFirst p = splitQueries $ p {query = f}
  where f st = map
          (\(Event c w p' v) ->
             Event c (plus <$> w) (plus p') v) $
          query p (st {arc = minus $ arc st})
          where minus = fmap (subtract (sam s))
                plus = fmap (+ sam s)
                s = start $ arc st

-- | @timeLoop n p@ loops the first @n@ cycles of @p@ forever.
timeLoop :: Pattern Time -> Pattern a -> Pattern a
timeLoop n = outside n loopFirst

-- | Like @seqP@, but loops the resulting sequence forever — from the
-- earliest start time to the latest end time of the given triples.
seqPLoop :: [(Time, Time, Pattern a)] -> Pattern a
seqPLoop ps = timeLoop (pure $ maxT - minT) $ minT `rotL` seqP ps
  where minT = minimum $ map (\(x,_,_) -> x) ps
        maxT = maximum $ map (\(_,x,_) -> x) ps
{- | @toScale@ lets you turn a pattern of notes within a scale (expressed as a
list) to note numbers. For example `toScale [0, 4, 7] "0 1 2 3"` will turn
into the pattern `"0 4 7 12"`. It assumes your scale fits within an octave;
to change this use `toScale' size`. Example:
`toScale' 24 [0,4,7,10,14,17] (run 8)` turns into `"0 4 7 10 14 17 24 28"`
-}
toScale' :: Num a => Int -> [a] -> Pattern Int -> Pattern a
toScale' _ [] = const silence
toScale' o s = fmap noteInScale
  where -- Which octave (of size @o@) the index falls into.
        octave x = x `div` length s
        -- (!!!) wraps the index round the scale; the octave offset is
        -- added on top.
        noteInScale x = (s !!! x) + fromIntegral (o * octave x)

-- | 'toScale'' with an octave size of 12 semitones.
toScale :: Num a => [a] -> Pattern Int -> Pattern a
toScale = toScale' 12

{- | `swingBy x n` divides a cycle into `n` slices and delays the notes in
the second half of each slice by `x` fraction of a slice . @swing@ is an alias
for `swingBy (1%3)`
-}
swingBy :: Pattern Time -> Pattern Time -> Pattern a -> Pattern a
swingBy x n = inside n (withinArc (Arc 0.5 1) (x ~>))

-- | @swing = swingBy (1%3)@.
swing :: Pattern Time -> Pattern a -> Pattern a
swing = swingBy (pure $ 1%3)

{- | `cycleChoose` is like `choose` but only picks a new item from the list
once each cycle -}
cycleChoose :: [a] -> Pattern a
cycleChoose = segment 1 . choose
{- | Internal function used by shuffle and scramble -}
-- Slices @pat@ into @n@ equal zoomed parts, then uses @ipat@ to select
-- which part plays at any moment.
_rearrangeWith :: Pattern Int -> Int -> Pattern a -> Pattern a
_rearrangeWith ipat n pat = innerJoin $ (\i -> _fast nT $ repeatCycles n $ pats !! i) <$> ipat
  where
    pats = map (\i -> zoom (fromIntegral i / nT, fromIntegral (i+1) / nT) pat) [0 .. (n-1)]
    nT :: Time
    nT = fromIntegral n

{- | `shuffle n p` evenly divides one cycle of the pattern `p` into `n` parts,
and returns a random permutation of the parts each cycle.  For example,
`shuffle 3 "a b c"` could return `"a b c"`, `"a c b"`, `"b a c"`, `"b c a"`,
`"c a b"`, or `"c b a"`.  But it will **never** return `"a a a"`, because that
is not a permutation of the parts.
-}
shuffle :: Pattern Int -> Pattern a -> Pattern a
shuffle = tParam _shuffle

-- | Non-patterned worker for 'shuffle'; 'randrun' supplies a fresh
-- permutation of indices each cycle.
_shuffle :: Int -> Pattern a -> Pattern a
_shuffle n = _rearrangeWith (randrun n) n

{- | `scramble n p` is like `shuffle` but randomly selects from the parts
of `p` instead of making permutations.
For example, `scramble 3 "a b c"` will randomly select 3 parts from
`"a"` `"b"` and `"c"`, possibly repeating a single part.
-}
scramble :: Pattern Int -> Pattern a -> Pattern a
scramble = tParam _scramble

-- | Non-patterned worker for 'scramble'; indices are drawn independently
-- (with replacement) via 'irand'.
_scramble :: Int -> Pattern a -> Pattern a
_scramble n = _rearrangeWith (_segment (fromIntegral n) $ irand n) n

-- | @randrun n@: plays the numbers @0 .. n-1@, one per @1/n@th of a
-- cycle, in an order that is re-randomised each cycle (seeded by the
-- cycle start).
randrun :: Int -> Pattern Int
randrun 0 = silence
randrun n' =
  splitQueries $ Pattern (\(State a@(Arc s _) _) -> events a $ sam s)
  where events a seed = mapMaybe toEv $ zip arcs shuffled
          where -- A permutation of 0..n'-1, ordered by random keys.
                shuffled = map snd $ sortOn fst $ zip rs [0 .. (n'-1)]
                rs = timeToRands seed n' :: [Double]
                arcs = zipWith Arc fractions (tail fractions)
                fractions = map (+ (sam $ start a)) [0, 1 / fromIntegral n' .. 1]
                toEv (a',v) = do a'' <- subArc a a'
                                 return $ Event (Context []) (Just a') a'' v
-- | @ur t outer_p ps fs@: sequences named patterns over @t@ cycles
-- according to @outer_p@. Each name in @outer_p@ is looked up in @ps@;
-- a name of the form @\"name:fx\"@ additionally applies the
-- transformation named @fx@ from @fs@, scoped to the event's timespan.
ur :: Time -> Pattern String -> [(String, Pattern a)] -> [(String, Pattern a -> Pattern a)] -> Pattern a
ur t outer_p ps fs = _slow t $ unwrap $ adjust <$> timedValues (getPat . split <$> outer_p)
  where split = wordsBy (==':')
        getPat (s:xs) = (match s, transform xs)
        -- TODO - check this really can't happen..
        getPat _ = error "can't happen?"
        match s = fromMaybe silence $ lookup s ps'
        ps' = map (fmap (_fast t)) ps
        adjust (a, (p, f)) = f a p
        transform (x:_) a = transform' x a
        transform _ _ = id
        -- Apply the named transform only within the event's arc.
        transform' str (Arc s e) p = s `rotR` inside (pure $ 1/(e-s)) (matchF str) p
        matchF str = fromMaybe id $ lookup str fs
        -- Pair each event's value with its whole arc, for 'adjust'.
        timedValues = withEvent (\(Event c (Just a) a' v) -> Event c (Just a) a' (a,v)) . filterDigital

-- | @inhabit ps p@: each name-event in @p@ is replaced by the
-- correspondingly named pattern from @ps@ (or silence when the name is
-- missing), squeezed into the event's timespan.
inhabit :: [(String, Pattern a)] -> Pattern String -> Pattern a
inhabit ps p = squeezeJoin $ (\s -> fromMaybe silence $ lookup s ps) <$> p

{- | @spaceOut xs p@ repeats a pattern @p@ at different durations given by the list of time values in @xs@ -}
spaceOut :: [Time] -> Pattern a -> Pattern a
spaceOut xs p = _slow (toRational $ sum xs) $ stack $ map (`compressArc` p) spaceArcs
  where markOut :: Time -> [Time] -> [Arc]
        markOut _ [] = []
        markOut offset (x:xs') = Arc offset (offset+x):markOut (offset+x) xs'
        -- Normalise the arcs so they together span a single cycle.
        spaceArcs = map (\(Arc a b) -> Arc (a/s) (b/s)) $ markOut 0 xs
        s = sum xs
-- | @flatpat@ takes a Pattern of lists and pulls the list elements as
-- separate Events
flatpat :: Pattern [a] -> Pattern a
flatpat p = p {query = concatMap (\(Event c b b' xs) -> map (Event c b b') xs) . query p}

-- | @layer fs p@ applies each function from the list @fs@ to @p@, and
-- stacks up the resulting patterns.
layer :: [a -> Pattern b] -> a -> Pattern b
layer fs p = stack $ map ($ p) fs

-- | @arpeggiate@ finds events that share the same timespan, and spreads
-- them out during that timespan, so for example @arpeggiate "[bd,sn]"@
-- gets turned into @"bd sn"@. Useful for creating arpeggios/broken chords.
arpeggiate :: Pattern a -> Pattern a
arpeggiate = arpWith id

-- | Shorthand alias for arpeggiate
arpg :: Pattern a -> Pattern a
arpg = arpeggiate
-- | Generalised arpeggiation: groups events that share a whole, applies
-- @f@ to reorder/transform each group, then spreads the results evenly
-- across the shared timespan.
arpWith :: ([EventF (ArcF Time) a] -> [EventF (ArcF Time) b]) -> Pattern a -> Pattern b
arpWith f p = withEvents munge p
  where munge es = concatMap (spreadOut . f) (groupBy (\a b -> whole a == whole b) $ sortOn whole es)
        spreadOut xs = mapMaybe (\(n, x) -> shiftIt n (length xs) x) $ enumerate xs
        -- Give the n-th of d grouped events its own 1/d-sized slice.
        shiftIt n d (Event c (Just (Arc s e)) a' v) =
          do
            a'' <- subArc (Arc newS newE) a'
            return (Event c (Just $ Arc newS newE) a'' v)
          where newS = s + (dur * fromIntegral n)
                newE = newS + dur
                dur = (e - s) / fromIntegral d
        -- TODO ignoring analog events.. Should we just leave them as-is?
        shiftIt _ _ _ = Nothing

-- | @arp name p@: arpeggiates @p@ using the named strategy from 'arps'
-- (unknown names leave the grouping order unchanged).
arp :: Pattern String -> Pattern a -> Pattern a
arp = tParam _arp

_arp :: String -> Pattern a -> Pattern a
_arp name p = arpWith f p
  where f = fromMaybe id $ lookup name arps
        -- Table of arpeggiation orderings, each a pure list reordering.
        arps :: [(String, [a] -> [a])]
        arps = [("up", id),
                ("down", reverse),
                ("updown", \x -> init x ++ init (reverse x)),
                ("downup", \x -> init (reverse x) ++ init x),
                ("up&down", \x -> x ++ reverse x),
                ("down&up", \x -> reverse x ++ x),
                ("converge", converge),
                ("diverge", reverse . converge),
                ("disconverge", \x -> converge x ++ tail (reverse $ converge x)),
                ("pinkyup", pinkyup),
                ("pinkyupdown", \x -> init (pinkyup x) ++ init (reverse $ pinkyup x)),
                ("thumbup", thumbup),
                ("thumbupdown", \x -> init (thumbup x) ++ init (reverse $ thumbup x))
               ]

-- Alternately takes from the front and the back of the list, converging
-- on the middle, e.g. [1,2,3,4] -> [1,4,2,3].
converge [] = []
converge (x:xs) = x : converge' xs
converge' [] = []
converge' xs = last xs : converge (init xs)

-- Interleaves each note (except the last) with the top ("pinky") note.
pinkyup xs = concatMap (:[pinky]) $ init xs
  where pinky = last xs

-- Interleaves the bottom ("thumb") note with each of the other notes.
thumbup xs = concatMap (\x -> [thumb,x]) $ tail xs
  where thumb = head xs
{- TODO !
-- | @fill@ 'fills in' gaps in one pattern with events from another. For example @fill "bd" "cp ~ cp"@ would result in the equivalent of `"~ bd ~"`. This only finds gaps in a resulting pattern, in other words @"[bd ~, sn]"@ doesn't contain any gaps (because @sn@ covers it all), and @"bd ~ ~ sn"@ only contains a single gap that bridges two steps.
fill :: Pattern a -> Pattern a -> Pattern a
fill p' p = struct (splitQueries $ p {query = q}) p'
where
q st = removeTolerance (s,e) $ invert (s-tolerance, e+tolerance) $ query p (st {arc = (s-tolerance, e+tolerance)})
where (s,e) = arc st
invert (s,e) es = map arcToEvent $ foldr remove [(s,e)] (map part es)
remove (s,e) xs = concatMap (remove' (s, e)) xs
remove' (s,e) (s',e') | s > s' && e < e' = [(s',s),(e,e')] -- inside
| s > s' && s < e' = [(s',s)] -- cut off right
| e > s' && e < e' = [(e,e')] -- cut off left
| s <= s' && e >= e' = [] -- swallow
| otherwise = [(s',e')] -- miss
arcToEvent a = ((a,a),"x")
removeTolerance (s,e) es = concatMap (expand) $ map (withPart f) es
where f a = concatMap (remove' (e,e+tolerance)) $ remove' (s-tolerance,s) a
expand ((a,xs),c) = map (\x -> ((a,x),c)) xs
tolerance = 0.01
-}
-- | Repeats each event @n@ times within its arc.
ply :: Pattern Int -> Pattern a -> Pattern a
ply = tParam _ply

-- | Non-patterned worker for 'ply': stacks @n@ copies, then arpeggiates
-- so the copies play one after another inside the original arc.
_ply :: Int -> Pattern a -> Pattern a
_ply n p = arpeggiate $ stack (replicate n p)

-- | Like ply, but applies a function each time. The applications are compounded.
plyWith :: (Ord t, Num t) => Pattern t -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
plyWith np f p = innerJoin $ (\n -> _plyWith n f p) <$> np

_plyWith :: (Ord t, Num t) => t -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
_plyWith numPat f p = arpeggiate $ compound numPat
  where -- Overlay n progressively more-transformed copies of p.
        compound n | n <= 1 = p
                   | otherwise = overlay p (f $ compound $ n-1)

-- | Uses the first (binary) pattern to switch between the following
-- two patterns. The resulting structure comes from the source patterns, not the
-- binary pattern. See also @stitch@.
sew :: Pattern Bool -> Pattern a -> Pattern a -> Pattern a
sew pb a b = overlay (mask pb a) (mask (inv pb) b)

-- | Uses the first (binary) pattern to switch between the following
-- two patterns. The resulting structure comes from the binary
-- pattern, not the source patterns. See also @sew@.
stitch :: Pattern Bool -> Pattern a -> Pattern a -> Pattern a
stitch pb a b = overlay (struct pb a) (struct (inv pb) b)

-- | A binary pattern is used to conditionally apply a function to a
-- source pattern. The function is applied when a @True@ value is
-- active, and the pattern is let through unchanged when a @False@
-- value is active. No events are let through where no binary values
-- are active.
while :: Pattern Bool -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
while b f pat = sew b (f pat) pat

-- | @stutter n t p@ overlays @n@ copies of @p@, each one shifted right
-- by a further @t@ of a cycle.
stutter :: Integral i => i -> Time -> Pattern a -> Pattern a
stutter n t p = stack $ map (\i -> (t * fromIntegral i) `rotR` p) [0 .. (n-1)]

-- | Fixed-count 'stutter' shorthands.
echo, triple, quad, double :: Time -> Pattern a -> Pattern a
echo   = stutter (2 :: Int)
triple = stutter (3 :: Int)
quad   = stutter (4 :: Int)
double = echo
{- | The `jux` function creates strange stereo effects, by applying a
function to a pattern, but only in the right-hand channel. For
example, the following reverses the pattern on the righthand side:

@
d1 $ slow 32 $ jux (rev) $ striateBy 32 (1/16) $ sound "bev"
@

When passing pattern transforms to functions like [jux](#jux) and [every](#every),
it's possible to chain multiple transforms together with `.`, for
example this both reverses and halves the playback speed of the
pattern in the righthand channel:

@
d1 $ slow 32 $ jux ((# speed "0.5") . rev) $ striateBy 32 (1/16) $ sound "bev"
@
-}
jux
  :: (Pattern ControlMap -> Pattern ControlMap)
     -> Pattern ControlMap -> Pattern ControlMap
jux = juxBy 1

-- | Like 'jux', but also gives the dry and wet signals separate @cut@
-- groups (-1 and -2), so they choke themselves rather than each other.
juxcut
  :: (Pattern ControlMap -> Pattern ControlMap)
     -> Pattern ControlMap -> Pattern ControlMap
juxcut f p = stack [p     # P.pan (pure 0) # P.cut (pure (-1)),
                    f $ p # P.pan (pure 1) # P.cut (pure (-2))
                   ]

-- | Like 'jux'', but each transformed copy also gets its own cut group.
juxcut' :: [t -> Pattern ControlMap] -> t -> Pattern ControlMap
juxcut' fs p = stack $ map (\n -> ((fs !! n) p |+ P.cut (pure $ 1-n)) # P.pan (pure $ fromIntegral n / fromIntegral l)) [0 .. l-1]
  where l = length fs

{- | In addition to `jux`, `jux'` allows using a list of pattern transform. resulting patterns from each transformation will be spread via pan from left to right.

For example:

@
d1 $ jux' [iter 4, chop 16, id, rev, palindrome] $ sound "bd sn"
@

will put `iter 4` of the pattern to the far left and `palindrome` to the far right. In the center the original pattern will play and mid left mid right the chopped and the reversed version will appear.

One could also write:

@
d1 $ stack [
    iter 4 $ sound "bd sn" # pan "0",
    chop 16 $ sound "bd sn" # pan "0.25",
    sound "bd sn" # pan "0.5",
    rev $ sound "bd sn" # pan "0.75",
    palindrome $ sound "bd sn" # pan "1",
  ]
@
-}
jux' :: [t -> Pattern ControlMap] -> t -> Pattern ControlMap
jux' fs p = stack $ map (\n -> (fs !! n) p |+ P.pan (pure $ fromIntegral n / fromIntegral l)) [0 .. l-1]
  where l = length fs

-- | Variant of `jux` with gentler panning: the dry signal is panned at
-- 5/8 and the transformed signal at 1/8.
jux4
  :: (Pattern ControlMap -> Pattern ControlMap)
     -> Pattern ControlMap -> Pattern ControlMap
jux4 f p = stack [p # P.pan (pure (5/8)), f $ p # P.pan (pure (1/8))]

{- |
With `jux`, the original and effected versions of the pattern are
panned hard left and right (i.e., panned at 0 and 1). This can be a
bit much, especially when listening on headphones. The variant `juxBy`
has an additional parameter, which brings the channel closer to the
centre. For example:

@
d1 $ juxBy 0.5 (density 2) $ sound "bd sn:1"
@

In the above, the two versions of the pattern would be panned at 0.25
and 0.75, rather than 0 and 1.
-}
juxBy
  :: Pattern Double
     -> (Pattern ControlMap -> Pattern ControlMap)
     -> Pattern ControlMap
     -> Pattern ControlMap
juxBy n f p = stack [p |+ P.pan 0.5 |- P.pan (n/2), f $ p |+ P.pan 0.5 |+ P.pan (n/2)]
-- | @pick name n@ appends the sample index @n@ to the sample @name@,
-- e.g. @pick "bd" 3 == "bd:3"@.
pick :: String -> Int -> String
pick sampleName idx = sampleName ++ ":" ++ show idx

-- samples "jvbass [~ latibro] [jvbass [latibro jvbass]]" ((1%2) `rotL` slow 6 "[1 6 8 7 3]")

-- | Pointwise combines a pattern (or any other applicative) of sample
-- names with one of sample indices, using 'pick'.
samples :: Applicative f => f String -> f Int -> f String
samples names idxs = fmap pick names <*> idxs

-- | As 'samples', but application is driven from the index side first.
samples' :: Applicative f => f String -> f Int -> f String
samples' names idxs = fmap (flip pick) idxs <*> names
{-
scrumple :: Time -> Pattern a -> Pattern a -> Pattern a
scrumple o p p' = p'' -- overlay p (o `rotR` p'')
where p'' = Pattern $ \a -> concatMap
(\((s,d), vs) -> map (\x -> ((s,d),
snd x
)
)
(arc p' (s,s))
) (arc p a)
-}
-- | @spreadf fs p@: applies each function in @fs@ to @p@, one per
-- cycle (i.e. 'spread' specialised to function application).
spreadf :: [a -> Pattern b] -> a -> Pattern b
spreadf = spread ($)

-- | @stackwith p ps@: stacks the patterns @ps@, combining each (via
-- @#@) with a copy of @p@ rotated left by an increasing fraction of a
-- cycle. Returns silence for an empty list.
stackwith :: Unionable a => Pattern a -> [Pattern a] -> Pattern a
stackwith p ps | null ps = silence
               | otherwise = stack $ map (\(i, p') -> p' # ((fromIntegral i % l) `rotL` p)) (zip [0::Int ..] ps)
  where l = fromIntegral $ length ps
{-
cross f p p' = Pattern $ \t -> concat [filter flt $ arc p t,
filter (not . flt) $ arc p' t
]
] where flt = f . cyclePos . fst . fst
-}
{- | `range` will take a pattern which goes from 0 to 1 (like `sine`), and range it to a different range - between the first and second arguments. In the below example, `range 1 1.5` shifts the range of `sine1` from 0 - 1 to 1 - 1.5.

@
d1 $ jux (iter 4) $ sound "arpy arpy:2*2"
  |+ speed (slow 4 $ range 1 1.5 sine1)
@
-}
-- NOTE(review): the @*>@ operators here are presumably Tidal's own
-- structure-from-the-right applicative operators, not
-- Control.Applicative's — verify against Sound.Tidal.Pattern before
-- refactoring this expression.
range :: Num a => Pattern a -> Pattern a -> Pattern a -> Pattern a
range fromP toP p = (\from to v -> ((v * (to-from)) + from)) <$> fromP *> toP *> p

-- | Non-patterned version of 'range': linearly rescales 0..1 values to
-- the @from@..@to@ interval.
_range :: (Functor f, Num b) => b -> b -> f b -> f b
_range from to p = (+ from) . (* (to-from)) <$> p

{- | `rangex` is an exponential version of `range`, good for using with
frequencies. Do *not* use negative numbers or zero as arguments! -}
rangex :: (Functor f, Floating b) => b -> b -> f b -> f b
rangex from to p = exp <$> _range (log from) (log to) p

-- | @off t f p@: superimposes @f@ applied to @p@, shifted right by
-- @t@, on top of @p@ itself.
off :: Pattern Time -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
off tp f p = innerJoin $ (\tv -> _off tv f p) <$> tp

_off :: Time -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
_off t f p = superimpose (f . (t `rotR`)) p

-- | 'off' specialised to adding a numeric offset to the shifted copy.
offadd :: Num a => Pattern Time -> Pattern a -> Pattern a -> Pattern a
offadd tp pn p = off tp (+pn) p
-- | Step sequencing: @step s cs@ plays sample @s@ for each @\'x\'@ in
-- @cs@, @s:digit@ for each digit, and silence for any other character.
step :: String -> String -> Pattern String
step s cs = fastcat $ map f cs
  where f c | c == 'x' = pure s
            | isDigit c = pure $ s ++ ":" ++ [c]
            | otherwise = silence

-- | Stacks up several 'step' sequences, one per (sample, steps) pair.
steps :: [(String, String)] -> Pattern String
steps = stack . map (uncurry step)
-- | like `step`, but allows you to specify an array of strings to use for 0,1,2...
-- A digit with no corresponding entry in the list (e.g. @\'9\'@ with a
-- four-element list), or @\'x\'@ with an empty list, now yields silence
-- instead of crashing on a partial @head@ / @(!!)@.
step' :: [String] -> String -> Pattern String
step' ss cs = fastcat $ map f cs
  where f c | c == 'x' = headOrSilence
            | isDigit c = indexOrSilence (digitToInt c)
            | otherwise = silence
        -- Total replacement for the partial 'head'.
        headOrSilence = case ss of
                          (s:_) -> pure s
                          []    -> silence
        -- Total replacement for the partial (!!); digitToInt of an
        -- isDigit char is always >= 0, so only the upper bound matters.
        indexOrSilence i | i < length ss = pure (ss !! i)
                         | otherwise = silence
-- | @ghost'' a f p@: superimposes two copies of @p@, transformed by @f@
-- and shifted right by @1.5*a@ and @2.5*a@ respectively.
ghost'' :: Time -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
ghost'' a f p = superimpose (((a*2.5) `rotR`) . f) $ superimpose (((a*1.5) `rotR`) . f) p

-- | Adds quieter, shortened, pitched-up "ghost" copies of the pattern,
-- offset by multiples of @a@.
ghost' :: Time -> Pattern ControlMap -> Pattern ControlMap
ghost' a p = ghost'' a ((|*| P.gain (pure 0.7)) . (|> P.end (pure 0.2)) . (|*| P.speed (pure 1.25))) p

-- | 'ghost'' with a default offset of an eighth of a cycle.
ghost :: Pattern ControlMap -> Pattern ControlMap
ghost = ghost' 0.125

{- |
   tabby - A more literal weaving than the `weave` function, give number
   of 'threads' per cycle and two patterns, and this function will weave them
   together using a plain (aka 'tabby') weave, with a simple over/under structure
 -}
tabby :: Int -> Pattern a -> Pattern a -> Pattern a
tabby nInt p p' = stack [maskedWarp,
                         maskedWeft
                        ]
  where
    n = fromIntegral nInt
    -- Thread index sequences running back and forth across the cloth.
    weft = concatMap (const [[0..n-1], reverse [0..n-1]]) [0 .. (n `div` 2) - 1]
    warp = transpose weft
    -- Lay a pattern out along a sequence of thread indices, one zoomed
    -- slice per index.
    thread xs p'' = _slow (n%1) $ fastcat $ map (\i -> zoomArc (Arc (i%n) ((i+1)%n)) p'') (concat xs)
    weftP = thread weft p'
    warpP = thread warp p
    -- Alternating over/under masks give the plain-weave structure.
    maskedWeft = mask (every 2 rev $ _fast (n % 2) $ fastCat [silence, pure True]) weftP
    maskedWarp = mask (every 2 rev $ _fast (n % 2) $ fastCat [pure True, silence]) warpP
-- | chooses between a list of patterns, using a pattern of floats (from 0-1)
select :: Pattern Double -> [Pattern a] -> Pattern a
select = tParam _select

-- | Non-patterned worker for 'select'; the float is clamped to [0,1]
-- before being scaled to an index.
_select :: Double -> [Pattern a] -> Pattern a
_select f ps = ps !! floor (max 0 (min 1 f) * fromIntegral (length ps - 1))

-- | chooses between a list of functions, using a pattern of floats (from 0-1)
selectF :: Pattern Double -> [Pattern a -> Pattern a] -> Pattern a -> Pattern a
selectF pf ps p = innerJoin $ (\f -> _selectF f ps p) <$> pf

-- | Non-patterned worker for 'selectF'; clamps just below 1 so the
-- scaled index never reaches @length ps@.
_selectF :: Double -> [Pattern a -> Pattern a] -> Pattern a -> Pattern a
_selectF f ps p = (ps !! floor (max 0 (min 0.999999 f) * fromIntegral (length ps))) p

-- | chooses between a list of functions, using a pattern of integers
pickF :: Pattern Int -> [Pattern a -> Pattern a] -> Pattern a -> Pattern a
pickF pInt fs pat = innerJoin $ (\i -> _pickF i fs pat) <$> pInt

-- | Non-patterned worker for 'pickF'; (!!!) wraps the index round the
-- list.
_pickF :: Int -> [Pattern a -> Pattern a] -> Pattern a -> Pattern a
_pickF i fs p = (fs !!! i) p
-- | @contrast p f f' p'@ splits controlpattern @p'@ in two, applying
-- the function @f@ to one and @f'@ to the other. This depends on
-- whether events in it contains values matching with those in @p@.
-- For example in @contrast (n "1") (# crush 3) (# vowel "a") $ n "0 1" # s "bd sn" # speed 3@,
-- the first event will have the vowel effect applied and the second
-- will have the crush applied.
contrast :: (ControlPattern -> ControlPattern) -> (ControlPattern -> ControlPattern)
            -> ControlPattern -> ControlPattern -> ControlPattern
contrast = contrastBy (==)

-- | Generalisation of 'contrast', parameterised over the comparison
-- used to match the test pattern's values against the source events.
contrastBy :: (a -> Value -> Bool)
              -> (ControlPattern -> Pattern b)
              -> (ControlPattern -> Pattern b)
              -> Pattern (Map.Map String a)
              -> Pattern (Map.Map String Value)
              -> Pattern b
contrastBy comp f f' p p' = overlay (f matched) (f' unmatched)
  where matches = matchManyToOne (flip $ Map.isSubmapOfBy comp) p p'
        matched :: ControlPattern
        matched = filterJust $ (\(t, a) -> if t then Just a else Nothing) <$> matches
        unmatched :: ControlPattern
        unmatched = filterJust $ (\(t, a) -> if not t then Just a else Nothing) <$> matches

-- | Like 'contrastBy', but matching each control against an inclusive
-- (min, max) range rather than a single value.
contrastRange
  :: (ControlPattern -> Pattern a)
     -> (ControlPattern -> Pattern a)
     -> Pattern (Map.Map String (Value, Value))
     -> ControlPattern
     -> Pattern a
contrastRange = contrastBy f
      where f (VI s, VI e) (VI v) = v >= s && v <= e
            f (VF s, VF e) (VF v) = v >= s && v <= e
            -- Strings have no meaningful range; both ends must equal the value.
            f (VS s, VS e) (VS v) = v == s && v == e
            f _ _ = False

-- | Like @contrast@, but one function is given, and applied to events with matching controls.
fix :: (ControlPattern -> ControlPattern) -> ControlPattern -> ControlPattern -> ControlPattern
fix f = contrast f id

-- | Like @contrast@, but one function is given, and applied to events
-- with controls which don't match.
unfix :: (ControlPattern -> ControlPattern) -> ControlPattern -> ControlPattern -> ControlPattern
unfix = contrast id

-- | Range-matching version of 'fix'.
fixRange :: (ControlPattern -> Pattern ControlMap)
            -> Pattern (Map.Map String (Value, Value))
            -> ControlPattern
            -> Pattern ControlMap
fixRange f = contrastRange f id

-- | Range-matching version of 'unfix'.
unfixRange :: (ControlPattern -> Pattern ControlMap)
              -> Pattern (Map.Map String (Value, Value))
              -> ControlPattern
              -> Pattern ControlMap
unfixRange = contrastRange id
-- | Restrict the values inside any 'Functor' to @n@ equally spaced
-- divisions of 1, i.e. round each value down to the nearest multiple
-- of @1/n@.
quantise :: (Functor f, RealFrac b) => b -> f b -> f b
quantise n = fmap snapDown
  where snapDown v = fromIntegral (floor (v * n) :: Int) / n
-- | Inverts all the values in a boolean pattern (or any other
-- functor) — pointwise logical negation.
inv :: Functor f => f Bool -> f Bool
inv = fmap not
-- | Serialises a pattern so there's only one event playing at any one
-- time, making it 'monophonic'. Events which start/end earlier are given priority.
mono :: Pattern a -> Pattern a
mono p = Pattern $ \(State a cm) -> flatten $ query p (State a cm) where
  -- Sort by whole, trim later events against earlier ones, then clip
  -- each surviving part back inside its (possibly shortened) whole.
  flatten :: [Event a] -> [Event a]
  flatten = mapMaybe constrainPart . truncateOverlaps . sortOn whole
  truncateOverlaps [] = []
  truncateOverlaps (e:es) = e : truncateOverlaps (mapMaybe (snip e) es)
  -- TODO - decide what to do about analog events..
  -- snip a b: b starts after a ends -> keep; b fully covered -> drop;
  -- otherwise shorten b's whole so it starts where a stops.
  snip a b | start (wholeOrPart b) >= stop (wholeOrPart a) = Just b
           | stop (wholeOrPart b) <= stop (wholeOrPart a) = Nothing
           | otherwise = Just b {whole = Just $ Arc (stop $ wholeOrPart a) (stop $ wholeOrPart b)}
  constrainPart :: Event a -> Maybe (Event a)
  constrainPart e = do a <- subArc (wholeOrPart e) (part e)
                       return $ e {part = a}
-- serialize the given pattern
-- find the middle of the query's arc and use that to query the serialized pattern. We should get either no events or a single event back
-- if we don't get any events, return nothing
-- if we get an event, get the stop of its arc, and use that to query the serialized pattern, to see if there's an adjoining event
-- if there isn't, return the event as-is.
-- if there is, check where we are in the 'whole' of the event, and use that to tween between the values of the event and the next event
-- smooth :: Pattern Double -> Pattern Double
-- TODO - test this with analog events
smooth :: Fractional a => Pattern a -> Pattern a
smooth p = Pattern $ \st@(State a cm) -> tween st a $ query monoP (State (midArc a) cm)
  where
    -- Zero-width arc at the midpoint of the query arc.
    midArc a = Arc (mid (start a, stop a)) (mid (start a, stop a))
    tween _ _ [] = []
    tween st queryA (e:_) = maybe [e {whole = Just queryA, part = queryA}] (tween' queryA) (nextV st)
      where aStop = Arc (wholeStop e) (wholeStop e)
            -- Look for an adjoining event starting exactly where this one stops.
            nextEs st' = query monoP (st' {arc = aStop})
            nextV st' | null (nextEs st') = Nothing
                      | otherwise = Just $ value (head (nextEs st'))
            -- Linear interpolation between this event's value and the next.
            tween' queryA' v =
              [ Event
                { context = context e,
                  whole = Just queryA'
                , part = queryA'
                , value = value e + ((v - value e) * pc)}
              ]
            -- pc: fractional position of the query within the event's whole.
            pc | delta' (wholeOrPart e) == 0 = 0
               | otherwise = fromRational $ (eventPartStart e - wholeStart e) / delta' (wholeOrPart e)
            delta' a = stop a - start a
    monoP = mono p
-- | Substitute each value in the pattern via an association list; events
-- whose value has no entry in the table are dropped.
swap :: Eq a => [(a, b)] -> Pattern a -> Pattern b
swap table pat = filterJust (fmap (\k -> lookup k table) pat)
{-
snowball |
snowball takes a function that can combine patterns (like '+'),
a function that transforms a pattern (like 'slow'),
a depth, and a starting pattern,
it will then transform the pattern and combine it with the last transformation until the depth is reached
this is like putting an effect (like a filter) in the feedback of a delay line
each echo is more effected
d1 $ note (scale "hexDorian" $ snowball (+) (slow 2 . rev) 8 "0 ~ . -1 . 5 3 4 . ~ -2") # s "gtr"
-}
-- Scans the combination over successive iterations of f, cat-ing the
-- first 'depth' accumulated patterns.
snowball :: Int -> (Pattern a -> Pattern a -> Pattern a) -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
snowball depth combinationFunction f pattern = cat $ take depth $ scanl combinationFunction pattern $ iterate f pattern
{- @soak@ |
applies a function to a pattern and cats the resulting pattern,
then continues applying the function until the depth is reached
this can be used to create a pattern that wanders away from
the original pattern by continually adding random numbers
d1 $ note (scale "hexDorian" mutateBy (+ (range -1 1 $ irand 2)) 8 $ "0 1 . 2 3 4") # s "gtr"
-}
soak :: Int -> (Pattern a -> Pattern a) -> Pattern a -> Pattern a
soak depth f pattern = cat $ take depth $ iterate f pattern
-- | Render the first cycle of a pattern as a mini-notation-like string,
-- sampling it at @n@ equally spaced steps per cycle.
deconstruct :: Int -> Pattern String -> String
deconstruct n p = unwords $ map showStep $ toList p
  where
    -- One step: silence, a single value, or a chord of simultaneous values.
    showStep :: [String] -> String
    showStep [] = "~"
    showStep [x] = x
    showStep xs = "[" ++ intercalate ", " xs ++ "]"
    -- Sample the segmented pattern at n step windows, collecting the
    -- values active in each window.
    toList :: Pattern a -> [[a]]
    toList pat = map (\(s,e) -> map value $ queryArc (_segment n' pat) (Arc s e)) arcs
      where breaks = [0, (1/n') ..]
            arcs = zip (take n breaks) (drop 1 breaks)
            n' = fromIntegral n
{- @bite@ n ipat pat |
slices a pattern `pat` into `n` pieces, then uses the `ipat` pattern of integers to index into those slices.
So `bite 4 "0 2*2" (run 8)` is the same as `"[0 1] [4 5]*2"`.
-}
bite :: Int -> Pattern Int -> Pattern a -> Pattern a
bite n ipat pat = squeezeJoin $ zoompat <$> ipat
  -- Each index i selects the i'th 1/n-wide slice of the cycle
  -- (indices are taken modulo n).
  where zoompat i = zoom (i'/(fromIntegral n), (i'+1)/(fromIntegral n)) pat
         where i' = fromIntegral $ i `mod` n
-- | @squeeze ipat pats@ uses a pattern of integers to squeeze-index into
-- the given list of patterns; an empty list yields 'silence'.
squeeze :: Pattern Int -> [Pattern a] -> Pattern a
squeeze _ [] = silence
squeeze ipat pats = squeezeJoin (fmap (pats !!!) ipat)
-- | Like squeezeJoin, but also speeds up each inner pattern to fit its
-- event's whole (adjusting the 'speed' control accordingly).
squeezeJoinUp :: Pattern (ControlPattern) -> ControlPattern
squeezeJoinUp pp = pp {query = q}
  where q st = concatMap (f st) (query (filterDigital pp) st)
        -- Compress each inner pattern into its outer whole, scaling speed
        -- by the reciprocal of the whole's duration.
        f st (Event c (Just w) p v) =
          mapMaybe (munge c w p) $ query (compressArc (cycleArc w) (v |* P.speed (pure $ fromRational $ 1/(stop w - start w)))) st {arc = p}
        -- already ignoring analog events, but for completeness..
        f _ _ = []
        -- Intersect inner and outer wholes/parts, merging contexts.
        munge co oWhole oPart (Event ci (Just iWhole) iPart v) =
          do w' <- subArc oWhole iWhole
             p' <- subArc oPart iPart
             return (Event (combineContexts [ci,co]) (Just w') p' v)
        munge _ _ _ _ = Nothing
_chew :: Int -> Pattern Int -> ControlPattern -> ControlPattern
_chew n ipat pat = (squeezeJoinUp $ zoompat <$> ipat) |/ P.speed (pure $ fromIntegral n)
  -- Like 'bite' but playback speed follows the slicing (divided by n).
  where zoompat i = zoom (i'/(fromIntegral n), (i'+1)/(fromIntegral n)) (pat)
         where i' = fromIntegral $ i `mod` n
-- TODO maybe _chew could pattern the first parameter directly..
-- | Patterned-parameter wrapper around '_chew'.
chew :: Pattern Int -> Pattern Int -> ControlPattern -> ControlPattern
chew npat ipat pat = innerJoin $ (\n -> _chew n ipat pat) <$> npat
-- | Most-significant-bit-first list of the low @n@ bits of a number.
__binary :: Data.Bits.Bits b => Int -> b -> [Bool]
__binary n num = [testBit num i | i <- [n-1, n-2 .. 0]]
-- | Pattern version of '__binary': the low @n@ bits of @num@ as a
-- pattern of booleans, one cycle per expansion.
_binary :: Data.Bits.Bits b => Int -> b -> Pattern Bool
_binary n num = listToPat (__binary n num)
-- | Squeeze the n-bit binary expansion of each integer in the pattern
-- into that event's timespan.
binaryN :: Int -> Pattern Int -> Pattern Bool
binaryN n p = squeezeJoin (fmap (_binary n) p)
-- | 'binaryN' specialised to bytes (8 bits).
binary :: Pattern Int -> Pattern Bool
binary p = binaryN 8 p
-- | Turn each string into the bit pattern of its ASCII byte values.
ascii :: Pattern String -> Pattern Bool
ascii p = squeezeJoin (fmap (listToPat . concatMap (__binary 8 . ord)) p)
| d0kt0r0/Tidal | src/Sound/Tidal/UI.hs | gpl-3.0 | 75,241 | 0 | 26 | 18,019 | 18,726 | 9,556 | 9,170 | -1 | -1 |
-- | Functions for parsing Stockholm alignments
module Biobase.StockholmAlignment.Import (
readExistingStockholm,
parseStockholm,
readStockholm,
module Biobase.StockholmAlignment.Types
) where
import Biobase.StockholmAlignment.Types
import Text.ParserCombinators.Parsec
import qualified Control.Exception.Base as CE
import qualified Data.Text as T
import Data.List
import System.Directory
import Data.Either.Unwrap
-- | Read Stockholm alignments from a file path, mapping an empty path,
-- a missing file, and parse failures onto 'Left' messages rather than
-- throwing exceptions.
readExistingStockholm :: String -> IO (Either String [StockholmAlignment])
readExistingStockholm filePath =
  if null filePath
     then return (Left "")
     else do
       fileExists <- doesFileExist filePath
       if fileExists
          then do
            stockholmInput <- readStockholm filePath
            if isLeft stockholmInput
               -- NOTE(review): this shows the whole Either (including the
               -- "Left" wrapper) in the error message — confirm intentional.
               then return (Left (show stockholmInput))
               else return (Right (fromRight stockholmInput))
          else return (Left ("Could not find stockholm alignment file with path:" ++ filePath))
-- | Parse Stockholm-format text into a list of alignments.
parseStockholm :: String -> Either ParseError [StockholmAlignment]
parseStockholm = parse genParseStockholms "Stockholm"
-- | Parse StockholmAlignment from the file at the given path. The result
-- is forced with 'CE.evaluate' so parse errors surface here, not lazily.
readStockholm :: String -> IO (Either ParseError [StockholmAlignment])
readStockholm filePath = do
  parsedFile <- parseFromFile genParseStockholms filePath
  CE.evaluate parsedFile
-- | Parse one or more concatenated Stockholm alignments, requiring that
-- the entire input is consumed.
genParseStockholms :: GenParser Char st [StockholmAlignment]
genParseStockholms = many1 genParseStockholm <* eof
-- | Parse a single Stockholm alignment: the "# STOCKHOLM <version>"
-- header, a stream of annotation/sequence tokens, and the closing "//".
genParseStockholm :: GenParser Char st StockholmAlignment
genParseStockholm = do
  string "# STOCKHOLM"
  many1 (try (string " "))
  _version <- many1 (noneOf "\n")
  many (try newline)
  _stockholmToken <- many1 genParseToken
  string "//\n"
  optional (try (string "\n"))
  return (tokenToStockholm (T.pack _version) _stockholmToken)
-- | Parse one token: file (#=GF), column (#=GC), residue (#=GR) or
-- sequence (#=GS) annotation, falling back to a plain sequence line.
genParseToken :: GenParser Char st StockholmToken
genParseToken = do
  choice [try genParseTokFileA, try genParseTokColA, try genParseTokResA, try genParseTokSeqA, try genParseTokSeq]
-- | Parse a per-file annotation line: @#=GF <TAG> <free text>@.
genParseTokFileA :: GenParser Char st StockholmToken
genParseTokFileA = do
  many newline
  string "#=GF"
  char ' '
  -- Tags are upper-case identifiers (plus '*').
  _tag <- many1 (oneOf "ABCDEFGHIJKLMNOPQRSTUVWXYZ*")
  many1 (char ' ')
  _info <- many1 (noneOf "\n")
  newline
  return (TokFileA (T.pack _tag) (T.pack _info))
-- | Parse a per-column annotation line: @#=GC <tag> <text>@.
genParseTokColA :: GenParser Char st StockholmToken
genParseTokColA = do
  many newline
  string "#=GC"
  char ' '
  _tag <- many1 (noneOf " \n")
  many1 (char ' ')
  _info <- many1 (noneOf "\n")
  newline
  return $ TokColA (T.pack _tag) (T.pack _info)
-- | Parse a per-residue annotation line: @#=GR <id> <tag> <text>@.
genParseTokResA :: GenParser Char st StockholmToken
genParseTokResA = do
  many newline
  string "#=GR"
  char ' '
  _id <- many1 (noneOf " \n")
  many1 (char ' ')
  _tag <- many1 (noneOf " \n")
  -- Consume the separator so leading blanks do not end up in the
  -- annotation text (previously _info kept them).
  many1 (char ' ')
  _info <- many1 (noneOf "\n")
  newline
  return $ TokResA (T.pack _id) (T.pack _tag) (T.pack _info)
-- | Parse a per-sequence annotation line: @#=GS <id> <tag> <text>@.
-- (The trailing newline is left for the next token's leading
-- @many newline@, matching the original behaviour.)
genParseTokSeqA :: GenParser Char st StockholmToken
genParseTokSeqA = do
  many newline
  string "#=GS"
  char ' '
  _id <- many1 (noneOf " \n")
  many1 (char ' ')
  _tag <- many1 (noneOf " \n")
  -- Consume the separator so leading blanks do not end up in the
  -- annotation text (previously _info kept them).
  many1 (char ' ')
  _info <- many1 (noneOf "\n")
  return $ TokSeqA (T.pack _id) (T.pack _tag) (T.pack _info)
-- | Parse a plain aligned-sequence line: @<id> <residues>@.
genParseTokSeq :: GenParser Char st StockholmToken
genParseTokSeq = do
  many newline
  _sid <- many1 (noneOf " \n")
  many1 (char ' ')
  -- NOTE(review): only upper-case residues plus gap characters are
  -- accepted; Stockholm files may also use lower-case residues — confirm
  -- whether rejecting them here is intentional.
  _sequence <- many1 (oneOf "ABCDEFGHIJKLMNOPQRSTUVWXYZ-.")
  newline
  return $ TokSeq (T.pack _sid) (T.pack _sequence)
-- | Assemble the parsed token stream into a 'StockholmAlignment':
-- partition tokens by kind, merge multi-line annotations, and build one
-- entry per sequence.
tokenToStockholm :: T.Text -> [StockholmToken] -> StockholmAlignment
tokenToStockholm _version _token = StockholmAlignment _version _fileAnnotation _columnAnnotation _sequenceEntries
  where _fileAtoken = filter isFileTok _token
        _colAtoken = filter isColATok _token
        _resAtoken = filter isResATok _token
        _seqAtoken = filter isSeqATok _token
        _seqtoken = filter isSeqTok _token
        _fileAnnotation = mergeFileToken _fileAtoken
        _columnAnnotation = mergeColToken _colAtoken
        mergedSeqAToken = mergeSeqAToken _seqAtoken
        mergedRAToken = mergeResAToken _resAtoken
        _sequenceEntries = buildSeqEntries mergedSeqAToken mergedRAToken _seqtoken
-- | Constructor tests used to partition the token stream by kind.
isFileTok :: StockholmToken -> Bool
isFileTok TokFileA{} = True
isFileTok _ = False
isColATok :: StockholmToken -> Bool
isColATok TokColA{} = True
isColATok _ = False
isResATok :: StockholmToken -> Bool
isResATok TokResA{} = True
isResATok _ = False
isSeqATok :: StockholmToken -> Bool
isSeqATok TokSeqA{} = True
isSeqATok _ = False
isSeqTok :: StockholmToken -> Bool
isSeqTok TokSeq{} = True
isSeqTok _ = False
-- | Collapse all #=GF tokens into one annotation entry per distinct tag.
mergeFileToken :: [StockholmToken] -> [AnnotationEntry]
mergeFileToken toks = map (buildFEntry toks) (nub (map fTag toks))
-- | Concatenate the info text of every file-level token carrying the tag.
buildFEntry :: [StockholmToken] -> T.Text -> AnnotationEntry
buildFEntry toks currentTag = AnnotationEntry currentTag combined
  where combined = T.concat [fInfo t | t <- toks, fTag t == currentTag]
-- | Collapse all #=GC tokens into one annotation entry per distinct tag.
mergeColToken :: [StockholmToken] -> [AnnotationEntry]
mergeColToken toks = map (buildCEntry toks) (nub (map cTag toks))
-- | Concatenate the info text of every column token carrying the tag.
buildCEntry :: [StockholmToken] -> T.Text -> AnnotationEntry
buildCEntry toks currentTag = AnnotationEntry currentTag combined
  where combined = T.concat [cInfo t | t <- toks, cTag t == currentTag]
-- | Merge #=GS tokens: group by sequence id, then by tag within each id.
mergeSeqAToken :: [StockholmToken] -> [StockholmToken]
mergeSeqAToken toks = concatMap (mergeSAIdToken toks) (nub (map aId toks))
-- | For one sequence id, produce a merged token per distinct tag.
mergeSAIdToken :: [StockholmToken] -> T.Text -> [StockholmToken]
mergeSAIdToken toks currentId = map (mergeSAIdTagToken idToken currentId) tags
  where idToken = filter (\t -> aId t == currentId) toks
        tags = nub (map aTag idToken)
-- | Merge every #=GS token with the given id AND tag into a single token.
-- Previously the filter ignored @currentTag@, so each tag's entry got the
-- concatenated info of /all/ tags for that id, duplicating annotation text.
mergeSAIdTagToken :: [StockholmToken] -> T.Text -> T.Text -> StockholmToken
mergeSAIdTagToken _token currentId currentTag = entry
  where tagToken = filter (\t -> aId t == currentId && aTag t == currentTag) _token
        tagInfos = T.concat (map aInfo tagToken)
        entry = TokSeqA currentId currentTag tagInfos
-- | Merge #=GR tokens: group by sequence id, then by tag within each id.
mergeResAToken :: [StockholmToken] -> [StockholmToken]
mergeResAToken toks = concatMap (mergeRAIdToken toks) (nub (map rId toks))
-- | For one sequence id, produce a merged residue token per distinct tag.
mergeRAIdToken :: [StockholmToken] -> T.Text -> [StockholmToken]
mergeRAIdToken toks currentId = map (mergeRAIdTagToken idToken currentId) tags
  where idToken = filter (\t -> rId t == currentId) toks
        tags = nub (map rTag idToken)
-- | Merge every #=GR token with the given id AND tag into a single token.
-- Previously the filter ignored @currentTag@, so each tag's entry got the
-- concatenated info of /all/ tags for that id, duplicating annotation text.
mergeRAIdTagToken :: [StockholmToken] -> T.Text -> T.Text -> StockholmToken
mergeRAIdTagToken _token currentId currentTag = entry
  where tagToken = filter (\t -> rId t == currentId && rTag t == currentTag) _token
        tagInfos = T.concat (map rInfo tagToken)
        entry = TokResA currentId currentTag tagInfos
-- | Build one 'SequenceEntry' per distinct sequence id. The ids are
-- de-duplicated: a sequence split over several lines previously produced
-- one (identical) entry per line.
buildSeqEntries :: [StockholmToken] -> [StockholmToken] -> [StockholmToken] -> [SequenceEntry]
buildSeqEntries seqA resA _token = entries
  where ids = nub (map sId _token)
        entries = map (buildSeqEntry seqA resA _token) ids
-- | Assemble one sequence's entry: its concatenated residue lines plus
-- its merged #=GS and #=GR annotations.
buildSeqEntry :: [StockholmToken] -> [StockholmToken] -> [StockholmToken] -> T.Text -> SequenceEntry
buildSeqEntry seqAtok resAtok _token currentId = entry
  where idToken = filter (\t -> sId t == currentId ) _token
        idSAToken = filter (\t -> aId t == currentId ) seqAtok
        idRAToken = filter (\t -> rId t == currentId ) resAtok
        seqA = map buildSAEntry idSAToken
        resA = map buildRAEntry idRAToken
        -- Interleaved alignment blocks: concatenate all lines for this id.
        tagInfos = T.concat (map sSeq idToken)
        entry = SequenceEntry currentId tagInfos seqA resA
-- | Convert a merged #=GS token into an annotation entry.
buildSAEntry :: StockholmToken -> AnnotationEntry
buildSAEntry t = AnnotationEntry (aTag t) (aInfo t)
-- | Convert a merged #=GR token into an annotation entry.
buildRAEntry :: StockholmToken -> AnnotationEntry
buildRAEntry t = AnnotationEntry (rTag t) (rInfo t)
| eggzilla/StockholmAlignment | Biobase/StockholmAlignment/Import.hs | gpl-3.0 | 8,132 | 0 | 16 | 1,635 | 2,483 | 1,236 | 1,247 | 188 | 4 |
module View.Panel
( drawPanel
) where
import Control.Lens
import View.State
import View.Convert
import GameLogic
import Middleware.Gloss.Facade
--TODO: Pause checkbox
--TODO: Collapsible panel
-- | Render the side panel: background, cursor position, per-player rows,
-- mini-map, pause label and speed gauge, shifted to the window's right edge.
drawPanel :: ViewData -> Picture
drawPanel state =
    let size = state ^. windowSize
        width = fromIntegral $ fst size
        height = fromIntegral $ snd size
        halfWidth = width / 2
        halfHeight = height / 2
        -- Panel hugs the right edge of the window.
        shiftX = halfWidth - (panelWidth/2)
        rect = Color panelBkColor $ rectangleSolid panelWidth height
        positionPic = Translate (-panelWidth/2.2) (halfHeight - 20) $ drawPosition state
        playerPicts = mapP drawPlayer $ state ^. game . players
        playersPict = Translate 0 (halfHeight - 30) $ Pictures playerPicts
        pausedPict = Translate 5 (20 - halfHeight) $ drawPaused state
        speedPict = Translate (-5) (40 - halfHeight) $ drawGameSpeed state
        miniMapPict = Translate (-panelWidth/2.2) (15 - halfHeight)
            $ drawMiniMap $ state ^. game
    in Translate shiftX 0 $ Pictures [rect, positionPic, playersPict, miniMapPict
        , pausedPict, speedPict]
-- | Render the active player's selected cell coordinates. Falls back to a
-- placeholder instead of crashing when there is no active player/selection
-- (the previous irrefutable 'Just' pattern errored in that case).
drawPosition :: ViewData -> Picture
drawPosition state
    = Color black $ Scale panelTextScale panelTextScale $ Text str
    where str = maybe "Position: ?" showPos
                    (state ^? game . players . ix activePlayerIndex . selectedPos)
          showPos (x, y) = "Position: " ++ show x ++ "x" ++ show y
-- | Render one player's info row, tinted with the player's colour and
-- stacked vertically by player index.
drawPlayer :: (Int, Player) -> Picture
drawPlayer (index, player)
    = Translate 0 shiftY . Color color . Pictures $ drawPlayerInfoParts player
    where color = playerColor index
          -- Row 1 sits just below the header; each next index one row lower.
          shiftY = (0.5 - fromIntegral index) * playerInfoHeight
-- | Lay out each info column of a player's row, horizontally offset by
-- that column's relative shift.
drawPlayerInfoParts :: Player -> [Picture]
drawPlayerInfoParts player = map renderPart playerInfoParts
  where renderPart (shift, extract) =
            Translate (playerInfoWidth * shift) 0 (drawInfoText (extract player))
-- | The panel's player-row columns: relative x-shift paired with the
-- accessor that renders the column's text (number, free cells, shield,
-- aggression).
playerInfoParts :: [(Float, Player -> String)]
playerInfoParts = [(-0.50, show.view num)
                  ,(-0.22, show.view free)
                  --,(-0.01, remainText)
                  ,( 0.22, shieldText)
                  ,( 0.40, aggrText)
                  ]
-- | Draw a panel text label, scaled and vertically centred on its row.
drawInfoText :: String -> Picture
drawInfoText txt =
    Translate 0 (negate (playerInfoHeight / 2))
        (Scale panelTextScale panelTextScale (Text txt))
--remainText :: Player -> String
--remainText player = show $ (view remain player) `div` remainDivMult
-- | Shield column: show the charge while it is still building (< 128);
-- once full, show whether the shield is switched on ("+1") or off ("+0").
shieldText :: Player -> String
shieldText player =
    if strength < 128
        then show strength
        else if active then "+1" else "+0"
  where strength = player ^. shieldStrength
        active = player ^. shieldActive
-- | Aggression column: the value when positive, otherwise empty.
aggrText :: Player -> String
aggrText player =
    if aggro > 0 then show aggro else ""
  where aggro = player ^. aggr
-- | Show a PAUSED label while the game is paused, otherwise nothing.
drawPaused :: ViewData -> Picture
drawPaused state =
    if state ^. game . paused
        then Color black (drawInfoText "PAUSED")
        else Blank
-- | Render the whole world as a mini-map, one small square per cell.
drawMiniMap :: GameData -> Picture
drawMiniMap game = Pictures cells
    where cells = mapW (drawMiniMapCell mapCellScale) w
          --swap for testing drawing speed degradation
          --cells = fmap (drawMiniMapCell mapCellScale) [((x,y), mkCell 1 1) | x<-[1..wSize], y<-[1..wSize]]
          w = game ^. world
          wSize = getWorldSize w
          -- Scale so the whole world fits into the panel's map area.
          mapCellScale = mapSize / fromIntegral wSize
-- | One mini-map square: empty-cell colour for free cells, otherwise the
-- owning player's colour, positioned at the cell's world coordinates.
drawMiniMapCell :: Float -> (WorldPos, Cell) -> Picture
drawMiniMapCell mapCellScale (pos, cell)
    | isFree cell
    = translateCell pos $ Color emptyCellColor rect
    | otherwise
    = translateCell pos $ Color color rect
    where translateCell (x,y) pict =
              let xx = (fromIntegral x - 0.5) * mapCellScale
                  yy = (fromIntegral y - 0.5) * mapCellScale
              in Translate xx yy pict
          rect = rectangleSolid mapCellScale mapCellScale
          color = playerColor $ cell ^. playerIndex
-- | Draw the game-speed gauge: caption, horizontal axis, one tick per
-- possible 'GameSpeed', and a marker at the current speed.
drawGameSpeed :: ViewData -> Picture
drawGameSpeed state = Pictures [pText, pHLine, pVLines, pMarker]
  where
    gs = state ^. game . gameSpeed
    panelFontSize = 15
    gaudgeLeft = panelWidth * 0.065
    gaudgeWidth = panelWidth * 0.3
    gaudgeStep = gaudgeWidth / fromIntegral (fromEnum (maxBound :: GameSpeed))
    gaudgeTop = panelFontSize * 1.5  -- kept for parity with the original (unused)
    gaudgeHeight = panelFontSize
    -- x coordinate of the tick for a given speed.
    gaudgePos sp = gaudgeLeft + gaudgeStep * fromIntegral (fromEnum sp)
    pText = Translate (-10) (gaudgeHeight + panelFontSize) . Color black
            $ drawInfoText "Game speed"
    pHLine = translate 0 (gaudgeHeight / 2) . Color black
             $ Line [(gaudgeLeft, 0), (gaudgeLeft + gaudgeWidth, 0)]
    pVLines = Color black $ Pictures
              [Line [(x, 0), (x, gaudgeHeight)]
              | sp <- enumFrom (minBound :: GameSpeed)
              , let x = gaudgePos sp]
    pMarker = translate (gaudgePos gs) 0 $ drawGameSpeedMarker gaudgeHeight
-- | Pentagon marker pointing up at the current speed tick.
drawGameSpeedMarker :: Float -> Picture
drawGameSpeedMarker gaudgeHeight = Color gray (Polygon points)
  where halfW = 5
        points = [ (0, gaudgeHeight)
                 , (-halfW, gaudgeHeight - halfW)
                 , (-halfW, 0)
                 , (halfW, 0)
                 , (halfW, gaudgeHeight - halfW)
                 ]
| EPashkin/gamenumber-gloss | src/View/Panel.hs | gpl-3.0 | 5,194 | 0 | 17 | 1,510 | 1,515 | 792 | 723 | -1 | -1 |
module Expand (expandAll, replaceSourceInfoOnXObj) where
import Control.Monad.State.Lazy (StateT(..), runStateT, liftIO, modify, get, put)
import Control.Monad.State
import Debug.Trace
import Types
import Obj
import Util
import Lookup
-- | Used for calling back to the 'eval' function in Eval.hs
type DynamicEvaluator = Env -> XObj -> StateT Context IO (Either EvalError XObj)
-- | Keep expanding the form until it doesn't change anymore.
-- | Note: comparing environments is tricky! Make sure they *can* be equal, otherwise this won't work at all!
expandAll :: DynamicEvaluator -> Env -> XObj -> StateT Context IO (Either EvalError XObj)
expandAll eval env root =
  do fullyExpanded <- expandAllInternal root
     -- Re-number infoIdentifiers once the tree has stabilised.
     return (fmap setNewIdentifiers fullyExpanded)
  where expandAllInternal xobj =
          do expansionResult <- expand eval env xobj
             case expansionResult of
               -- Fixed point reached when a pass leaves the form unchanged.
               Right expanded -> if expanded == xobj
                                 then return (Right expanded)
                                 else expandAll eval env expanded
               err -> return err
-- | Macro expansion of a single form
-- Performs ONE expansion pass: special forms get their sub-forms expanded
-- recursively, macro/dynamic calls are delegated back to 'eval', and
-- everything else is returned unchanged. 'expandAll' iterates this.
expand :: DynamicEvaluator -> Env -> XObj -> StateT Context IO (Either EvalError XObj)
expand eval env xobj =
  case obj xobj of
  --case obj (trace ("Expand: " ++ pretty xobj) xobj) of
    Lst _ -> expandList xobj
    Arr _ -> expandArray xobj
    Sym _ _ -> expandSymbol xobj
    -- Literals and other leaves expand to themselves.
    _ -> return (Right xobj)
  where
    expandList :: XObj -> StateT Context IO (Either EvalError XObj)
    expandList (XObj (Lst xobjs) i t) =
      case xobjs of
        [] -> return (Right xobj)
        -- Forms whose heads must never be expanded further.
        XObj (External _) _ _ : _ -> return (Right xobj)
        XObj (Instantiate _) _ _ : _ -> return (Right xobj)
        XObj (Deftemplate _) _ _ : _ -> return (Right xobj)
        XObj (Defalias _) _ _ : _ -> return (Right xobj)
        [defnExpr@(XObj Defn _ _), name, args, body] ->
          do expandedBody <- expand eval env body
             return $ do okBody <- expandedBody
                         Right (XObj (Lst [defnExpr, name, args, okBody]) i t)
        [defExpr@(XObj Def _ _), name, expr] ->
          do expandedExpr <- expand eval env expr
             return $ do okExpr <- expandedExpr
                         Right (XObj (Lst [defExpr, name, okExpr]) i t)
        [theExpr@(XObj The _ _), typeXObj, value] ->
          do expandedValue <- expand eval env value
             return $ do okValue <- expandedValue
                         Right (XObj (Lst [theExpr, typeXObj, okValue]) i t)
        [ifExpr@(XObj If _ _), condition, trueBranch, falseBranch] ->
          do expandedCondition <- expand eval env condition
             expandedTrueBranch <- expand eval env trueBranch
             expandedFalseBranch <- expand eval env falseBranch
             return $ do okCondition <- expandedCondition
                         okTrueBranch <- expandedTrueBranch
                         okFalseBranch <- expandedFalseBranch
                         -- This is a HACK so that each branch of the if statement
                         -- has a "safe place" (= a do-expression with just one element)
                         -- where it can store info about its deleters. Without this,
                         -- An if statement with let-expression inside will duplicate
                         -- the calls to Delete when emitting code.
                         let wrappedTrue =
                               case okTrueBranch of
                                 XObj (Lst (XObj Do _ _ : _)) _ _ -> okTrueBranch -- Has a do-expression already
                                 _ -> XObj (Lst [XObj Do Nothing Nothing, okTrueBranch]) (info okTrueBranch) Nothing
                             wrappedFalse =
                               case okFalseBranch of
                                 XObj (Lst (XObj Do _ _ : _)) _ _ -> okFalseBranch -- Has a do-expression already
                                 _ -> XObj (Lst [XObj Do Nothing Nothing, okFalseBranch]) (info okFalseBranch) Nothing
                         Right (XObj (Lst [ifExpr, okCondition, wrappedTrue, wrappedFalse]) i t)
        [letExpr@(XObj Let _ _), XObj (Arr bindings) bindi bindt, body] ->
          -- Bindings come in name/value pairs, so the array must be even.
          if even (length bindings)
          then do bind <- mapM (\(n, x) -> do x' <- expand eval env x
                                              return $ do okX <- x'
                                                          (Right [n, okX]))
                               (pairwise bindings)
                  expandedBody <- expand eval env body
                  return $ do okBindings <- sequence bind
                              okBody <- expandedBody
                              Right (XObj (Lst [letExpr, XObj (Arr (concat okBindings)) bindi bindt, okBody]) i t)
          else return (Left (EvalError ("Uneven number of forms in let-statement: " ++ pretty xobj)))
        doExpr@(XObj Do _ _) : expressions ->
          do expandedExpressions <- mapM (expand eval env) expressions
             return $ do okExpressions <- sequence expandedExpressions
                         Right (XObj (Lst (doExpr : okExpressions)) i t)
        [withExpr@(XObj With _ _), pathExpr@(XObj (Sym path _) _ _), expression] ->
          do expandedExpression <- expand eval env expression
             return $ do okExpression <- expandedExpression
                         Right (XObj (Lst [withExpr, pathExpr , okExpression]) i t) -- Replace the with-expression with just the expression!
        [withExpr@(XObj With _ _), _, _] ->
          return (Left (EvalError ("Non-symbol in 'with' expression: " ++ show xobj ++ " at " ++ prettyInfoFromXObj xobj)))
        (XObj With _ _) : _ ->
          return (Left (EvalError ("Can't have multiple forms within a 'with' expression (except at top-level) at " ++ prettyInfoFromXObj xobj)))
        XObj Mod{} _ _ : _ ->
          return (Left (EvalError "Can't eval module"))
        -- General application: expand head and arguments, then dispatch on
        -- what the head turned out to be.
        f:args -> do expandedF <- expand eval env f
                     expandedArgs <- fmap sequence (mapM (expand eval env) args)
                     case expandedF of
                       Right (XObj (Lst [XObj Dynamic _ _, _, XObj (Arr _) _ _, _]) _ _) ->
                         --trace ("Found dynamic: " ++ pretty xobj)
                         eval env xobj
                       Right (XObj (Lst [XObj Macro _ _, _, XObj (Arr _) _ _, _]) _ _) ->
                         --trace ("Found macro: " ++ pretty xobj ++ " at " ++ prettyInfoFromXObj xobj)
                         eval env xobj
                       Right (XObj (Lst [XObj (Command callback) _ _, _]) _ _) ->
                         (getCommand callback) args
                       Right _ ->
                         return $ do okF <- expandedF
                                     okArgs <- expandedArgs
                                     Right (XObj (Lst (okF : okArgs)) i t)
                       Left err -> return (Left err)
    expandList _ = error "Can't expand non-list in expandList."
    expandArray :: XObj -> StateT Context IO (Either EvalError XObj)
    expandArray (XObj (Arr xobjs) i t) =
      do evaledXObjs <- fmap sequence (mapM (expand eval env) xobjs)
         return $ do okXObjs <- evaledXObjs
                     Right (XObj (Arr okXObjs) i t)
    expandArray _ = error "Can't expand non-array in expandArray."
    expandSymbol :: XObj -> StateT Context IO (Either a XObj)
    expandSymbol (XObj (Sym path _) _ _) =
      case lookupInEnv path env of
        -- Definitions are referenced, not substituted.
        Just (_, Binder _ (XObj (Lst (XObj (External _) _ _ : _)) _ _)) -> return (Right xobj)
        Just (_, Binder _ (XObj (Lst (XObj (Instantiate _) _ _ : _)) _ _)) -> return (Right xobj)
        Just (_, Binder _ (XObj (Lst (XObj (Deftemplate _) _ _ : _)) _ _)) -> return (Right xobj)
        Just (_, Binder _ (XObj (Lst (XObj Defn _ _ : _)) _ _)) -> return (Right xobj)
        Just (_, Binder _ (XObj (Lst (XObj Def _ _ : _)) _ _)) -> return (Right xobj)
        Just (_, Binder _ (XObj (Lst (XObj (Defalias _) _ _ : _)) _ _)) -> return (Right xobj)
        Just (_, Binder _ found) -> return (Right found) -- use the found value
        Nothing -> return (Right xobj) -- symbols that are not found are left as-is
    expandSymbol _ = error "Can't expand non-symbol in expandSymbol."
-- | Replace all the infoIdentifier:s on all nested XObj:s
-- Walks the tree in a 'State Int' counter so every node (lists, arrays
-- and leaves alike) receives a fresh, unique identifier.
setNewIdentifiers :: XObj -> XObj
setNewIdentifiers root = let final = evalState (visit root) 0
                         in final
                         --trace ("ROOT: " ++ prettyTyped root ++ "FINAL: " ++ prettyTyped final) final
  where
    visit :: XObj -> State Int XObj
    visit xobj =
      case obj xobj of
        (Lst _) -> visitList xobj
        (Arr _) -> visitArray xobj
        _ -> bumpAndSet xobj
    visitList :: XObj -> State Int XObj
    visitList (XObj (Lst xobjs) i t) =
      do visited <- mapM visit xobjs
         let xobj' = XObj (Lst visited) i t
         bumpAndSet xobj'
    visitList _ = error "The function 'visitList' only accepts XObjs with lists in them."
    visitArray :: XObj -> State Int XObj
    visitArray (XObj (Arr xobjs) i t) =
      do visited <- mapM visit xobjs
         let xobj' = XObj (Arr visited) i t
         bumpAndSet xobj'
    visitArray _ = error "The function 'visitArray' only accepts XObjs with arrays in them."
    -- Advance the counter and stamp it on the node (if the node has info).
    bumpAndSet :: XObj -> State Int XObj
    bumpAndSet xobj =
      do counter <- get
         put (counter + 1)
         case info xobj of
           Just i -> return (xobj { info = Just (i { infoIdentifier = counter })})
           Nothing -> return xobj
-- | Replaces the file, line and column info on an XObj an all its children.
replaceSourceInfo :: FilePath -> Int -> Int -> XObj -> XObj
replaceSourceInfo newFile newLine newColumn root = visit root
  where
    visit :: XObj -> XObj
    visit xobj =
      case obj xobj of
        (Lst _) -> visitList xobj
        (Arr _) -> visitArray xobj
        _ -> setNewInfo xobj
    visitList :: XObj -> XObj
    visitList (XObj (Lst xobjs) i t) =
      setNewInfo (XObj (Lst (map visit xobjs)) i t)
    visitList _ =
      error "The function 'visitList' only accepts XObjs with lists in them."
    visitArray :: XObj -> XObj
    visitArray (XObj (Arr xobjs) i t) =
      setNewInfo (XObj (Arr (map visit xobjs)) i t)
    visitArray _ = error "The function 'visitArray' only accepts XObjs with arrays in them."
    -- Overwrite location fields; nodes without info stay untouched.
    setNewInfo :: XObj -> XObj
    setNewInfo xobj =
      case info xobj of
        Just i -> (xobj { info = Just (i { infoFile = newFile
                                         , infoLine = newLine
                                         , infoColumn = newColumn
                                         })})
        Nothing -> xobj
-- | When new source info is available, stamp its file/line/column onto
-- the given form and all of its children; otherwise leave the form alone.
replaceSourceInfoOnXObj :: Maybe Info -> XObj -> XObj
replaceSourceInfoOnXObj newInfo xobj =
  maybe xobj (\i -> replaceSourceInfo (infoFile i) (infoLine i) (infoColumn i) xobj) newInfo
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Games.Scores.ListWindow
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the scores in a leaderboard around (and including) a player\'s
-- score.
--
-- /See:/ <https://developers.google.com/games/services/ Google Play Game Services API Reference> for @games.scores.listWindow@.
module Network.Google.Resource.Games.Scores.ListWindow
(
-- * REST Resource
ScoresListWindowResource
-- * Creating a Request
, scoresListWindow
, ScoresListWindow
-- * Request Lenses
, slwConsistencyToken
, slwCollection
, slwTimeSpan
, slwReturnTopIfAbsent
, slwLeaderboardId
, slwLanguage
, slwResultsAbove
, slwPageToken
, slwMaxResults
) where
import Network.Google.Games.Types
import Network.Google.Prelude
-- | A resource alias for @games.scores.listWindow@ method which the
-- 'ScoresListWindow' request conforms to.
-- (Servant-style route: path captures followed by the query parameters.)
type ScoresListWindowResource =
     "games" :>
       "v1" :>
         "leaderboards" :>
           Capture "leaderboardId" Text :>
             "window" :>
               Capture "collection" ScoresListWindowCollection :>
                 QueryParam "timeSpan" ScoresListWindowTimeSpan :>
                   QueryParam "consistencyToken" (Textual Int64) :>
                     QueryParam "returnTopIfAbsent" Bool :>
                       QueryParam "language" Text :>
                         QueryParam "resultsAbove" (Textual Int32) :>
                           QueryParam "pageToken" Text :>
                             QueryParam "maxResults" (Textual Int32) :>
                               QueryParam "alt" AltJSON :>
                                 Get '[JSON] LeaderboardScores
-- | Lists the scores in a leaderboard around (and including) a player\'s
-- score.
--
-- /See:/ 'scoresListWindow' smart constructor.
data ScoresListWindow = ScoresListWindow'
    { _slwConsistencyToken :: !(Maybe (Textual Int64)) -- ^ last-seen mutation timestamp
    , _slwCollection :: !ScoresListWindowCollection -- ^ which score collection to list
    , _slwTimeSpan :: !ScoresListWindowTimeSpan -- ^ time span of the scores\/ranks
    , _slwReturnTopIfAbsent :: !(Maybe Bool) -- ^ return top scores if player absent
    , _slwLeaderboardId :: !Text -- ^ leaderboard to query
    , _slwLanguage :: !(Maybe Text) -- ^ preferred language for returned strings
    , _slwResultsAbove :: !(Maybe (Textual Int32)) -- ^ preferred count above player\'s score
    , _slwPageToken :: !(Maybe Text) -- ^ pagination token
    , _slwMaxResults :: !(Maybe (Textual Int32)) -- ^ maximum scores per response
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ScoresListWindow' with the minimum fields required to make a request.
-- All optional fields start as 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'slwConsistencyToken'
--
-- * 'slwCollection'
--
-- * 'slwTimeSpan'
--
-- * 'slwReturnTopIfAbsent'
--
-- * 'slwLeaderboardId'
--
-- * 'slwLanguage'
--
-- * 'slwResultsAbove'
--
-- * 'slwPageToken'
--
-- * 'slwMaxResults'
scoresListWindow
    :: ScoresListWindowCollection -- ^ 'slwCollection'
    -> ScoresListWindowTimeSpan -- ^ 'slwTimeSpan'
    -> Text -- ^ 'slwLeaderboardId'
    -> ScoresListWindow
scoresListWindow pSlwCollection_ pSlwTimeSpan_ pSlwLeaderboardId_ =
    ScoresListWindow'
    { _slwConsistencyToken = Nothing
    , _slwCollection = pSlwCollection_
    , _slwTimeSpan = pSlwTimeSpan_
    , _slwReturnTopIfAbsent = Nothing
    , _slwLeaderboardId = pSlwLeaderboardId_
    , _slwLanguage = Nothing
    , _slwResultsAbove = Nothing
    , _slwPageToken = Nothing
    , _slwMaxResults = Nothing
    }
-- | The last-seen mutation timestamp.
-- ('mapping _Coerce' unwraps the 'Textual' newtype for the caller.)
slwConsistencyToken :: Lens' ScoresListWindow (Maybe Int64)
slwConsistencyToken
  = lens _slwConsistencyToken
      (\ s a -> s{_slwConsistencyToken = a})
      . mapping _Coerce
-- | The collection of scores you\'re requesting.
slwCollection :: Lens' ScoresListWindow ScoresListWindowCollection
slwCollection
  = lens _slwCollection
      (\ s a -> s{_slwCollection = a})
-- | The time span for the scores and ranks you\'re requesting.
slwTimeSpan :: Lens' ScoresListWindow ScoresListWindowTimeSpan
slwTimeSpan
  = lens _slwTimeSpan (\ s a -> s{_slwTimeSpan = a})
-- | True if the top scores should be returned when the player is not in the
-- leaderboard. Defaults to true.
slwReturnTopIfAbsent :: Lens' ScoresListWindow (Maybe Bool)
slwReturnTopIfAbsent
  = lens _slwReturnTopIfAbsent
      (\ s a -> s{_slwReturnTopIfAbsent = a})
-- | The ID of the leaderboard.
slwLeaderboardId :: Lens' ScoresListWindow Text
slwLeaderboardId
  = lens _slwLeaderboardId
      (\ s a -> s{_slwLeaderboardId = a})
-- | The preferred language to use for strings returned by this method.
slwLanguage :: Lens' ScoresListWindow (Maybe Text)
slwLanguage
  = lens _slwLanguage (\ s a -> s{_slwLanguage = a})
-- | The preferred number of scores to return above the player\'s score. More
-- scores may be returned if the player is at the bottom of the
-- leaderboard; fewer may be returned if the player is at the top. Must be
-- less than or equal to maxResults.
slwResultsAbove :: Lens' ScoresListWindow (Maybe Int32)
slwResultsAbove
  = lens _slwResultsAbove
      (\ s a -> s{_slwResultsAbove = a})
      . mapping _Coerce
-- | The token returned by the previous request.
slwPageToken :: Lens' ScoresListWindow (Maybe Text)
slwPageToken
  = lens _slwPageToken (\ s a -> s{_slwPageToken = a})
-- | The maximum number of leaderboard scores to return in the response. For
-- any response, the actual number of leaderboard scores returned may be
-- less than the specified maxResults.
slwMaxResults :: Lens' ScoresListWindow (Maybe Int32)
slwMaxResults
  = lens _slwMaxResults
      (\ s a -> s{_slwMaxResults = a})
      . mapping _Coerce
-- Wires the request record onto the route type: positional arguments
-- follow the order of captures/params in 'ScoresListWindowResource'.
instance GoogleRequest ScoresListWindow where
        type Rs ScoresListWindow = LeaderboardScores
        type Scopes ScoresListWindow =
             '["https://www.googleapis.com/auth/games",
               "https://www.googleapis.com/auth/plus.login"]
        requestClient ScoresListWindow'{..}
          = go _slwLeaderboardId _slwCollection
              (Just _slwTimeSpan)
              _slwConsistencyToken
              _slwReturnTopIfAbsent
              _slwLanguage
              _slwResultsAbove
              _slwPageToken
              _slwMaxResults
              (Just AltJSON)
              gamesService
          where go
                  = buildClient
                      (Proxy :: Proxy ScoresListWindowResource)
                      mempty
| rueshyna/gogol | gogol-games/gen/Network/Google/Resource/Games/Scores/ListWindow.hs | mpl-2.0 | 7,004 | 0 | 21 | 1,716 | 1,010 | 582 | 428 | 145 | 1 |
-- Copyright (C) 2016-2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
module BDCS.Files(insertFiles,
associateFilesWithBuild,
associateFilesWithPackage,
groupIdToFiles)
where
import Control.Monad.IO.Class(MonadIO)
import Control.Monad.Trans.Resource(MonadResource)
import Data.Conduit((.|), Source)
import qualified Data.Conduit.List as CL
import Database.Esqueleto
import BDCS.DB
-- | Persist a list of 'Files' rows, returning the generated keys in the
-- same order as the input.
insertFiles :: MonadIO m => [Files] -> SqlPersistT m [Key Files]
insertFiles files = mapM insert files
-- | Link every file key to the given build by inserting one 'BuildFiles'
-- row per file, returning the keys of the new association rows.
associateFilesWithBuild :: MonadIO m => [Key Files] -> Key Builds -> SqlPersistT m [Key BuildFiles]
associateFilesWithBuild files build =
    mapM (\fileKey -> insert $ BuildFiles build fileKey) files
-- | Link every file key to the given package key\/value by inserting one
-- 'FileKeyValues' row per file, returning the keys of the new rows.
associateFilesWithPackage :: MonadIO m => [Key Files] -> Key KeyVal -> SqlPersistT m [Key FileKeyValues]
associateFilesWithPackage files package =
    mapM (\fileKey -> insert $ FileKeyValues fileKey package) files
-- | Stream every 'Files' row that belongs to the given group, joining
-- through the group_files association table.
groupIdToFiles :: MonadResource m => Key Groups -> Source (SqlPersistT m) Files
groupIdToFiles groupid =
    rows .| CL.map entityVal
  where
    rows = selectSource $ from $ \(files `InnerJoin` group_files) -> do
        on     $ files ^. FilesId ==. group_files ^. GroupFilesFile_id
        where_ $ group_files ^. GroupFilesGroup_id ==. val groupid
        return files
| dashea/bdcs | importer/BDCS/Files.hs | lgpl-2.1 | 2,107 | 0 | 17 | 486 | 436 | 234 | 202 | 27 | 1 |
-- | The infinite list of primes, built by trial division of each
-- candidate against the previously found primes.
--
-- BUG FIX: the original golfed version cut off the divisor list with
-- @takeWhile (< (n-7))@.  For n = 8 that cutoff is @(< 1)@ and for
-- n = 9 it is @(< 2)@, so no divisor at all was tested and both 8 and 9
-- were emitted as "primes".  The correct cutoff only needs divisors up
-- to the square root of the candidate: @p * p <= n@.
s :: [Integer]
s = 2 : filter isPrime [3 ..]
  where
    -- A candidate is prime when no already-known prime up to its square
    -- root divides it.
    isPrime n = all (\p -> n `mod` p /= 0) (takeWhile (\p -> p * p <= n) s)

-- | Print the primes forever (the list is infinite, so this never
-- terminates; interrupt the program to stop it).
main :: IO ()
main = print s
-- | Stub for the @git verify-tag@ command: it currently accepts the
-- command-line arguments and performs no work.
module Git.Command.VerifyTag (run) where

-- | Entry point for @git verify-tag@.  The argument list is ignored.
run :: [String] -> IO ()
run _args = pure ()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.