code
stringlengths
5
1.03M
repo_name
stringlengths
5
90
path
stringlengths
4
158
license
stringclasses
15 values
size
int64
5
1.03M
n_ast_errors
int64
0
53.9k
ast_max_depth
int64
2
4.17k
n_whitespaces
int64
0
365k
n_ast_nodes
int64
3
317k
n_ast_terminals
int64
1
171k
n_ast_nonterminals
int64
1
146k
loc
int64
-1
37.3k
cycloplexity
int64
-1
1.31k
module Cauterize.Specification ( module X ) where import Cauterize.Specification.Types as X import Cauterize.Specification.Parser as X
reiddraper/cauterize
src/Cauterize/Specification.hs
bsd-3-clause
136
0
4
16
28
20
8
3
0
module Network.Orchid.Format.Plain (fPlain) where import Data.FileStore (FileStore) import Network.Orchid.Core.Format fPlain :: WikiFormat fPlain = WikiFormat "txt" "text/plain" plain plain :: FileStore -> FilePath -> FilePath -> String -> IO Output plain _ _ _ = return . TextOutput
sebastiaanvisser/orchid
src/Network/Orchid/Format/Plain.hs
bsd-3-clause
288
0
9
42
87
49
38
7
1
module Main where import Prelude hiding ((/), (-), head, (>), (.), div) import Air.Light ((-)) import Text.HTML.Moe2 import System.Nemesis.Titan test_page :: String test_page = render - html - do head - do meta ! [http_equiv "Content-Type", content "text/html; charset=utf-8"] - (/) title - str "my title" link ! [rel "icon", _type "image/png", href "panda_icon.png"] - (/) body - do div ! [_class "container"] - do str "hello world" main :: IO () main = do putStrLn test_page halt
nfjinjing/moe
src/Main.hs
bsd-3-clause
534
0
14
130
205
113
92
19
1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE TypeSynonymInstances #-} {-# LANGUAGE DataKinds, EmptyDataDecls, TypeInType, PolyKinds, TypeOperators, ExistentialQuantification, RankNTypes, DefaultSignatures, GADTs, DuplicateRecordFields, PatternSynonyms, DeriveTraversable, DeriveGeneric, DeriveDataTypeable, DeriveLift, StandaloneDeriving, DeriveAnyClass #-} module GRIN.GrinValue where import Data.Kind import GRIN.GrinTag import Data.Data import GRIN.GrinVariable import GRIN.GrinIdentifiers import GRIN.GrinSimpleValue import Control.Lens.Plated import Data.Data.Lens (uniplate) import Control.Lens import Control.Lens.Combinators import Control.Applicative newtype Pointer ty = Pointer {locationName :: GrinVariable ty} deriving (Functor, Foldable, Traversable, Typeable, Show, Eq, Data) -- | 'NodeType a' represents the type of GRIN node. A node can either have a known constant tag, a variable tag, or have an empty tag ('tagless'). Another name for a tagless inode is an unboxed node. data NodeType a where KnownTag :: {_knownTag :: GrinTag} -> NodeType a Tagless :: NodeType a VariableTag :: GrinVariable a -> NodeType a deriving ( Eq, Functor, Foldable, Traversable, Typeable, Show) deriving instance (Data a, Typeable a) => Data (NodeType a) -- Have to extend this to include things such as original high-level type. Could use the LLVM format? -- TODO: Remove EmptyValue from GrinValue and replace with pattern synonym. Likewise for PlainTag. -- | A node is the basic object that goes on the heap. It is composed of a tag, and a (possibly empty) collection of fields. -- Ideally, the codegen should be able to rearrange the fields to optimise memory access behaviour. -- A tagless node without any fields is equivalent to an 'EmptyValue'. 
data Node f a where Node :: Traversable f => {_nodeType :: NodeType a, _fields :: f (GrinValue f a)} -> Node f a deriving instance (Show a, Show (f (GrinValue f a))) => Show (Node f a) class WithKnownTag a where knownTag :: Prism' a GrinTag instance (Alternative f, Traversable f) => WithKnownTag (Node f a) where knownTag = prism' (\a -> Node (KnownTag a) empty) (\a -> case a of (BoxedNode t _) -> Just t _ -> Nothing) pattern BoxedNode tag fields = Node (KnownTag tag) fields pattern UnboxedNode fields = Node Tagless fields instance (Eq a, Eq (f a), Eq (f (GrinValue f a))) => Eq (Node f a) where (Node ty l) == (Node ty' l') = ty == ty' && l == l' -- Just because all these fucking Data/Typeable constraints should die in a fire type ValueConstraint f a = (Data a, Data (f (GrinValue f a)), Typeable f, Typeable a, Traversable f) deriving instance (Functor f, Functor (GrinValue f)) => Functor (Node f) deriving instance (Foldable f, Foldable (GrinValue f)) => Foldable (Node f) deriving instance (Traversable f, Traversable (GrinValue f)) => Traversable (Node f) deriving instance ValueConstraint f a => Data (Node f a) data GrinValue f ty where SimpleValue :: GrinSimpleValue ty -> GrinValue f ty EmptyValue :: GrinValue f ty PlainTag :: {t :: GrinTag } -> GrinValue f ty NodeValue :: Traversable f => Node f a -> GrinValue f a -- BasicValue :: BasicValue ty -> GrinValue f ty PointerValue :: Pointer ty -> GrinValue f ty instance (Alternative f, Traversable f) => WithKnownTag (GrinValue f ty) where knownTag = prism' PlainTag (\a -> case a of (PlainTag t) -> Just t (NodeValue n) -> n^?knownTag _ -> Nothing ) deriving instance (Show ty, Show (f (GrinValue f ty))) => Show (GrinValue f ty) deriving instance Typeable (GrinValue f ty) deriving instance ValueConstraint f ty => Data (GrinValue f ty) deriving instance ValueConstraint f a => Plated (GrinValue f a) pattern Variable a name = SimpleValue (VarValue (Var a name )) deriving instance (Eq ty, Eq (f ty), Eq (f (GrinValue f ty))) => 
Eq (GrinValue f ty) deriving instance Functor (GrinValue f) deriving instance Foldable (GrinValue f) deriving instance Traversable (GrinValue f) instance Applicative f => Applicative (GrinValue f) where pure a = Variable a Nothing Variable f _ <*> a = f <$> a instance Applicative f => Monad (GrinValue f) where (Variable a _) >>= f = f a
spacekitteh/libgrin
src/GRIN/GrinValue.hs
bsd-3-clause
4,608
0
13
976
1,286
670
616
-1
-1
import System.Environment (getArgs) pic :: [Double] -> String pic xs | x*x + y*y <= r*r = "true" | otherwise = "false" where x = head xs - xs!!3 y = last xs - xs!!1 r = xs!!2 main :: IO () main = do [inpFile] <- getArgs input <- readFile inpFile putStr . unlines . map (pic . map read . words) $ lines [x | x <- input, elem x "0123456789 -.\n"]
nikai3d/ce-challenges
moderate/point_in_circle.hs
bsd-3-clause
408
0
13
137
199
98
101
12
1
import qualified Data.Set as S import Data.List (sort) import Common.List (nub') {-- analysis: - It is equvilant to consider Q_n = n*(3n-1), denote the answer as Q_c/2, - then there exists a and b, with Q_b-Q_a=Q_c, by factorization, we get (b-a)(3b+3a-1)=Q_c, - then we can enumerate the divisor b-a, solve the equations to get a & b, and verify Q_a+Q_b by binary search. --} isPentagonal :: Integer -> Bool isPentagonal x = helper 1 (ceiling $ sqrt (fromIntegral x)) where helper l r | l > r = False | l == r = l * (3 * l - 1) == x | l < r = case (compare x midValue) of EQ -> True LT -> helper l (mid - 1) GT -> helper (mid + 1) r where mid = (l + r) `div` 2 midValue = mid * (3 * mid - 1) pFactor :: Integer -> [Integer] pFactor x = nub' f where factor x = [ d | d <- [1 .. x], x `mod` d == 0 ] f1 = factor x f2 = factor (3*x-1) f = [ a*b | a <- f1, b <- f2 ] checkDiff index = any (\(a,b) -> isPentagonal (a*(3*a-1)+b*(3*b-1))) candidate where x = index * (3 * index - 1) f = pFactor index candidate = [ ((plus - minus) `div` 2, (plus + minus) `div` 2) | minus <- f, (x `div` minus + 1) `mod` 3 == 0, let plus = (x `div` minus + 1) `div` 3, plus > minus, even (plus - minus) ] main = print $ n*(3*n-1) `div` 2 where n = head $ dropWhile (not . checkDiff) [1 .. ]
foreverbell/project-euler-solutions
src/44.hs
bsd-3-clause
1,436
0
15
449
628
341
287
26
3
module Problem132 where import Prime main :: IO () main = print . sum . take 40 . filter (dividesRepunit (10 ^ 9)) $ getPrimesUpto 1000000 where dividesRepunit n p | gcd 10 p /= 1 = False | otherwise = n `mod` a p == 0 a :: Int -> Int a = go 1 go v n = case (10 * v + 1) `rem` n of 0 -> 2 r -> 1 + go r n
adityagupta1089/Project-Euler-Haskell
src/problems/Problem132.hs
bsd-3-clause
361
0
11
135
178
90
88
12
2
import Numeric.LinearAlgebra import HVX -- declare a symbolic variable x = EVar "x" -- give x a value (a 4-element column vector) vars = [("x", (4><1) [0,-3,1,2])] -- define the expression to sudifferentiate: max(abs(x)) myexpr = hmax(habs(x)) -- compute the subgradient mysubgrad = jacobianWrtVar myexpr vars "x" main :: IO () main = print mysubgrad
chrisnc/hvx
examples/subgrad.hs
bsd-3-clause
353
0
9
57
106
60
46
8
1
{-# LANGUAGE CPP, DeriveDataTypeable #-} module Main where import Distribution.Client.DistDirLayout import Distribution.Client.ProjectConfig import Distribution.Client.Config (defaultCabalDir) import Distribution.Client.ProjectPlanning import Distribution.Client.ProjectPlanning.Types import Distribution.Client.ProjectBuilding import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.Types (GenericReadyPackage(..), installedPackageId) import Distribution.Package hiding (installedPackageId) import Distribution.PackageDescription import Distribution.InstalledPackageInfo (InstalledPackageInfo) import Distribution.Simple.Setup (toFlag) import Distribution.Version import Distribution.Verbosity import Distribution.Text #if !MIN_VERSION_base(4,8,0) import Data.Monoid #endif import qualified Data.Map as Map import Control.Monad import Control.Exception import System.FilePath import System.Directory import Test.Tasty import Test.Tasty.HUnit import Test.Tasty.Options import Data.Tagged (Tagged(..)) import Data.Proxy (Proxy(..)) import Data.Typeable (Typeable) main :: IO () main = defaultMainWithIngredients (defaultIngredients ++ [includingOptions projectConfigOptionDescriptions]) (withProjectConfig $ \config -> testGroup "Integration tests (internal)" (tests config)) tests :: ProjectConfig -> [TestTree] tests config = --TODO: tests for: -- * normal success -- * dry-run tests with changes [ testGroup "Exceptions during discovey and planning" $ [ testCase "no package" (testExceptionInFindingPackage config) , testCase "no package2" (testExceptionInFindingPackage2 config) ] , testGroup "Exceptions during building (local inplace)" $ [ testCase "configure" (testExceptionInConfigureStep config) , testCase "build" (testExceptionInBuildStep config) -- , testCase "register" testExceptionInRegisterStep ] --TODO: need to repeat for packages for the store , testGroup "Successful builds" $ [ testCaseSteps "Setup script styles" (testSetupScriptStyles config) 
] , testGroup "Regression tests" $ [ testCase "issue #3324" (testRegressionIssue3324 config) ] ] testExceptionInFindingPackage :: ProjectConfig -> Assertion testExceptionInFindingPackage config = do BadPackageLocations locs <- expectException "BadPackageLocations" $ void $ planProject testdir config case locs of [BadLocGlobEmptyMatch "./*.cabal"] -> return () _ -> assertFailure "expected BadLocGlobEmptyMatch" cleanProject testdir where testdir = "exception/no-pkg" testExceptionInFindingPackage2 :: ProjectConfig -> Assertion testExceptionInFindingPackage2 config = do BadPackageLocations locs <- expectException "BadPackageLocations" $ void $ planProject testdir config case locs of [BadLocGlobBadMatches "./" [BadLocDirNoCabalFile "."]] -> return () _ -> assertFailure $ "expected BadLocGlobBadMatches, got " ++ show locs cleanProject testdir where testdir = "exception/no-pkg2" testExceptionInConfigureStep :: ProjectConfig -> Assertion testExceptionInConfigureStep config = do plan <- planProject testdir config plan' <- executePlan plan (_pkga1, failure) <- expectPackageFailed plan' pkgidA1 case failure of ConfigureFailed _str -> return () _ -> assertFailure $ "expected ConfigureFailed, got " ++ show failure cleanProject testdir where testdir = "exception/configure" pkgidA1 = PackageIdentifier (PackageName "a") (Version [1] []) testExceptionInBuildStep :: ProjectConfig -> Assertion testExceptionInBuildStep config = do plan <- planProject testdir config plan' <- executePlan plan (_pkga1, failure) <- expectPackageFailed plan' pkgidA1 expectBuildFailed failure where testdir = "exception/build" pkgidA1 = PackageIdentifier (PackageName "a") (Version [1] []) testSetupScriptStyles :: ProjectConfig -> (String -> IO ()) -> Assertion testSetupScriptStyles config reportSubCase = do reportSubCase (show SetupCustomExplicitDeps) plan1 <- executePlan =<< planProject testdir1 config (pkg1, _, _) <- expectPackageInstalled plan1 pkgidA pkgSetupScriptStyle pkg1 @?= SetupCustomExplicitDeps 
hasDefaultSetupDeps pkg1 @?= Just False marker1 <- readFile (basedir </> testdir1 </> "marker") marker1 @?= "ok" removeFile (basedir </> testdir1 </> "marker") reportSubCase (show SetupCustomImplicitDeps) plan2 <- executePlan =<< planProject testdir2 config (pkg2, _, _) <- expectPackageInstalled plan2 pkgidA pkgSetupScriptStyle pkg2 @?= SetupCustomImplicitDeps hasDefaultSetupDeps pkg2 @?= Just True marker2 <- readFile (basedir </> testdir2 </> "marker") marker2 @?= "ok" removeFile (basedir </> testdir2 </> "marker") reportSubCase (show SetupNonCustomInternalLib) plan3 <- executePlan =<< planProject testdir3 config (pkg3, _, _) <- expectPackageInstalled plan3 pkgidA pkgSetupScriptStyle pkg3 @?= SetupNonCustomInternalLib {- --TODO: the SetupNonCustomExternalLib case is hard to test since it -- requires a version of Cabal that's later than the one we're testing -- e.g. needs a .cabal file that specifies cabal-version: >= 2.0 -- and a corresponding Cabal package that we can use to try and build a -- default Setup.hs. reportSubCase (show SetupNonCustomExternalLib) plan4 <- executePlan =<< planProject testdir4 config (pkg4, _, _) <- expectPackageInstalled plan4 pkgidA pkgSetupScriptStyle pkg4 @?= SetupNonCustomExternalLib -} where testdir1 = "build/setup-custom1" testdir2 = "build/setup-custom2" testdir3 = "build/setup-simple" pkgidA = PackageIdentifier (PackageName "a") (Version [0,1] []) -- The solver fills in default setup deps explicitly, but marks them as such hasDefaultSetupDeps = fmap defaultSetupDepends . setupBuildInfo . 
pkgDescription -- | See <https://github.com/haskell/cabal/issues/3324> -- testRegressionIssue3324 :: ProjectConfig -> Assertion testRegressionIssue3324 config = do -- expected failure first time due to missing dep plan1 <- executePlan =<< planProject testdir config (_pkgq, failure) <- expectPackageFailed plan1 pkgidQ expectBuildFailed failure -- add the missing dep, now it should work let qcabal = basedir </> testdir </> "q" </> "q.cabal" withFileFinallyRestore qcabal $ do appendFile qcabal (" build-depends: p\n") plan2 <- executePlan =<< planProject testdir config _ <- expectPackageInstalled plan2 pkgidP _ <- expectPackageInstalled plan2 pkgidQ return () where testdir = "regression/3324" pkgidP = PackageIdentifier (PackageName "p") (Version [0,1] []) pkgidQ = PackageIdentifier (PackageName "q") (Version [0,1] []) --------------------------------- -- Test utils to plan and build -- basedir :: FilePath basedir = "tests" </> "IntegrationTests2" planProject :: FilePath -> ProjectConfig -> IO PlanDetails planProject testdir cliConfig = do cabalDir <- defaultCabalDir let cabalDirLayout = defaultCabalDirLayout cabalDir projectRootDir <- canonicalizePath ("tests" </> "IntegrationTests2" </> testdir) let distDirLayout = defaultDistDirLayout projectRootDir -- Clear state between test runs. The state remains if the previous run -- ended in an exception (as we leave the files to help with debugging). 
cleanProject testdir (elaboratedPlan, elaboratedShared, projectConfig) <- rebuildInstallPlan verbosity projectRootDir distDirLayout cabalDirLayout cliConfig let targets = Map.fromList [ (installedPackageId pkg, [BuildDefaultComponents]) | InstallPlan.Configured pkg <- InstallPlan.toList elaboratedPlan , pkgBuildStyle pkg == BuildInplaceOnly ] elaboratedPlan' = pruneInstallPlanToTargets targets elaboratedPlan (elaboratedPlan'', pkgsBuildStatus) <- rebuildTargetsDryRun distDirLayout elaboratedPlan' let buildSettings = resolveBuildTimeSettings verbosity cabalDirLayout (projectConfigShared projectConfig) (projectConfigBuildOnly projectConfig) (projectConfigBuildOnly cliConfig) return (distDirLayout, elaboratedPlan'', elaboratedShared, pkgsBuildStatus, buildSettings) type PlanDetails = (DistDirLayout, ElaboratedInstallPlan, ElaboratedSharedConfig, BuildStatusMap, BuildTimeSettings) executePlan :: PlanDetails -> IO ElaboratedInstallPlan executePlan (distDirLayout, elaboratedPlan, elaboratedShared, pkgsBuildStatus, buildSettings) = rebuildTargets verbosity distDirLayout elaboratedPlan elaboratedShared pkgsBuildStatus -- Avoid trying to use act-as-setup mode: buildSettings { buildSettingNumJobs = 1 } cleanProject :: FilePath -> IO () cleanProject testdir = do alreadyExists <- doesDirectoryExist distDir when alreadyExists $ removeDirectoryRecursive distDir where projectRootDir = "tests" </> "IntegrationTests2" </> testdir distDirLayout = defaultDistDirLayout projectRootDir distDir = distDirectory distDirLayout verbosity :: Verbosity verbosity = minBound --normal --verbose --maxBound --minBound ------------------------------------------- -- Tasty integration to adjust the config -- withProjectConfig :: (ProjectConfig -> TestTree) -> TestTree withProjectConfig testtree = askOption $ \ghcPath -> testtree (mkProjectConfig ghcPath) mkProjectConfig :: GhcPath -> ProjectConfig mkProjectConfig (GhcPath ghcPath) = mempty { projectConfigShared = mempty { projectConfigHcPath = 
maybeToFlag ghcPath }, projectConfigBuildOnly = mempty { projectConfigNumJobs = toFlag (Just 1) } } where maybeToFlag = maybe mempty toFlag data GhcPath = GhcPath (Maybe FilePath) deriving Typeable instance IsOption GhcPath where defaultValue = GhcPath Nothing optionName = Tagged "with-ghc" optionHelp = Tagged "The ghc compiler to use" parseValue = Just . GhcPath . Just projectConfigOptionDescriptions :: [OptionDescription] projectConfigOptionDescriptions = [Option (Proxy :: Proxy GhcPath)] --------------------------------------- -- HUint style utils for this context -- expectException :: Exception e => String -> IO a -> IO e expectException expected action = do res <- try action case res of Left e -> return e Right _ -> throwIO $ HUnitFailure $ "expected an exception " ++ expected expectPackagePreExisting :: ElaboratedInstallPlan -> PackageId -> IO InstalledPackageInfo expectPackagePreExisting plan pkgid = do planpkg <- expectPlanPackage plan pkgid case planpkg of InstallPlan.PreExisting pkg -> return pkg _ -> unexpectedPackageState "PreExisting" planpkg expectPackageConfigured :: ElaboratedInstallPlan -> PackageId -> IO ElaboratedConfiguredPackage expectPackageConfigured plan pkgid = do planpkg <- expectPlanPackage plan pkgid case planpkg of InstallPlan.Configured pkg -> return pkg _ -> unexpectedPackageState "Configured" planpkg expectPackageInstalled :: ElaboratedInstallPlan -> PackageId -> IO (ElaboratedConfiguredPackage, Maybe InstalledPackageInfo, BuildSuccess) expectPackageInstalled plan pkgid = do planpkg <- expectPlanPackage plan pkgid case planpkg of InstallPlan.Installed (ReadyPackage pkg) mipkg result -> return (pkg, mipkg, result) _ -> unexpectedPackageState "Installed" planpkg expectPackageFailed :: ElaboratedInstallPlan -> PackageId -> IO (ElaboratedConfiguredPackage, BuildFailure) expectPackageFailed plan pkgid = do planpkg <- expectPlanPackage plan pkgid case planpkg of InstallPlan.Failed pkg failure -> return (pkg, failure) _ -> 
unexpectedPackageState "Failed" planpkg unexpectedPackageState :: String -> ElaboratedPlanPackage -> IO a unexpectedPackageState expected planpkg = throwIO $ HUnitFailure $ "expected to find " ++ display (packageId planpkg) ++ " in the " ++ expected ++ " state, but it is actually in the " ++ actual ++ "state." where actual = case planpkg of InstallPlan.PreExisting{} -> "PreExisting" InstallPlan.Configured{} -> "Configured" InstallPlan.Processing{} -> "Processing" InstallPlan.Installed{} -> "Installed" InstallPlan.Failed{} -> "Failed" expectPlanPackage :: ElaboratedInstallPlan -> PackageId -> IO ElaboratedPlanPackage expectPlanPackage plan pkgid = case [ pkg | pkg <- InstallPlan.toList plan , packageId pkg == pkgid ] of [pkg] -> return pkg [] -> throwIO $ HUnitFailure $ "expected to find " ++ display pkgid ++ " in the install plan but it's not there" _ -> throwIO $ HUnitFailure $ "expected to find only one instance of " ++ display pkgid ++ " in the install plan but there's several" expectBuildFailed :: BuildFailure -> IO () expectBuildFailed (BuildFailed _str) = return () expectBuildFailed failure = assertFailure $ "expected BuildFailed, got " ++ show failure --------------------------------------- -- Other utils -- -- | Allow altering a file during a test, but then restore it afterwards -- withFileFinallyRestore :: FilePath -> IO a -> IO a withFileFinallyRestore file action = do copyFile file backup action `finally` renameFile backup file where backup = file <.> "backup"
headprogrammingczar/cabal
cabal-install/tests/IntegrationTests2.hs
bsd-3-clause
14,311
0
15
3,342
2,913
1,474
1,439
282
5
module KParse where import KTypes import qualified Data.Binary.Get as Get import qualified Data.ByteString as BS import qualified Data.Int as I import qualified Data.Word as W import qualified Data.Vector as V import qualified Data.Vector.Unboxed as U import Data.Binary.IEEE754 (floatToWord, wordToFloat, doubleToWord, wordToDouble) import Control.Monad (replicateM) import Debug.Trace (trace) parseObject :: Bool -> Get.Get KObject parseObject le = do typeCode <- Get.getWord8 parseWithType (fromIntegral typeCode) le parseWithType :: I.Int8 -> Bool -> Get.Get KObject parseWithType tc le | tc >= -19 && tc < 0 = fmap Atom $ parseAtom tc le | tc < 19 && tc >= 0 = fmap Vector $ parseVector tc le | tc >= 20 && tc <= 76 = return $ Error "Enumerated types NYI" | tc >= 77 && tc <= 97 = return $ Error "Nested types NYI" | tc == 98 = parseTable le | tc == 99 = parseDictOrKeyed le | otherwise = return $ Error "Unknown type, or NYI" parseAtom :: I.Int8 -> Bool -> Get.Get KAtom parseAtom tc le = case fromIntegral tc of -19 -> fmap (Time . fromIntegral) getWord32 -18 -> fmap (Second . fromIntegral) getWord32 -17 -> fmap (Minute . fromIntegral) getWord32 -16 -> fmap (Timespan . fromIntegral) getWord64 -15 -> fmap DateTime parseDbl -14 -> fmap (Date . fromIntegral) getWord32 -13 -> fmap (Month . fromIntegral) getWord32 -12 -> fmap (Timestamp . fromIntegral) getWord64 -11 -> fmap Symbol parseSym -10 -> fmap (Char . fromIntegral) Get.getWord8 -9 -> fmap Float parseDbl -8 -> fmap Real parseFlt -7 -> fmap (Long . fromIntegral) getWord64 -6 -> fmap (Int . fromIntegral) getWord32 -5 -> fmap (Short . fromIntegral) getWord16 -4 -> fmap (Byte . fromIntegral) Get.getWord8 -2 -> fmap Guid (Get.getByteString 128) -1 -> fmap (Boolean . not . 
(==0)) Get.getWord8 _ -> undefined where getWord16 = if le then Get.getWord16le else Get.getWord16be getWord32 = if le then Get.getWord32le else Get.getWord32be getWord64 = if le then Get.getWord64le else Get.getWord64be parseDbl = wordToDouble `fmap` getWord64 parseFlt = wordToFloat `fmap` getWord32 parseVector :: I.Int8 -> Bool -> Get.Get KVector parseVector tc le = do attr <- Get.getWord8 len <- getWord32 parseVectorLen tc le len where getWord32 = if le then Get.getWord32le else Get.getWord32be parseVectorLen :: I.Int8 -> Bool -> W.Word32 -> Get.Get KVector parseVectorLen tc le len = case tc of 19 -> fmap TimeV (ui getWord32) 18 -> fmap SecondV (ui getWord32) 17 -> fmap MinuteV (ui getWord32) 16 -> fmap TimespanV (ui getWord64) 15 -> fmap DateTimeV (u parseDbl) 14 -> fmap DateV (ui getWord32) 13 -> fmap MonthV (ui getWord32) 12 -> fmap TimestampV (ui getWord64) 11 -> fmap SymbolV (v parseSym) 10 -> fmap CharV (ui Get.getWord8) 9 -> fmap FloatV (u parseDbl) 8 -> fmap RealV (u parseFlt) 7 -> fmap LongV (ui getWord64) 6 -> fmap IntV (ui getWord32) 5 -> fmap ShortV (ui getWord16) 4 -> fmap ByteV (ui Get.getWord8) 2 -> fmap GuidV (v $ Get.getByteString 128) 1 -> fmap BooleanV (u (fmap (not . (==0)) Get.getWord8)) 0 -> fmap ListV (v $ parseObject le) _ -> undefined where getWord16 = if le then Get.getWord16le else Get.getWord16be getWord32 = if le then Get.getWord32le else Get.getWord32be getWord64 = if le then Get.getWord64le else Get.getWord64be parseDbl = return . wordToDouble =<< getWord64 parseFlt = return . wordToFloat =<< getWord32 v :: Get.Get a -> Get.Get (V.Vector a) v = V.replicateM (fromIntegral len) u :: U.Unbox a => Get.Get a -> Get.Get (U.Vector a) u = U.replicateM (fromIntegral len) ui :: (Integral b, Num a, U.Unbox a, U.Unbox b) => Get.Get b -> Get.Get (U.Vector a) ui = fmap (U.map fromIntegral) . 
u parseDictOrKeyed :: Bool -> Get.Get KObject parseDictOrKeyed le = do key <- parseObject le val <- parseObject le return $ case (key, val) of (Vector k, Vector v) -> Dictionary (k, v) (Table k, Table v) -> KeyTable (k, v) parseTable :: Bool -> Get.Get KObject parseTable le = do attr <- Get.getWord8 dict <- parseObject le return $ case dict of Dictionary (k, ListV v) -> Table (k, v) _ -> Error "Unable to parse table" -- TODO: find more efficient construction parseSym :: Get.Get BS.ByteString parseSym = go >>= return . BS.pack where go = Get.getWord8 >>= \b -> if b == 0 then return [] else go >>= return . (b:)
carrutstick/hasq
src/Hasq/KParse.hs
bsd-3-clause
4,818
0
15
1,331
1,815
919
896
108
23
{-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeOperators #-} module Servant.Ekg where import Control.Concurrent.MVar import Control.Exception import Control.Monad import qualified Data.HashMap.Strict as H import Data.Monoid import Data.Proxy import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Encoding as T import Data.Time.Clock import GHC.TypeLits import Network.HTTP.Types (Method, Status (..)) import Network.Wai import Servant.API import System.Metrics import qualified System.Metrics.Counter as Counter import qualified System.Metrics.Distribution as Distribution import qualified System.Metrics.Gauge as Gauge gaugeInflight :: Gauge.Gauge -> Middleware gaugeInflight inflight application request respond = bracket_ (Gauge.inc inflight) (Gauge.dec inflight) (application request respond) -- | Count responses with 2XX, 4XX, 5XX, and XXX response codes. 
countResponseCodes :: (Counter.Counter, Counter.Counter, Counter.Counter, Counter.Counter) -> Middleware countResponseCodes (c2XX, c4XX, c5XX, cXXX) application request respond = application request respond' where respond' res = count (responseStatus res) >> respond res count Status{statusCode = sc } | 200 <= sc && sc < 300 = Counter.inc c2XX | 400 <= sc && sc < 500 = Counter.inc c4XX | 500 <= sc && sc < 600 = Counter.inc c5XX | otherwise = Counter.inc cXXX responseTimeDistribution :: Distribution.Distribution -> Middleware responseTimeDistribution dist application request respond = bracket getCurrentTime stop $ const $ application request respond where stop t1 = do t2 <- getCurrentTime let dt = diffUTCTime t2 t1 Distribution.add dist $ fromRational $ (*1000) $ toRational dt data Meters = Meters { metersInflight :: Gauge.Gauge , metersC2XX :: Counter.Counter , metersC4XX :: Counter.Counter , metersC5XX :: Counter.Counter , metersCXXX :: Counter.Counter , metersTime :: Distribution.Distribution } monitorEndpoints :: HasEndpoint api => Proxy api -> Store -> MVar (H.HashMap Text Meters) -> Middleware monitorEndpoints proxy store meters application request respond = do let path = case getEndpoint proxy request of Nothing -> "unknown" Just (ps,method) -> T.intercalate "." $ ps <> [T.decodeUtf8 method] Meters{..} <- modifyMVar meters $ \ms -> case H.lookup path ms of Nothing -> do let prefix = "servant.path." <> path <> "." metersInflight <- createGauge (prefix <> "in_flight") store metersC2XX <- createCounter (prefix <> "responses.2XX") store metersC4XX <- createCounter (prefix <> "responses.4XX") store metersC5XX <- createCounter (prefix <> "responses.5XX") store metersCXXX <- createCounter (prefix <> "responses.XXX") store metersTime <- createDistribution (prefix <> "time_ms") store let m = Meters{..} return (H.insert path m ms, m) Just m -> return (ms,m) let application' = responseTimeDistribution metersTime . 
countResponseCodes (metersC2XX, metersC4XX, metersC5XX, metersCXXX) . gaugeInflight metersInflight $ application application' request respond class HasEndpoint a where getEndpoint :: Proxy a -> Request -> Maybe ([Text], Method) instance (HasEndpoint (a :: *), HasEndpoint (b :: *)) => HasEndpoint (a :<|> b) where getEndpoint _ req = getEndpoint (Proxy :: Proxy a) req `mplus` getEndpoint (Proxy :: Proxy b) req instance (KnownSymbol (path :: Symbol), HasEndpoint (sub :: *)) => HasEndpoint (path :> sub) where getEndpoint _ req = case pathInfo req of p:ps | p == T.pack (symbolVal (Proxy :: Proxy path)) -> do (end, method) <- getEndpoint (Proxy :: Proxy sub) req{ pathInfo = ps } return (p:end, method) _ -> Nothing instance (KnownSymbol (capture :: Symbol), HasEndpoint (sub :: *)) => HasEndpoint (Capture capture a :> sub) where getEndpoint _ req = case pathInfo req of _:ps -> do (end, method) <- getEndpoint (Proxy :: Proxy sub) req{ pathInfo = ps } let p = T.pack $ (':':) $ symbolVal (Proxy :: Proxy capture) return (p:end, method) _ -> Nothing instance HasEndpoint (sub :: *) => HasEndpoint (Header h a :> sub) where getEndpoint _ = getEndpoint (Proxy :: Proxy sub) instance HasEndpoint (sub :: *) => HasEndpoint (QueryParam (h :: Symbol) a :> sub) where getEndpoint _ = getEndpoint (Proxy :: Proxy sub) instance HasEndpoint (sub :: *) => HasEndpoint (QueryParams (h :: Symbol) a :> sub) where getEndpoint _ = getEndpoint (Proxy :: Proxy sub) instance HasEndpoint (sub :: *) => HasEndpoint (ReqBody a :> sub) where getEndpoint _ = getEndpoint (Proxy :: Proxy sub) instance HasEndpoint (Get a) where getEndpoint _ req = case pathInfo req of [] | requestMethod req == "GET" -> Just ([],"GET") _ -> Nothing instance HasEndpoint (Put a) where getEndpoint _ req = case pathInfo req of [] | requestMethod req == "PUT" -> Just ([],"PUT") _ -> Nothing instance HasEndpoint (Post a) where getEndpoint _ req = case pathInfo req of [] | requestMethod req == "POST" -> Just ([],"POST") _ -> Nothing 
instance HasEndpoint (Delete) where getEndpoint _ req = case pathInfo req of [] | requestMethod req == "DELETE" -> Just ([],"DELETE") _ -> Nothing instance HasEndpoint (Raw) where getEndpoint _ _ = Just ([],"RAW")
anchor/servant-ekg
lib/Servant/Ekg.hs
bsd-3-clause
6,177
0
19
1,700
1,920
996
924
132
3
{-# LANGUAGE RankNTypes, TypeFamilies, FlexibleInstances, MultiParamTypeClasses, UndecidableInstances, BangPatterns #-} -- | Duck utility functions module Util ( -- * IO exit , die , debug , debugVal -- * Data -- ** List , unique , groupPairs , spanJust , zipCheck , zipWithCheck -- ** Stack , Stack(..) , (++.) , splitStack -- * Functionals , (...) , first, second , left, right -- * Monad , nop , (>.), (>.=), (>=.) , (<<), (.<), (=.<), (.=<) , foldM1 -- ** Error and Exception , tryError , MonadInterrupt, handleE -- ** Strict Identity , Sequence, runSequence ) where import Control.Arrow import Control.Exception import Control.Monad import Control.Monad.Error import Control.Monad.Trans.Reader as Reader import Control.Monad.Trans.State as State import Data.Function import Data.List import Debug.Trace import System.Exit import System.IO debug :: Show a => a -> b -> b debug = traceShow debugVal :: Show a => a -> a debugVal a = debug a a unique :: Eq a => [a] -> Maybe a unique (x:l) | all (x ==) l = Just x unique _ = Nothing -- |'group' on keyed pairs. groupPairs :: (Ord a, Eq a) => [(a,b)] -> [(a,[b])] groupPairs = map ((head *** id) . unzip) . groupBy ((==) `on` fst) . sortBy (compare `on` fst) -- |Return the longest prefix that is Just, and the rest. spanJust :: (a -> Maybe b) -> [a] -> ([b],[a]) spanJust _ [] = ([],[]) spanJust f l@(x:r) = maybe ([],l) (\y -> first (y:) $ spanJust f r) $ f x -- |Same as 'zip', but bails if the lists aren't equal size zipCheck :: MonadPlus m => [a] -> [b] -> m [(a,b)] zipCheck [] [] = return [] zipCheck (x:xl) (y:yl) = ((x,y) :) =.< zipCheck xl yl zipCheck _ _ = mzero zipWithCheck :: MonadPlus m => (a -> b -> c) -> [a] -> [b] -> m [c] zipWithCheck f x y = map (uncurry f) =.< zipCheck x y (...) :: (c -> d) -> (a -> b -> c) -> a -> b -> d (...) 
f g x y = f (g x y) exit :: Int -> IO a exit 0 = exitSuccess exit i = exitWith (ExitFailure i) -- |Print a string on stderr and exit with the given value die :: MonadIO m => Int -> String -> m a die i s = liftIO $ do hPutStrLn stderr s exit i -- |Stacks are lists with an extra bit of information at the bottom -- This is useful to represent stacks with different layers of types data Stack a b = Base b | a :. Stack a b -- |append a list and a stack (++.) :: [a] -> Stack a b -> Stack a b (++.) [] r = r (++.) (h:t) r = h :. (t ++. r) instance (Show a, Show b) => Show (Stack a b) where show s = '[' : intercalate "," (map show a) ++ " . " ++ show b ++ "]" where (a,b) = splitStack s splitStack :: Stack a b -> ([a],b) splitStack (Base b) = ([],b) splitStack (a :. s) = (a:l,b) where (l,b) = splitStack s -- Some convenient extra monad operators infixl 1 >., >.=, >=. infixr 1 <<, .<, =.<, .=< (>.) :: Monad m => m a -> b -> m b (.<) :: Monad m => b -> m a -> m b (<<) :: Monad m => m b -> m a -> m b (>.=) :: Monad m => m a -> (a -> b) -> m b (=.<) :: Monad m => (a -> b) -> m a -> m b (>=.) :: Monad m => (a -> m b) -> (b -> c) -> a -> m c (.=<) :: Monad m => (b -> c) -> (a -> m b) -> a -> m c (>.) e r = e >> return r (.<) r e = e >> return r -- <$ (<<) r e = e >> r (>.=) = flip liftM (=.<) = liftM -- <$> (>=.) e r = e >=> return . r (.=<) r e = e >=> return . r nop :: Monad m => m () nop = return () foldM1 :: Monad m => (a -> a -> m a) -> [a] -> m a foldM1 f (h:t) = foldM f h t foldM1 _ [] = error "foldM1 applied to an empty list" tryError :: (MonadError e m, Error e) => m a -> m (Either e a) tryError f = catchError (Right =.< f) (return . 
Left) -- A monad for asynchronously interruptible computations -- I.e., the equivalent of handle for general monads class Monad m => MonadInterrupt m where catchE :: Exception e => m a -> (e -> m a) -> m a handleE :: (MonadInterrupt m, Exception e) => (e -> m a) -> m a -> m a handleE = flip catchE instance MonadInterrupt IO where catchE = Control.Exception.catch instance MonadInterrupt m => MonadInterrupt (Reader.ReaderT r m) where catchE = Reader.liftCatch catchE instance MonadInterrupt m => MonadInterrupt (State.StateT s m) where catchE = State.liftCatch catchE instance (MonadInterrupt m, Error e) => MonadInterrupt (ErrorT e m) where catchE = flip $ mapErrorT . handleE . (runErrorT .) -- |Strict identity monad, similar (but not identical) to Control.Parallel.Strategies.Eval newtype Sequence a = Sequence { runSequence :: a } instance Functor Sequence where fmap f = Sequence . f . runSequence instance Monad Sequence where return = Sequence m >>= k = k $! runSequence m m >> k = seq (runSequence m) k
girving/duck
duck/Util.hs
bsd-3-clause
4,608
0
12
1,075
2,054
1,117
937
-1
-1
-- | 0 から 9 までの 10 種類の数字を高々一度だけ使って 4 桁の数字を 2 つ用意する -- それら 2 つの数の差の最小を求めよ -- module Num8 where import Data.List (foldl') import Control.Arrow ((***)) import Control.Monad.State (StateT, evalStateT, get, put, forM_, when, liftIO) select :: [Int] -> [(Int, [Int])] select [x] = [(x, [])] select (x:xs) = (x,xs) : [ (y, x:ys) | (y, ys) <- select xs ] perms :: Int -> [Int] -> [([Int], [Int])] perms 0 xs = [([], xs)] perms n xs = [ (y:zs, ws) | (y, ys) <- select xs , (zs, ws) <- perms (n-1) ys ] gen :: [Int] -> Int -> [([Int], [Int])] gen seeds n = [ (xs, zs) | (xs, ys) <- perms n seeds , (zs, ws) <- perms n ys ] gen' :: [Int] -> Int -> [(Int, Int)] gen' seeds = filter (uncurry (<)) . map (toInt***toInt ) . gen seeds where toInt = foldl' (\b a -> b*10+a) 0 num :: [Int] -> Int -> StateT (Int, Int, Int) IO () num seeds n = do forM_ (gen' seeds n) $ \(xs, ys) -> do let v' = (xs, ys, ys-xs) v <- get when (thd3 v' < thd3 v) $ do { put v' ; liftIO $ putStrLn $ "=> " ++ show v' } thd3 :: (a, b, c) -> c thd3 (_, _, x) = x quiz :: [Int] -> Int -> IO Int quiz seeds n | n <= 5 = do { num seeds n ; (_, _, v) <- get ; return v } `evalStateT` (def, 0, def) | otherwise = fail "digits must be less than or equal 5" where def = 10^n-1 main :: IO Int main = quiz [0..9] 4
cutsea110/aop
src/Num8.hs
bsd-3-clause
1,637
0
16
568
785
436
349
39
1
{-| This module provides efficient and streaming left folds that you can combine using 'Applicative' style. Import this module qualified to avoid clashing with the Prelude: >>> import qualified Control.Foldl as L Use 'fold' to apply a 'Fold' to a list: >>> L.fold L.sum [1..100] 5050 'Fold's are 'Applicative's, so you can combine them using 'Applicative' combinators: >>> import Control.Applicative >>> let average = (/) <$> L.sum <*> L.genericLength Taking the sum, the sum of squares, ..., upto the sum of x^5 >>> import Data.Traversable >>> let powerSums = sequenceA [premap (^n) L.sum | n <- [1..5]] >>> L.fold powerSums [1..10] [55,385,3025,25333,220825] These combined folds will still traverse the list only once, streaming efficiently over the list in constant space without space leaks: >>> L.fold average [1..10000000] 5000000.5 >>> L.fold ((,) <$> L.minimum <*> L.maximum) [1..10000000] (Just 1,Just 10000000) -} {-# LANGUAGE ExistentialQuantification, RankNTypes, Trustworthy #-} module Control.Foldl ( -- * Fold Types Fold(..) , FoldM(..) -- * Folding , fold , foldM , scan -- * Folds , Control.Foldl.mconcat , Control.Foldl.foldMap , head , last , lastDef , null , length , and , or , all , any , sum , product , maximum , minimum , elem , notElem , find , index , elemIndex , findIndex , random -- * Generic Folds , genericLength , genericIndex -- * Container folds , list , revList , nub , eqNub , set , vector -- * Utilities -- $utilities , purely , impurely , generalize , simplify , _Fold1 , premap , premapM , Handler , handles , EndoM(..) 
, HandlerM , handlesM -- * Re-exports -- $reexports , module Control.Monad.Primitive , module Data.Foldable , module Data.Vector.Generic ) where import Control.Applicative (Applicative(pure, (<*>)),liftA2) import Control.Foldl.Internal (Maybe'(..), lazy, Either'(..), hush) import Control.Monad ((>=>)) import Control.Monad.Primitive (PrimMonad) import Data.Foldable (Foldable) import qualified Data.Foldable as F import Data.Functor.Constant (Constant(Constant, getConstant)) import Data.Functor.Identity (Identity, runIdentity) import Data.Profunctor import Data.Monoid (Monoid(mempty, mappend), Endo(Endo, appEndo)) import Data.Vector.Generic (Vector) import qualified Data.Vector.Generic as V import qualified Data.Vector.Generic.Mutable as M import qualified Data.List as List import qualified Data.Set as Set import System.Random.MWC (createSystemRandom, uniformR) import Prelude hiding ( head , last , null , length , and , or , all , any , sum , product , maximum , minimum , elem , notElem ) {-| Efficient representation of a left fold that preserves the fold's step function, initial accumulator, and extraction function This allows the 'Applicative' instance to assemble derived folds that traverse the container only once A \''Fold' a b\' processes elements of type __a__ and results in a value of type __b__. -} data Fold a b -- | @Fold @ @ step @ @ initial @ @ extract@ = forall x. Fold (x -> a -> x) x (x -> b) data Pair a b = Pair !a !b instance Functor (Fold a) where fmap f (Fold step begin done) = Fold step begin (f . 
done) {-# INLINABLE fmap #-} instance Profunctor Fold where lmap = premap rmap = fmap instance Applicative (Fold a) where pure b = Fold (\() _ -> ()) () (\() -> b) {-# INLINABLE pure #-} (Fold stepL beginL doneL) <*> (Fold stepR beginR doneR) = let step (Pair xL xR) a = Pair (stepL xL a) (stepR xR a) begin = Pair beginL beginR done (Pair xL xR) = doneL xL (doneR xR) in Fold step begin done {-# INLINABLE (<*>) #-} instance Monoid b => Monoid (Fold a b) where mempty = pure mempty {-# INLINABLE mempty #-} mappend = liftA2 mappend {-# INLINABLE mappend #-} instance Num b => Num (Fold a b) where fromInteger = pure . fromInteger {-# INLINABLE fromInteger #-} negate = fmap negate {-# INLINABLE negate #-} abs = fmap abs {-# INLINABLE abs #-} signum = fmap signum {-# INLINABLE signum #-} (+) = liftA2 (+) {-# INLINABLE (+) #-} (*) = liftA2 (*) {-# INLINABLE (*) #-} (-) = liftA2 (-) {-# INLINABLE (-) #-} instance Fractional b => Fractional (Fold a b) where fromRational = pure . fromRational {-# INLINABLE fromRational #-} recip = fmap recip {-# INLINABLE recip #-} (/) = liftA2 (/) {-# INLINABLE (/) #-} instance Floating b => Floating (Fold a b) where pi = pure pi {-# INLINABLE pi #-} exp = fmap exp {-# INLINABLE exp #-} sqrt = fmap sqrt {-# INLINABLE sqrt #-} log = fmap log {-# INLINABLE log #-} sin = fmap sin {-# INLINABLE sin #-} tan = fmap tan {-# INLINABLE tan #-} cos = fmap cos {-# INLINABLE cos #-} asin = fmap sin {-# INLINABLE asin #-} atan = fmap atan {-# INLINABLE atan #-} acos = fmap acos {-# INLINABLE acos #-} sinh = fmap sinh {-# INLINABLE sinh #-} tanh = fmap tanh {-# INLINABLE tanh #-} cosh = fmap cosh {-# INLINABLE cosh #-} asinh = fmap asinh {-# INLINABLE asinh #-} atanh = fmap atanh {-# INLINABLE atanh #-} acosh = fmap acosh {-# INLINABLE acosh #-} (**) = liftA2 (**) {-# INLINABLE (**) #-} logBase = liftA2 logBase {-# INLINABLE logBase #-} {-| Like 'Fold', but monadic. 
A \''FoldM' m a b\' processes elements of type __a__ and results in a monadic value of type __m b__. -} data FoldM m a b = -- | @FoldM @ @ step @ @ initial @ @ extract@ forall x . FoldM (x -> a -> m x) (m x) (x -> m b) instance Monad m => Functor (FoldM m a) where fmap f (FoldM step start done) = FoldM step start done' where done' x = do b <- done x return $! f b {-# INLINABLE fmap #-} instance Monad m => Applicative (FoldM m a) where pure b = FoldM (\() _ -> return ()) (return ()) (\() -> return b) {-# INLINABLE pure #-} (FoldM stepL beginL doneL) <*> (FoldM stepR beginR doneR) = let step (Pair xL xR) a = do xL' <- stepL xL a xR' <- stepR xR a return $! Pair xL' xR' begin = do xL <- beginL xR <- beginR return $! Pair xL xR done (Pair xL xR) = do f <- doneL xL x <- doneR xR return $! f x in FoldM step begin done {-# INLINABLE (<*>) #-} instance Monad m => Profunctor (FoldM m) where rmap = fmap lmap = premapM instance (Monoid b, Monad m) => Monoid (FoldM m a b) where mempty = pure mempty {-# INLINABLE mempty #-} mappend = liftA2 mappend {-# INLINABLE mappend #-} instance (Monad m, Num b) => Num (FoldM m a b) where fromInteger = pure . fromInteger {-# INLINABLE fromInteger #-} negate = fmap negate {-# INLINABLE negate #-} abs = fmap abs {-# INLINABLE abs #-} signum = fmap signum {-# INLINABLE signum #-} (+) = liftA2 (+) {-# INLINABLE (+) #-} (*) = liftA2 (*) {-# INLINABLE (*) #-} (-) = liftA2 (-) {-# INLINABLE (-) #-} instance (Monad m, Fractional b) => Fractional (FoldM m a b) where fromRational = pure . 
fromRational {-# INLINABLE fromRational #-} recip = fmap recip {-# INLINABLE recip #-} (/) = liftA2 (/) {-# INLINABLE (/) #-} instance (Monad m, Floating b) => Floating (FoldM m a b) where pi = pure pi {-# INLINABLE pi #-} exp = fmap exp {-# INLINABLE exp #-} sqrt = fmap sqrt {-# INLINABLE sqrt #-} log = fmap log {-# INLINABLE log #-} sin = fmap sin {-# INLINABLE sin #-} tan = fmap tan {-# INLINABLE tan #-} cos = fmap cos {-# INLINABLE cos #-} asin = fmap sin {-# INLINABLE asin #-} atan = fmap atan {-# INLINABLE atan #-} acos = fmap acos {-# INLINABLE acos #-} sinh = fmap sinh {-# INLINABLE sinh #-} tanh = fmap tanh {-# INLINABLE tanh #-} cosh = fmap cosh {-# INLINABLE cosh #-} asinh = fmap asinh {-# INLINABLE asinh #-} atanh = fmap atanh {-# INLINABLE atanh #-} acosh = fmap acosh {-# INLINABLE acosh #-} (**) = liftA2 (**) {-# INLINABLE (**) #-} logBase = liftA2 logBase {-# INLINABLE logBase #-} -- | Apply a strict left 'Fold' to a 'Foldable' container fold :: Foldable f => Fold a b -> f a -> b fold (Fold step begin done) as = F.foldr cons done as begin where cons a k x = k $! step x a {-# INLINE fold #-} -- | Like 'fold', but monadic foldM :: (Foldable f, Monad m) => FoldM m a b -> f a -> m b foldM (FoldM step begin done) as0 = do x0 <- begin F.foldr step' done as0 $! x0 where step' a k x = do x' <- step x a k $! x' {-# INLINE foldM #-} -- | Convert a strict left 'Fold' into a scan scan :: Fold a b -> [a] -> [b] scan (Fold step begin done) as = foldr cons nil as begin where nil x = done x:[] cons a k x = done x:(k $! 
step x a) {-# INLINE scan #-} -- | Fold all values within a container using 'mappend' and 'mempty' mconcat :: Monoid a => Fold a a mconcat = Fold mappend mempty id {-# INLINABLE mconcat #-} -- | Convert a \"@foldMap@\" to a 'Fold' foldMap :: Monoid w => (a -> w) -> (w -> b) -> Fold a b foldMap to = Fold (\x a -> mappend x (to a)) mempty {-# INLINABLE foldMap #-} {-| Get the first element of a container or return 'Nothing' if the container is empty -} head :: Fold a (Maybe a) head = _Fold1 const {-# INLINABLE head #-} {-| Get the last element of a container or return 'Nothing' if the container is empty -} last :: Fold a (Maybe a) last = _Fold1 (flip const) {-# INLINABLE last #-} {-| Get the last element of a container or return a default value if the container is empty -} lastDef :: a -> Fold a a lastDef a = Fold (\_ a' -> a') a id {-# INLINABLE lastDef #-} -- | Returns 'True' if the container is empty, 'False' otherwise null :: Fold a Bool null = Fold (\_ _ -> False) True id {-# INLINABLE null #-} -- | Return the length of the container length :: Fold a Int length = genericLength {- Technically, 'length' is just 'genericLength' specialized to 'Int's. I keep the two separate so that I can later provide an 'Int'-specialized implementation of 'length' for performance reasons like "GHC.List" does without breaking backwards compatibility. 
-} {-# INLINABLE length #-} -- | Returns 'True' if all elements are 'True', 'False' otherwise and :: Fold Bool Bool and = Fold (&&) True id {-# INLINABLE and #-} -- | Returns 'True' if any element is 'True', 'False' otherwise or :: Fold Bool Bool or = Fold (||) False id {-# INLINABLE or #-} {-| @(all predicate)@ returns 'True' if all elements satisfy the predicate, 'False' otherwise -} all :: (a -> Bool) -> Fold a Bool all predicate = Fold (\x a -> x && predicate a) True id {-# INLINABLE all #-} {-| @(any predicate)@ returns 'True' if any element satisfies the predicate, 'False' otherwise -} any :: (a -> Bool) -> Fold a Bool any predicate = Fold (\x a -> x || predicate a) False id {-# INLINABLE any #-} -- | Computes the sum of all elements sum :: Num a => Fold a a sum = Fold (+) 0 id {-# INLINABLE sum #-} -- | Computes the product all elements product :: Num a => Fold a a product = Fold (*) 1 id {-# INLINABLE product #-} -- | Computes the maximum element maximum :: Ord a => Fold a (Maybe a) maximum = _Fold1 max {-# INLINABLE maximum #-} -- | Computes the minimum element minimum :: Ord a => Fold a (Maybe a) minimum = _Fold1 min {-# INLINABLE minimum #-} {-| @(elem a)@ returns 'True' if the container has an element equal to @a@, 'False' otherwise -} elem :: Eq a => a -> Fold a Bool elem a = any (a ==) {-# INLINABLE elem #-} {-| @(notElem a)@ returns 'False' if the container has an element equal to @a@, 'True' otherwise -} notElem :: Eq a => a -> Fold a Bool notElem a = all (a /=) {-# INLINABLE notElem #-} {-| @(find predicate)@ returns the first element that satisfies the predicate or 'Nothing' if no element satisfies the predicate -} find :: (a -> Bool) -> Fold a (Maybe a) find predicate = Fold step Nothing' lazy where step x a = case x of Nothing' -> if predicate a then Just' a else Nothing' _ -> x {-# INLINABLE find #-} {-| @(index n)@ returns the @n@th element of the container, or 'Nothing' if the container has an insufficient number of elements -} index :: Int 
-> Fold a (Maybe a) index = genericIndex {-# INLINABLE index #-} {-| @(elemIndex a)@ returns the index of the first element that equals @a@, or 'Nothing' if no element matches -} elemIndex :: Eq a => a -> Fold a (Maybe Int) elemIndex a = findIndex (a ==) {-# INLINABLE elemIndex #-} {-| @(findIndex predicate)@ returns the index of the first element that satisfies the predicate, or 'Nothing' if no element satisfies the predicate -} findIndex :: (a -> Bool) -> Fold a (Maybe Int) findIndex predicate = Fold step (Left' 0) hush where step x a = case x of Left' i -> if predicate a then Right' i else Left' (i + 1) _ -> x {-# INLINABLE findIndex #-} data Pair3 a b c = Pair3 !a !b !c -- | Pick a random element, using reservoir sampling random :: FoldM IO a (Maybe a) random = FoldM step begin done where begin = do gen <- createSystemRandom return $! Pair3 gen Nothing' (1 :: Int) step (Pair3 gen Nothing' _) a = return $! Pair3 gen (Just' a) 2 step (Pair3 gen (Just' a) m) b = do n <- uniformR (1, m) gen let c = if n == 1 then b else a return $! Pair3 gen (Just' c) (m + 1) done (Pair3 _ ma _) = return (lazy ma) {-# INLINABLE random #-} -- | Like 'length', except with a more general 'Num' return value genericLength :: Num b => Fold a b genericLength = Fold (\n _ -> n + 1) 0 id {-# INLINABLE genericLength #-} -- | Like 'index', except with a more general 'Integral' argument genericIndex :: Integral i => i -> Fold a (Maybe a) genericIndex i = Fold step (Left' 0) done where step x a = case x of Left' j -> if i == j then Right' a else Left' (j + 1) _ -> x done x = case x of Left' _ -> Nothing Right' a -> Just a {-# INLINABLE genericIndex #-} -- | Fold all values into a list list :: Fold a [a] list = Fold (\x a -> x . (a:)) id ($ []) {-# INLINABLE list #-} -- | Fold all values into a list, in reverse order revList :: Fold a [a] revList = Fold (\x a -> a:x) [] id {-# INLINABLE revList #-} {-| /O(n log n)/. 
Fold values into a list with duplicates removed, while preserving their first occurrences -} nub :: Ord a => Fold a [a] nub = Fold step (Pair Set.empty id) fin where step (Pair s r) a = if Set.member a s then Pair s r else Pair (Set.insert a s) (r . (a :)) fin (Pair _ r) = r [] {-# INLINABLE nub #-} {-| /O(n^2)/. Fold values into a list with duplicates removed, while preserving their first occurrences -} eqNub :: Eq a => Fold a [a] eqNub = Fold step (Pair [] id) fin where step (Pair known r) a = if List.elem a known then Pair known r else Pair (a : known) (r . (a :)) fin (Pair _ r) = r [] {-# INLINABLE eqNub #-} -- | Fold values into a set set :: Ord a => Fold a (Set.Set a) set = Fold (flip Set.insert) Set.empty id {-# INLINABLE set #-} maxChunkSize :: Int maxChunkSize = 8 * 1024 * 1024 -- | Fold all values into a vector vector :: (PrimMonad m, Vector v a) => FoldM m a (v a) vector = FoldM step begin done where begin = do mv <- M.unsafeNew 10 return (Pair mv 0) step (Pair mv idx) a = do let len = M.length mv mv' <- if idx >= len then M.unsafeGrow mv (min len maxChunkSize) else return mv M.unsafeWrite mv' idx a return (Pair mv' (idx + 1)) done (Pair mv idx) = do v <- V.unsafeFreeze mv return (V.unsafeTake idx v) {-# INLINABLE vector #-} {- $utilities 'purely' and 'impurely' allow you to write folds compatible with the @foldl@ library without incurring a @foldl@ dependency. Write your fold to accept three parameters corresponding to the step function, initial accumulator, and extraction function and then users can upgrade your function to accept a 'Fold' or 'FoldM' using the 'purely' or 'impurely' combinators. 
For example, the @pipes@ library implements a @foldM@ function in @Pipes.Prelude@ with the following type: > foldM > :: Monad m > => (x -> a -> m x) -> m x -> (x -> m b) -> Producer a m () -> m b @foldM@ is set up so that you can wrap it with 'impurely' to accept a 'FoldM' instead: > impurely foldM :: Monad m => FoldM m a b -> Producer a m () -> m b -} -- | Upgrade a fold to accept the 'Fold' type purely :: (forall x . (x -> a -> x) -> x -> (x -> b) -> r) -> Fold a b -> r purely f (Fold step begin done) = f step begin done {-# INLINABLE purely #-} -- | Upgrade a monadic fold to accept the 'FoldM' type impurely :: Monad m => (forall x . (x -> a -> m x) -> m x -> (x -> m b) -> r) -> FoldM m a b -> r impurely f (FoldM step begin done) = f step begin done {-# INLINABLE impurely #-} {-| Generalize a `Fold` to a `FoldM` > generalize (pure r) = pure r > > generalize (f <*> x) = generalize f <*> generalize x -} generalize :: Monad m => Fold a b -> FoldM m a b generalize (Fold step begin done) = FoldM step' begin' done' where step' x a = return (step x a) begin' = return begin done' x = return (done x) {-# INLINABLE generalize #-} {-| Simplify a pure `FoldM` to a `Fold` > simplify (pure r) = pure r > > simplify (f <*> x) = simplify f <*> simplify x -} simplify :: FoldM Identity a b -> Fold a b simplify (FoldM step begin done) = Fold step' begin' done' where step' x a = runIdentity (step x a) begin' = runIdentity begin done' x = runIdentity (done x) {-# INLINABLE simplify #-} {-| @_Fold1 step@ returns a new 'Fold' using just a step function that has the same type for the accumulator and the element. The result type is the accumulator type wrapped in 'Maybe'. The initial accumulator is retrieved from the 'Foldable', the result is 'None' for empty containers. 
-} _Fold1 :: (a -> a -> a) -> Fold a (Maybe a) _Fold1 step = Fold step_ Nothing' lazy where step_ mx a = Just' (case mx of Nothing' -> a Just' x -> step x a) {-| @(premap f folder)@ returns a new 'Fold' where f is applied at each step > fold (premap f folder) list = fold folder (map f list) >>> fold (premap Sum mconcat) [1..10] Sum {getSum = 55} >>> fold mconcat (map Sum [1..10]) Sum {getSum = 55} > premap id = id > > premap (f . g) = premap g . premap f > premap k (pure r) = pure r > > premap k (f <*> x) = premap k f <*> premap k x -} premap :: (a -> b) -> Fold b r -> Fold a r premap f (Fold step begin done) = Fold step' begin done where step' x a = step x (f a) {-# INLINABLE premap #-} {-| @(premapM f folder)@ returns a new 'FoldM' where f is applied to each input element > foldM (premapM f folder) list = foldM folder (map f list) > premapM id = id > > premapM (f . g) = premap g . premap f > premapM k (pure r) = pure r > > premapM k (f <*> x) = premapM k f <*> premapM k x -} premapM :: (a -> b) -> FoldM m b r -> FoldM m a r premapM f (FoldM step begin done) = FoldM step' begin done where step' x a = step x (f a) {-# INLINABLE premapM #-} {-| A handler for the upstream input of a `Fold` Any lens, traversal, or prism will type-check as a `Handler` -} type Handler a b = forall x . (b -> Constant (Endo x) b) -> a -> Constant (Endo x) a {-| @(handles t folder)@ transforms the input of a `Fold` using a lens, traversal, or prism: > handles _1 :: Fold a r -> Fold (a, b) r > handles _Left :: Fold a r -> Fold (Either a b) r > handles traverse :: Traversable t => Fold a r -> Fold (t a) r >>> fold (handles traverse sum) [[1..5],[6..10]] 55 >>> fold (handles (traverse.traverse) sum) [[Nothing, Just 2, Just 7],[Just 13, Nothing, Just 20]] 42 >>> fold (handles (filtered even) sum) [1,3,5,7,21,21] 42 >>> fold (handles _2 mconcat) [(1,"Hello "),(2,"World"),(3,"!")] "Hello World!" > handles id = id > > handles (f . g) = handles f . 
handles g > handles t (pure r) = pure r > > handles t (f <*> x) = handles t f <*> handles t x -} handles :: Handler a b -> Fold b r -> Fold a r handles k (Fold step begin done) = Fold step' begin done where step' = flip (appEndo . getConstant . k (Constant . Endo . flip step)) {-# INLINABLE handles #-} {-| > instance Monad m => Monoid (EndoM m a) where > mempty = EndoM return > mappend (EndoM f) (EndoM g) = EndoM (f >=> g) -} newtype EndoM m a = EndoM { appEndoM :: a -> m a } instance Monad m => Monoid (EndoM m a) where mempty = EndoM return mappend (EndoM f) (EndoM g) = EndoM (f >=> g) {-| A Handler for the upstream input of `FoldM` Any lens, traversal, or prism will type-check as a `HandlerM` -} type HandlerM m a b = forall x . (b -> Constant (EndoM m x) b) -> a -> Constant (EndoM m x) a {-| @(handlesM t folder)@ transforms the input of a `FoldM` using a lens, traversal, or prism: > handlesM _1 :: FoldM m a r -> FoldM (a, b) r > handlesM _Left :: FoldM m a r -> FoldM (Either a b) r > handlesM traverse :: Traversable t => FoldM m a r -> FoldM m (t a) r `handlesM` obeys these laws: > handlesM id = id > > handlesM (f . g) = handlesM f . handlesM g > handlesM t (pure r) = pure r > > handlesM t (f <*> x) = handlesM t f <*> handlesM t x -} handlesM :: Monad m => HandlerM m a b -> FoldM m b r -> FoldM m a r handlesM k (FoldM step begin done) = FoldM step' begin done where step' = flip (appEndoM . getConstant . k (Constant . EndoM . flip step)) {-# INLINABLE handlesM #-} {- $reexports @Control.Monad.Primitive@ re-exports the 'PrimMonad' type class @Data.Foldable@ re-exports the 'Foldable' type class @Data.Vector.Generic@ re-exports the 'Vector' type class -}
danidiaz/Haskell-Foldl-Library
src/Control/Foldl.hs
bsd-3-clause
22,449
0
14
6,090
5,294
2,823
2,471
370
4
-- | -- Copyright : (c) Sam Truzjan 2013 -- License : BSD3 -- Maintainer : pxqr.sta@gmail.com -- Stability : stable -- Portability : portable -- -- (Word5 <-> Word8) and (Word8 -> Word5) bytestring packers using -- lookup table. -- {-# LANGUAGE CPP, BangPatterns #-} module Data.ByteString.Base32.Internal ( Word5 , Word8 , EncTable , unpack5 , DecTable , pack5 , pack5Lenient , invIx ) where #if !MIN_VERSION_base(4,6,0) import Prelude hiding (catch) #endif import Control.Exception hiding (mask) import Data.Bits.Extras import Data.ByteString as BS import Data.ByteString.Internal as BS import Data.Word import Foreign hiding (unsafePerformIO) import System.IO.Unsafe (unsafePerformIO) import System.Endian {----------------------------------------------------------------------- -- Utils -----------------------------------------------------------------------} type Word5 = Word8 -- System.Endian.toBE32 is slower because toBE32 implemented using -- cbits shuffle functions while toBE32' implemented used gcc -- intrinsics -- toBE64' :: Word64 -> Word64 toBE64' = if getSystemEndianness == BigEndian then id else byteSwap {-# INLINE toBE64' #-} toBE32' :: Word32 -> Word32 toBE32' = if getSystemEndianness == BigEndian then id else byteSwap {-# INLINE toBE32' #-} fromBE32' :: Word32 -> Word32 fromBE32' = toBE32' {-# INLINE fromBE32' #-} -- n = 2 ^ d padCeilN :: Int -> Int -> Int padCeilN !n !x | remd == 0 = x | otherwise = (x - remd) + n where mask = n - 1 remd = x .&. 
mask {----------------------------------------------------------------------- -- Encoding -----------------------------------------------------------------------} unpack5Ptr :: Ptr Word8 -> ByteString -> ByteString unpack5Ptr !tbl bs @ (PS fptr off sz) = unsafePerformIO $ do let unpackedSize = dstSize $ BS.length bs BS.create unpackedSize $ \ dst -> do withForeignPtr fptr $ \ ptr -> do dst_end <- bigStep dst (advancePtr ptr off) sz _ <- fillPadding dst_end (unpackedSize - (dst_end `minusPtr` dst)) return () where dstSize x = padCeilN 8 (d + if m == 0 then 0 else 1) where (d, m) = (x * 8) `quotRem` 5 fillPadding dst s = memset dst (c2w '=') (fromIntegral s) bigStep !dst !src !s | s >= 5 = do unpack5_40 dst src bigStep (dst `advancePtr` 8) (src `advancePtr` 5) (s - 5) | otherwise = smallStep dst src s 0 0 unpack5_40 !dst !src = do w32he <- peek (castPtr src) :: IO Word32 let w32 = toBE32' w32he fill8_32 0 (w32 `unsafeShiftR` 27) fill8_32 1 (w32 `unsafeShiftR` 22) fill8_32 2 (w32 `unsafeShiftR` 17) fill8_32 3 (w32 `unsafeShiftR` 12) fill8_32 4 (w32 `unsafeShiftR` 7) fill8_32 5 (w32 `unsafeShiftR` 2) w8 <- peekElemOff src 4 fill8_32 6 ( (w32 `unsafeShiftL` 3) .|. fromIntegral (w8 `unsafeShiftR` 5)) fill8_32 7 (fromIntegral w8) where fill8_32 :: Int -> Word32 -> IO () fill8_32 !i !w32 = do w8 <- peekByteOff tbl (fromIntegral w32 .&. 0x1f) poke (dst `advancePtr` i) w8 smallStep !dst !src !s !unused !un_cnt | un_cnt >= 5 = do let ix = unused `unsafeShiftR` 3 peekByteOff tbl (fromIntegral ix) >>= poke dst smallStep (advancePtr dst 1) src s (unused `unsafeShiftL` 5) (un_cnt - 5) | s == 0 = do if un_cnt == 0 then return dst else do let ix = unused `unsafeShiftR` 3 peekByteOff tbl (fromIntegral ix) >>= poke dst return (dst `advancePtr` 1) | otherwise = do w8 <- peek src let usd_cnt = 5 - un_cnt let bits = w8 .&. complement (bit (8 - usd_cnt) - 1) let ix = (unused .|. 
bits `shiftR` un_cnt) `unsafeShiftR` 3 peekByteOff tbl (fromIntegral ix) >>= poke dst smallStep (advancePtr dst 1) (advancePtr src 1) (pred s) (w8 `shiftL` usd_cnt) (8 - usd_cnt) type EncTable = ByteString unpack5 :: EncTable -> ByteString -> ByteString unpack5 (PS fptr off len) bs | len /= 32 = error $ "base32: unpack5: invalid lookup table size " ++ show len | otherwise = unsafePerformIO $ do withForeignPtr fptr $ \ptr -> do return $ unpack5Ptr (ptr `advancePtr` off) bs {----------------------------------------------------------------------- -- Decoding -----------------------------------------------------------------------} invIx :: Word5 invIx = 255 type Result = Either String cleanup :: IO a -> Result a cleanup io = unsafePerformIO $ catch (io >>= evaluate >>= return . Right) handler where handler (ErrorCall msg) = return (Left msg) pack5Ptr :: Ptr Word5 -> ByteString -> Result ByteString pack5Ptr !tbl bs @ (PS fptr off sz) = cleanup $ do let packedSize = dstSize $ BS.length bs BS.createAndTrim packedSize $ \ dst -> do withForeignPtr fptr $ \ ptr -> do dst_end <- bigStep dst (advancePtr ptr off) sz return (dst_end `minusPtr` dst) where lookupTable :: Word8 -> Word5 lookupTable ix | x == invIx = error $ show (w2c ix) ++ " is not base32 character" | otherwise = x where x = inlinePerformIO (peekByteOff tbl (fromIntegral ix)) {-# INLINE lookupTable #-} dstSize x = d + if m == 0 then 0 else 1 where (d, m) = (x * 5) `quotRem` 8 bigStep !dst !src !s | s > 8 = do pack5_40 dst src bigStep (dst `advancePtr` 5) (src `advancePtr` 8) (s - 8) | otherwise = smallStep dst src s (0 :: Word64) 0 pack5_40 !dst !src = do w64he <- peek (castPtr src) :: IO Word64 let w64 = toBE64' w64he let w40 = putAsW5 (w64 `unsafeShiftR` 00) $ putAsW5 (w64 `unsafeShiftR` 08) $ putAsW5 (w64 `unsafeShiftR` 16) $ putAsW5 (w64 `unsafeShiftR` 24) $ putAsW5 (w64 `unsafeShiftR` 32) $ putAsW5 (w64 `unsafeShiftR` 40) $ putAsW5 (w64 `unsafeShiftR` 48) $ putAsW5 (w64 `unsafeShiftR` 56) 0 pokeW40 w40 where 
putAsW5 :: Word64 -> Word64 -> Word64 {-# INLINE putAsW5 #-} putAsW5 !w8 !acc = (acc `unsafeShiftL` 5) .|. fromIntegral (lookupTable (fromIntegral w8)) pokeW40 :: Word64 -> IO () {-# INLINE pokeW40 #-} pokeW40 !w40 = do poke dst (fromIntegral (w40 `unsafeShiftR` 32) :: Word8) poke (castPtr (dst `advancePtr` 1)) (fromBE32' (fromIntegral w40 :: Word32)) smallStep !dst !src !s !unused !un_cnt | un_cnt >= 8 = do poke dst $ fromIntegral (unused `unsafeShiftR` (un_cnt - 8)) smallStep (dst `advancePtr` 1) src s unused (un_cnt - 8) | s == 0 = return dst | otherwise = do w8 <- peek src if w2c w8 == '=' then if (bit un_cnt - 1) .&. unused == 0 then smallStep dst src 0 0 0 else smallStep dst src 0 (unused `shiftL` (8 - un_cnt)) 8 else smallStep dst (src `advancePtr` 1) (pred s) ((unused `unsafeShiftL` 5) .|. fromIntegral (lookupTable (fromIntegral w8))) (un_cnt + 5) type DecTable = ByteString pack5 :: DecTable -> ByteString -> Result ByteString pack5 (PS fptr off len) bs | len /= 256 = error $ "base32: pack5: invalid lookup table size " ++ show len | otherwise = unsafePerformIO $ do withForeignPtr fptr $ \ptr -> return $ pack5Ptr (ptr `advancePtr` off) bs {----------------------------------------------------------------------- -- Lenient Decoding -----------------------------------------------------------------------} isInAlphabet :: Ptr Word5 -> Word8 -> Bool isInAlphabet !tbl !ix = inlinePerformIO (peekByteOff tbl (fromIntegral ix)) /= invIx pack5Lenient :: DecTable -> ByteString -> Either String ByteString pack5Lenient tbl @ (PS fptr _ _) bs = unsafePerformIO $ do withForeignPtr fptr $ \ !tbl_ptr -> do return $! pack5 tbl $ BS.filter (isInAlphabet tbl_ptr) bs
pxqr/base32-bytestring
src/Data/ByteString/Base32/Internal.hs
bsd-3-clause
8,324
3
21
2,427
2,653
1,352
1,301
185
4
module Signal.Wavelet.Eval.CommonBench where import Control.Arrow ((&&&)) import Signal.Wavelet.Eval.Common {-# INLINE benchLattice #-} benchLattice :: ((Double, Double), [Double]) -> [Double] benchLattice (baseOp, sig) = latticePar baseOp sig dataLattice :: ([Double], [Double]) -> ((Double, Double), [Double]) dataLattice (ls, sig) = ((sin &&& cos) . head $ ls , sig)
jstolarek/lattice-structure-hs
bench/Signal/Wavelet/Eval/CommonBench.hs
bsd-3-clause
387
0
9
64
145
90
55
9
1
{-# LANGUAGE TypeOperators, EmptyDataDecls, RankNTypes #-} {-# LANGUAGE TypeFamilies, DataKinds, PolyKinds, KindSignatures #-} {-# LANGUAGE GADTs, TypeInType, PatternGuards, ScopedTypeVariables #-} -- | -- Module : Data.Type.RList -- Copyright : (c) 2016 Edwin Westbrook -- -- License : BSD3 -- -- Maintainer : westbrook@galois.com -- Stability : experimental -- Portability : GHC -- -- A /right list/, or 'RList', is a list where cons adds to the end, or the -- right-hand side, of a list. This is useful conceptually for contexts of -- name-bindings, where the most recent name-binding is intuitively at the end -- of the context. module Data.Type.RList ( RList, RNil, (:>), (:++:) , Member(..), weakenMemberL , Append(..), mkAppend, mkMonoAppend, proxiesFromAppend , RAssign(..), empty, singleton, get, HApply(..), hget, modify, set , memberElem, SplitAtMemberRet(..), memberSplitAt, map, mapRAssign , map2, head, tail, toList, mapToList, append, foldr, split , members, TypeCtx(..), appendAssoc, appendRNilConsEq, prependRNilEq, Eq1(..) 
) where import Prelude hiding (map, foldr, head, tail, any) import Data.Kind import Data.Type.Equality import Data.Functor.Constant import Data.Typeable ------------------------------------------------------------------------------- -- * Right-lists as a datatype ------------------------------------------------------------------------------- -- | A form of lists where elements are added to the right instead of the left data RList a = RNil | (RList a) :> a type RNil = 'RNil type (:>) = '(:>) -- | Append two 'RList's at the type level type family ((r1 :: RList k) :++: (r2 :: RList k)) :: RList k infixr 5 :++: type instance (r :++: 'RNil) = r type instance (r1 :++: (r2 ':> a)) = (r1 :++: r2) ':> a ------------------------------------------------------------------------------- -- * Proofs of membership in a type-level list ------------------------------------------------------------------------------- {-| A @Member ctx a@ is a \"proof\" that the type @a@ is in the type list @ctx@, meaning that @ctx@ equals > t0 ':>' a ':>' t1 ':>' ... ':>' tn for some types @t0,t1,...,tn@. -} data Member (ctx :: RList k1) (a :: k2) where Member_Base :: Member (ctx :> a) a Member_Step :: Member ctx a -> Member (ctx :> b) a deriving Typeable instance Show (Member r a) where showsPrec p = showsPrecMember (p > 10) where showsPrecMember :: Bool -> Member ctx a -> ShowS showsPrecMember _ Member_Base = showString "Member_Base" showsPrecMember p' (Member_Step prf) = showParen p' $ showString "Member_Step" . showsPrec 10 prf instance TestEquality (Member ctx) where testEquality Member_Base Member_Base = Just Refl testEquality (Member_Step memb1) (Member_Step memb2) | Just Refl <- testEquality memb1 memb2 = Just Refl testEquality _ _ = Nothing instance Eq (Member ctx a) where Member_Base == Member_Base = True (Member_Step memb1) == (Member_Step memb2) = memb1 == memb2 _ == _ = False --toEq :: Member (Nil :> a) b -> b :~: a --toEq Member_Base = Refl --toEq _ = error "Should not happen! 
(toEq)" -- | Weaken a 'Member' proof by prepending another context to the context it -- proves membership in weakenMemberL :: Proxy r1 -> Member r2 a -> Member (r1 :++: r2) a weakenMemberL _ Member_Base = Member_Base weakenMemberL tag (Member_Step mem) = Member_Step (weakenMemberL tag mem) ------------------------------------------------------------ -- * Proofs that one list equals the append of two others ------------------------------------------------------------ {-| An @Append ctx1 ctx2 ctx@ is a \"proof\" that @ctx = ctx1 ':++:' ctx2@. -} data Append ctx1 ctx2 ctx where Append_Base :: Append ctx RNil ctx Append_Step :: Append ctx1 ctx2 ctx -> Append ctx1 (ctx2 :> a) (ctx :> a) -- | Make an 'Append' proof from any 'RAssign' vector for the second -- argument of the append. mkAppend :: RAssign f c2 -> Append c1 c2 (c1 :++: c2) mkAppend MNil = Append_Base mkAppend (c :>: _) = Append_Step (mkAppend c) -- | A version of 'mkAppend' that takes in a 'Proxy' argument. mkMonoAppend :: Proxy c1 -> RAssign f c2 -> Append c1 c2 (c1 :++: c2) mkMonoAppend _ = mkAppend -- | The inverse of 'mkAppend', that builds an 'RAssign' from an 'Append' proxiesFromAppend :: Append c1 c2 c -> RAssign Proxy c2 proxiesFromAppend Append_Base = MNil proxiesFromAppend (Append_Step a) = proxiesFromAppend a :>: Proxy ------------------------------------------------------------------------------- -- * Contexts ------------------------------------------------------------------------------- {-| An @RAssign f r@ an assignment of an @f a@ for each @a@ in the 'RList' @r@ -} data RAssign (f :: k -> *) (c :: RList k) where MNil :: RAssign f RNil (:>:) :: RAssign f c -> f a -> RAssign f (c :> a) -- | Create an empty 'RAssign' vector. empty :: RAssign f RNil empty = MNil -- | Create a singleton 'RAssign' vector. 
singleton :: f a -> RAssign f (RNil :> a) singleton x = MNil :>: x -- | Look up an element of an 'RAssign' vector using a 'Member' proof get :: Member c a -> RAssign f c -> f a get Member_Base (_ :>: x) = x get (Member_Step mem') (mc :>: _) = get mem' mc -- | Heterogeneous type application, including a proof that the input kind of -- the function equals the kind of the type argument data HApply (f :: k1 -> Type) (a :: k2) where HApply :: forall k (f :: k -> Type) (a :: k). f a -> HApply f a -- | Look up an element of an 'RAssign' vector using a 'Member' proof at what -- GHC thinks might be a different kind, i.e., heterogeneously hget :: forall k1 k2 (f :: k1 -> Type) (c :: RList k1) (a :: k2). Member c a -> RAssign f c -> HApply f a hget Member_Base (_ :>: x) = HApply x hget (Member_Step mem') (mc :>: _) = hget mem' mc -- | Modify an element of an 'RAssign' vector using a 'Member' proof. modify :: Member c a -> (f a -> f a) -> RAssign f c -> RAssign f c modify Member_Base f (xs :>: x) = xs :>: f x modify (Member_Step mem') f (xs :>: x) = modify mem' f xs :>: x -- | Set an element of an 'RAssign' vector using a 'Member' proof. 
set :: Member c a -> f a -> RAssign f c -> RAssign f c set memb x = modify memb (const x) -- | Test if an object is in an 'RAssign', returning a 'Member' proof if it is memberElem :: TestEquality f => f a -> RAssign f ctx -> Maybe (Member ctx a) memberElem _ MNil = Nothing memberElem x (_ :>: y) | Just Refl <- testEquality x y = Just Member_Base memberElem x (xs :>: _) = fmap Member_Step $ memberElem x xs -- | Existential return value from 'memberSplitAt' data SplitAtMemberRet f ctx a where SplitAtMemberRet :: RAssign f ctx1 -> f a -> RAssign f ctx2 -> SplitAtMemberRet f (ctx1 :> a :++: ctx2) a -- | Split an assignment at the point specified by a 'Member' proof memberSplitAt :: RAssign f ctx -> Member ctx a -> SplitAtMemberRet f ctx a memberSplitAt (ctx :>: x) Member_Base = SplitAtMemberRet ctx x MNil memberSplitAt (ctx :>: y) (Member_Step memb) = case memberSplitAt ctx memb of SplitAtMemberRet ctx1 x ctx2 -> SplitAtMemberRet ctx1 x (ctx2 :>: y) -- | Map a function on all elements of an 'RAssign' vector. map :: (forall x. f x -> g x) -> RAssign f c -> RAssign g c map _ MNil = MNil map f (mc :>: x) = map f mc :>: f x -- | An alternate name for 'map' that does not clash with the prelude mapRAssign :: (forall x. f x -> g x) -> RAssign f c -> RAssign g c mapRAssign = map -- | Map a binary function on all pairs of elements of two 'RAssign' vectors. map2 :: (forall x. f x -> g x -> h x) -> RAssign f c -> RAssign g c -> RAssign h c map2 _ MNil MNil = MNil map2 f (xs :>: x) (ys :>: y) = map2 f xs ys :>: f x y -- | Take the head of an 'RAssign' head :: RAssign f (ctx :> a) -> f a head (_ :>: x) = x -- | Take the tail of an 'RAssign' tail :: RAssign f (ctx :> a) -> RAssign f ctx tail (xs :>: _) = xs -- | Convert a monomorphic 'RAssign' to a list toList :: RAssign (Constant a) c -> [a] toList = mapToList getConstant -- | Map a function with monomorphic output type across an 'RAssign' to create a -- standard list: -- -- > mapToList f = toList . map (Constant . 
f) mapToList :: forall f ctx b. (forall a. f a -> b) -> RAssign f ctx -> [b] mapToList f = go [] where go :: forall d. [b] -> RAssign f d -> [b] go acc MNil = acc go acc (xs :>: x) = go (f x : acc) xs -- | Append two 'RAssign' vectors. append :: RAssign f c1 -> RAssign f c2 -> RAssign f (c1 :++: c2) append mc MNil = mc append mc1 (mc2 :>: x) = append mc1 mc2 :>: x -- | Perform a right fold across an 'RAssign' foldr :: (forall a. f a -> r -> r) -> r -> RAssign f ctx -> r foldr _ r MNil = r foldr f r (xs :>: x) = f x $ foldr f r xs -- | Split an 'RAssign' vector into two pieces. The first argument is a -- phantom argument that gives the form of the first list piece. split :: (c ~ (c1 :++: c2)) => prx c1 -> RAssign any c2 -> RAssign f c -> (RAssign f c1, RAssign f c2) split _ MNil mc = (mc, MNil) split _ (any :>: _) (mc :>: x) = case split Proxy any mc of (mc1, mc2) -> (mc1, mc2 :>: x) -- | Create a vector of proofs that each type in @c@ is a 'Member' of @c@. members :: RAssign f c -> RAssign (Member c) c members MNil = MNil members (c :>: _) = map Member_Step (members c) :>: Member_Base -- | A type-class which ensures that ctx is a valid context, i.e., has -- | the form (RNil :> t1 :> ... 
:> tn) for some types t1 through tn class TypeCtx ctx where typeCtxProxies :: RAssign Proxy ctx instance TypeCtx 'RNil where typeCtxProxies = MNil instance TypeCtx ctx => TypeCtx (ctx ':> a) where typeCtxProxies = typeCtxProxies :>: Proxy -- | Proof that append on right-lists is associative appendAssoc :: f1 ctx1 -> f2 ctx2 -> RAssign f3 ctx3 -> ctx1 :++: (ctx2 :++: ctx3) :~: (ctx1 :++: ctx2) :++: ctx3 appendAssoc _ _ MNil = Refl appendAssoc c1 c2 (c3 :>: _) = case appendAssoc c1 c2 c3 of Refl -> Refl -- | Proof that appending a right-list that starts with @a@ is the same as -- consing @a@ and then appending appendRNilConsEq :: prx1 ps1 -> prx_a a -> RAssign f ps2 -> (ps1 :++: (RNil :> a :++: ps2)) :~: (ps1 :> a :++: ps2) appendRNilConsEq _ _ MNil = Refl appendRNilConsEq ps1 a (ps2 :>: _) | Refl <- appendRNilConsEq ps1 a ps2 = Refl -- | Proof that prepending an empty 'RList' is the identity prependRNilEq :: RAssign f ctx -> RNil :++: ctx :~: ctx prependRNilEq MNil = Refl prependRNilEq (ctx :>: _) | Refl <- prependRNilEq ctx = Refl instance TestEquality f => TestEquality (RAssign f) where testEquality MNil MNil = Just Refl testEquality (xs1 :>: x1) (xs2 :>: x2) | Just Refl <- testEquality xs1 xs2 , Just Refl <- testEquality x1 x2 = Just Refl testEquality _ _ = Nothing class Eq1 f where eq1 :: f a -> f a -> Bool instance Eq1 f => Eq (RAssign f ctx) where MNil == MNil = True (xs1 :>: x1) == (xs2 :>: x2) = xs1 == xs2 && eq1 x1 x2
eddywestbrook/hobbits
src/Data/Type/RList.hs
bsd-3-clause
10,946
0
13
2,310
3,187
1,685
1,502
159
2
{- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 \section[SimplCore]{Driver for simplifying @Core@ programs} -} {-# LANGUAGE CPP #-} module SimplCore ( core2core, simplifyExpr ) where #include "HsVersions.h" import DynFlags import CoreSyn import HscTypes import CSE ( cseProgram ) import Rules ( mkRuleBase, unionRuleBase, extendRuleBaseList, ruleCheckProgram, addRuleInfo, ) import PprCore ( pprCoreBindings, pprCoreExpr ) import OccurAnal ( occurAnalysePgm, occurAnalyseExpr ) import IdInfo import CoreStats ( coreBindsSize, coreBindsStats, exprSize ) import CoreUtils ( mkTicks, stripTicksTop ) import CoreLint ( endPass, lintPassResult, dumpPassResult, lintAnnots ) import Simplify ( simplTopBinds, simplExpr, simplRules ) import SimplUtils ( simplEnvForGHCi, activeRule ) import SimplEnv import SimplMonad import CoreMonad import qualified ErrUtils as Err import FloatIn ( floatInwards ) import FloatOut ( floatOutwards ) import FamInstEnv import Id import ErrUtils ( withTiming ) import BasicTypes ( CompilerPhase(..), isDefaultInlinePragma ) import VarSet import VarEnv import LiberateCase ( liberateCase ) import SAT ( doStaticArgs ) import Specialise ( specProgram) import SpecConstr ( specConstrProgram) import DmdAnal ( dmdAnalProgram ) import CallArity ( callArityAnalProgram ) import WorkWrap ( wwTopBinds ) import Vectorise ( vectorise ) import SrcLoc import Util import Module import Maybes import UniqSupply ( UniqSupply, mkSplitUniqSupply, splitUniqSupply ) import Outputable import Control.Monad #ifdef GHCI import DynamicLoading ( loadPlugins ) import Plugins ( installCoreToDos ) #endif {- ************************************************************************ * * \subsection{The driver for the simplifier} * * ************************************************************************ -} core2core :: HscEnv -> ModGuts -> IO ModGuts core2core hsc_env guts@(ModGuts { mg_module = mod , mg_loc = loc , mg_deps = deps , mg_rdr_env = rdr_env }) = do { us <- 
mkSplitUniqSupply 's' -- make sure all plugins are loaded ; let builtin_passes = getCoreToDo dflags orph_mods = mkModuleSet (mod : dep_orphs deps) ; ; (guts2, stats) <- runCoreM hsc_env hpt_rule_base us mod orph_mods print_unqual loc $ do { all_passes <- addPluginPasses builtin_passes ; runCorePasses all_passes guts } ; Err.dumpIfSet_dyn dflags Opt_D_dump_simpl_stats "Grand total simplifier statistics" (pprSimplCount stats) ; return guts2 } where dflags = hsc_dflags hsc_env home_pkg_rules = hptRules hsc_env (dep_mods deps) hpt_rule_base = mkRuleBase home_pkg_rules print_unqual = mkPrintUnqualified dflags rdr_env -- mod: get the module out of the current HscEnv so we can retrieve it from the monad. -- This is very convienent for the users of the monad (e.g. plugins do not have to -- consume the ModGuts to find the module) but somewhat ugly because mg_module may -- _theoretically_ be changed during the Core pipeline (it's part of ModGuts), which -- would mean our cached value would go out of date. 
{- ************************************************************************ * * Generating the main optimisation pipeline * * ************************************************************************ -} getCoreToDo :: DynFlags -> [CoreToDo] getCoreToDo dflags = flatten_todos core_todo where opt_level = optLevel dflags phases = simplPhases dflags max_iter = maxSimplIterations dflags rule_check = ruleCheck dflags call_arity = gopt Opt_CallArity dflags strictness = gopt Opt_Strictness dflags full_laziness = gopt Opt_FullLaziness dflags do_specialise = gopt Opt_Specialise dflags do_float_in = gopt Opt_FloatIn dflags cse = gopt Opt_CSE dflags spec_constr = gopt Opt_SpecConstr dflags liberate_case = gopt Opt_LiberateCase dflags late_dmd_anal = gopt Opt_LateDmdAnal dflags static_args = gopt Opt_StaticArgumentTransformation dflags rules_on = gopt Opt_EnableRewriteRules dflags eta_expand_on = gopt Opt_DoLambdaEtaExpansion dflags ww_on = gopt Opt_WorkerWrapper dflags maybe_rule_check phase = runMaybe rule_check (CoreDoRuleCheck phase) maybe_strictness_before phase = runWhen (phase `elem` strictnessBefore dflags) CoreDoStrictness base_mode = SimplMode { sm_phase = panic "base_mode" , sm_names = [] , sm_rules = rules_on , sm_eta_expand = eta_expand_on , sm_inline = True , sm_case_case = True } simpl_phase phase names iter = CoreDoPasses $ [ maybe_strictness_before phase , CoreDoSimplify iter (base_mode { sm_phase = Phase phase , sm_names = names }) , maybe_rule_check (Phase phase) ] -- Vectorisation can introduce a fair few common sub expressions involving -- DPH primitives. For example, see the Reverse test from dph-examples. -- We need to eliminate these common sub expressions before their definitions -- are inlined in phase 2. The CSE introduces lots of v1 = v2 bindings, -- so we also run simpl_gently to inline them. 
++ (if gopt Opt_Vectorise dflags && phase == 3 then [CoreCSE, simpl_gently] else []) vectorisation = runWhen (gopt Opt_Vectorise dflags) $ CoreDoPasses [ simpl_gently, CoreDoVectorisation ] -- By default, we have 2 phases before phase 0. -- Want to run with inline phase 2 after the specialiser to give -- maximum chance for fusion to work before we inline build/augment -- in phase 1. This made a difference in 'ansi' where an -- overloaded function wasn't inlined till too late. -- Need phase 1 so that build/augment get -- inlined. I found that spectral/hartel/genfft lost some useful -- strictness in the function sumcode' if augment is not inlined -- before strictness analysis runs simpl_phases = CoreDoPasses [ simpl_phase phase ["main"] max_iter | phase <- [phases, phases-1 .. 1] ] -- initial simplify: mk specialiser happy: minimum effort please simpl_gently = CoreDoSimplify max_iter (base_mode { sm_phase = InitialPhase , sm_names = ["Gentle"] , sm_rules = rules_on -- Note [RULEs enabled in SimplGently] , sm_inline = False , sm_case_case = False }) -- Don't do case-of-case transformations. -- This makes full laziness work better strictness_pass = if ww_on then [CoreDoStrictness,CoreDoWorkerWrapper] else [CoreDoStrictness] -- New demand analyser demand_analyser = (CoreDoPasses ( strictness_pass ++ [simpl_phase 0 ["post-worker-wrapper"] max_iter] )) core_todo = if opt_level == 0 then [ vectorisation , CoreDoSimplify max_iter (base_mode { sm_phase = Phase 0 , sm_names = ["Non-opt simplification"] }) ] else {- opt_level >= 1 -} [ -- We want to do the static argument transform before full laziness as it -- may expose extra opportunities to float things outwards. 
However, to fix -- up the output of the transformation we need at do at least one simplify -- after this before anything else runWhen static_args (CoreDoPasses [ simpl_gently, CoreDoStaticArgs ]), -- We run vectorisation here for now, but we might also try to run -- it later vectorisation, -- initial simplify: mk specialiser happy: minimum effort please simpl_gently, -- Specialisation is best done before full laziness -- so that overloaded functions have all their dictionary lambdas manifest runWhen do_specialise CoreDoSpecialising, runWhen full_laziness $ CoreDoFloatOutwards FloatOutSwitches { floatOutLambdas = Just 0, floatOutConstants = True, floatOutOverSatApps = False }, -- Was: gentleFloatOutSwitches -- -- I have no idea why, but not floating constants to -- top level is very bad in some cases. -- -- Notably: p_ident in spectral/rewrite -- Changing from "gentle" to "constantsOnly" -- improved rewrite's allocation by 19%, and -- made 0.0% difference to any other nofib -- benchmark -- -- Not doing floatOutOverSatApps yet, we'll do -- that later on when we've had a chance to get more -- accurate arity information. In fact it makes no -- difference at all to performance if we do it here, -- but maybe we save some unnecessary to-and-fro in -- the simplifier. simpl_phases, -- Phase 0: allow all Ids to be inlined now -- This gets foldr inlined before strictness analysis -- At least 3 iterations because otherwise we land up with -- huge dead expressions because of an infelicity in the -- simpifier. -- let k = BIG in foldr k z xs -- ==> let k = BIG in letrec go = \xs -> ...(k x).... in go xs -- ==> let k = BIG in letrec go = \xs -> ...(BIG x).... in go xs -- Don't stop now! simpl_phase 0 ["main"] (max max_iter 3), runWhen do_float_in CoreDoFloatInwards, -- Run float-inwards immediately before the strictness analyser -- Doing so pushes bindings nearer their use site and hence makes -- them more likely to be strict. 
These bindings might only show -- up after the inlining from simplification. Example in fulsom, -- Csg.calc, where an arg of timesDouble thereby becomes strict. runWhen call_arity $ CoreDoPasses [ CoreDoCallArity , simpl_phase 0 ["post-call-arity"] max_iter ], runWhen strictness demand_analyser, runWhen full_laziness $ CoreDoFloatOutwards FloatOutSwitches { floatOutLambdas = floatLamArgs dflags, floatOutConstants = True, floatOutOverSatApps = True }, -- nofib/spectral/hartel/wang doubles in speed if you -- do full laziness late in the day. It only happens -- after fusion and other stuff, so the early pass doesn't -- catch it. For the record, the redex is -- f_el22 (f_el21 r_midblock) runWhen cse CoreCSE, -- We want CSE to follow the final full-laziness pass, because it may -- succeed in commoning up things floated out by full laziness. -- CSE used to rely on the no-shadowing invariant, but it doesn't any more runWhen do_float_in CoreDoFloatInwards, maybe_rule_check (Phase 0), -- Case-liberation for -O2. This should be after -- strictness analysis and the simplification which follows it. runWhen liberate_case (CoreDoPasses [ CoreLiberateCase, simpl_phase 0 ["post-liberate-case"] max_iter ]), -- Run the simplifier after LiberateCase to vastly -- reduce the possiblility of shadowing -- Reason: see Note [Shadowing] in SpecConstr.hs runWhen spec_constr CoreDoSpecConstr, maybe_rule_check (Phase 0), -- Final clean-up simplification: simpl_phase 0 ["final"] max_iter, runWhen late_dmd_anal $ CoreDoPasses ( strictness_pass ++ [simpl_phase 0 ["post-late-ww"] max_iter] ), -- Final run of the demand_analyser, ensures that one-shot thunks are -- really really one-shot thunks. Only needed if the demand analyser -- has run at all. See Note [Final Demand Analyser run] in DmdAnal runWhen (strictness || late_dmd_anal) CoreDoStrictness, maybe_rule_check (Phase 0) ] -- Remove 'CoreDoNothing' and flatten 'CoreDoPasses' for clarity. 
flatten_todos [] = [] flatten_todos (CoreDoNothing : rest) = flatten_todos rest flatten_todos (CoreDoPasses passes : rest) = flatten_todos passes ++ flatten_todos rest flatten_todos (todo : rest) = todo : flatten_todos rest -- Loading plugins addPluginPasses :: [CoreToDo] -> CoreM [CoreToDo] #ifndef GHCI addPluginPasses builtin_passes = return builtin_passes #else addPluginPasses builtin_passes = do { hsc_env <- getHscEnv ; named_plugins <- liftIO (loadPlugins hsc_env) ; foldM query_plug builtin_passes named_plugins } where query_plug todos (_, plug, options) = installCoreToDos plug options todos #endif {- ************************************************************************ * * The CoreToDo interpreter * * ************************************************************************ -} runCorePasses :: [CoreToDo] -> ModGuts -> CoreM ModGuts runCorePasses passes guts = foldM do_pass guts passes where do_pass guts CoreDoNothing = return guts do_pass guts (CoreDoPasses ps) = runCorePasses ps guts do_pass guts pass = withTiming getDynFlags (ppr pass <+> brackets (ppr mod)) (const ()) $ do { guts' <- lintAnnots (ppr pass) (doCorePass pass) guts ; endPass pass (mg_binds guts') (mg_rules guts') ; return guts' } mod = mg_module guts doCorePass :: CoreToDo -> ModGuts -> CoreM ModGuts doCorePass pass@(CoreDoSimplify {}) = {-# SCC "Simplify" #-} simplifyPgm pass doCorePass CoreCSE = {-# SCC "CommonSubExpr" #-} doPass cseProgram doCorePass CoreLiberateCase = {-# SCC "LiberateCase" #-} doPassD liberateCase doCorePass CoreDoFloatInwards = {-# SCC "FloatInwards" #-} doPassD floatInwards doCorePass (CoreDoFloatOutwards f) = {-# SCC "FloatOutwards" #-} doPassDUM (floatOutwards f) doCorePass CoreDoStaticArgs = {-# SCC "StaticArgs" #-} doPassU doStaticArgs doCorePass CoreDoCallArity = {-# SCC "CallArity" #-} doPassD callArityAnalProgram doCorePass CoreDoStrictness = {-# SCC "NewStranal" #-} doPassDFM dmdAnalProgram doCorePass CoreDoWorkerWrapper = {-# SCC "WorkWrap" #-} doPassDFU 
wwTopBinds doCorePass CoreDoSpecialising = {-# SCC "Specialise" #-} specProgram doCorePass CoreDoSpecConstr = {-# SCC "SpecConstr" #-} specConstrProgram doCorePass CoreDoVectorisation = {-# SCC "Vectorise" #-} vectorise doCorePass CoreDoPrintCore = observe printCore doCorePass (CoreDoRuleCheck phase pat) = ruleCheckPass phase pat doCorePass CoreDoNothing = return doCorePass (CoreDoPasses passes) = runCorePasses passes #ifdef GHCI doCorePass (CoreDoPluginPass _ pass) = {-# SCC "Plugin" #-} pass #endif doCorePass pass = pprPanic "doCorePass" (ppr pass) {- ************************************************************************ * * \subsection{Core pass combinators} * * ************************************************************************ -} printCore :: DynFlags -> CoreProgram -> IO () printCore dflags binds = Err.dumpIfSet dflags True "Print Core" (pprCoreBindings binds) ruleCheckPass :: CompilerPhase -> String -> ModGuts -> CoreM ModGuts ruleCheckPass current_phase pat guts = withTiming getDynFlags (text "RuleCheck"<+>brackets (ppr $ mg_module guts)) (const ()) $ do { rb <- getRuleBase ; dflags <- getDynFlags ; vis_orphs <- getVisibleOrphanMods ; liftIO $ log_action dflags dflags NoReason Err.SevDump noSrcSpan defaultDumpStyle (ruleCheckProgram current_phase pat (RuleEnv rb vis_orphs) (mg_binds guts)) ; return guts } doPassDUM :: (DynFlags -> UniqSupply -> CoreProgram -> IO CoreProgram) -> ModGuts -> CoreM ModGuts doPassDUM do_pass = doPassM $ \binds -> do dflags <- getDynFlags us <- getUniqueSupplyM liftIO $ do_pass dflags us binds doPassDM :: (DynFlags -> CoreProgram -> IO CoreProgram) -> ModGuts -> CoreM ModGuts doPassDM do_pass = doPassDUM (\dflags -> const (do_pass dflags)) doPassD :: (DynFlags -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts doPassD do_pass = doPassDM (\dflags -> return . 
do_pass dflags) doPassDU :: (DynFlags -> UniqSupply -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts doPassDU do_pass = doPassDUM (\dflags us -> return . do_pass dflags us) doPassU :: (UniqSupply -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts doPassU do_pass = doPassDU (const do_pass) doPassDFM :: (DynFlags -> FamInstEnvs -> CoreProgram -> IO CoreProgram) -> ModGuts -> CoreM ModGuts doPassDFM do_pass guts = do dflags <- getDynFlags p_fam_env <- getPackageFamInstEnv let fam_envs = (p_fam_env, mg_fam_inst_env guts) doPassM (liftIO . do_pass dflags fam_envs) guts doPassDFU :: (DynFlags -> FamInstEnvs -> UniqSupply -> CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts doPassDFU do_pass guts = do dflags <- getDynFlags us <- getUniqueSupplyM p_fam_env <- getPackageFamInstEnv let fam_envs = (p_fam_env, mg_fam_inst_env guts) doPass (do_pass dflags fam_envs us) guts -- Most passes return no stats and don't change rules: these combinators -- let us lift them to the full blown ModGuts+CoreM world doPassM :: Monad m => (CoreProgram -> m CoreProgram) -> ModGuts -> m ModGuts doPassM bind_f guts = do binds' <- bind_f (mg_binds guts) return (guts { mg_binds = binds' }) doPass :: (CoreProgram -> CoreProgram) -> ModGuts -> CoreM ModGuts doPass bind_f guts = return $ guts { mg_binds = bind_f (mg_binds guts) } -- Observer passes just peek; don't modify the bindings at all observe :: (DynFlags -> CoreProgram -> IO a) -> ModGuts -> CoreM ModGuts observe do_pass = doPassM $ \binds -> do dflags <- getDynFlags _ <- liftIO $ do_pass dflags binds return binds {- ************************************************************************ * * Gentle simplification * * ************************************************************************ -} simplifyExpr :: DynFlags -- includes spec of what core-to-core passes to do -> CoreExpr -> IO CoreExpr -- simplifyExpr is called by the driver to simplify an -- expression typed in at the interactive prompt -- -- Also used by 
Template Haskell simplifyExpr dflags expr = withTiming (pure dflags) (text "Simplify [expr]") (const ()) $ do { ; us <- mkSplitUniqSupply 's' ; let sz = exprSize expr ; (expr', counts) <- initSmpl dflags emptyRuleEnv emptyFamInstEnvs us sz (simplExprGently (simplEnvForGHCi dflags) expr) ; Err.dumpIfSet dflags (dopt Opt_D_dump_simpl_stats dflags) "Simplifier statistics" (pprSimplCount counts) ; Err.dumpIfSet_dyn dflags Opt_D_dump_simpl "Simplified expression" (pprCoreExpr expr') ; return expr' } simplExprGently :: SimplEnv -> CoreExpr -> SimplM CoreExpr -- Simplifies an expression -- does occurrence analysis, then simplification -- and repeats (twice currently) because one pass -- alone leaves tons of crud. -- Used (a) for user expressions typed in at the interactive prompt -- (b) the LHS and RHS of a RULE -- (c) Template Haskell splices -- -- The name 'Gently' suggests that the SimplifierMode is SimplGently, -- and in fact that is so.... but the 'Gently' in simplExprGently doesn't -- enforce that; it just simplifies the expression twice -- It's important that simplExprGently does eta reduction; see -- Note [Simplifying the left-hand side of a RULE] above. The -- simplifier does indeed do eta reduction (it's in Simplify.completeLam) -- but only if -O is on. 
simplExprGently env expr = do expr1 <- simplExpr env (occurAnalyseExpr expr) simplExpr env (occurAnalyseExpr expr1) {- ************************************************************************ * * \subsection{The driver for the simplifier} * * ************************************************************************ -} simplifyPgm :: CoreToDo -> ModGuts -> CoreM ModGuts simplifyPgm pass guts = do { hsc_env <- getHscEnv ; us <- getUniqueSupplyM ; rb <- getRuleBase ; liftIOWithCount $ simplifyPgmIO pass hsc_env us rb guts } simplifyPgmIO :: CoreToDo -> HscEnv -> UniqSupply -> RuleBase -> ModGuts -> IO (SimplCount, ModGuts) -- New bindings simplifyPgmIO pass@(CoreDoSimplify max_iterations mode) hsc_env us hpt_rule_base guts@(ModGuts { mg_module = this_mod , mg_rdr_env = rdr_env , mg_deps = deps , mg_binds = binds, mg_rules = rules , mg_fam_inst_env = fam_inst_env }) = do { (termination_msg, it_count, counts_out, guts') <- do_iteration us 1 [] binds rules ; Err.dumpIfSet dflags (dopt Opt_D_verbose_core2core dflags && dopt Opt_D_dump_simpl_stats dflags) "Simplifier statistics for following pass" (vcat [text termination_msg <+> text "after" <+> ppr it_count <+> text "iterations", blankLine, pprSimplCount counts_out]) ; return (counts_out, guts') } where dflags = hsc_dflags hsc_env print_unqual = mkPrintUnqualified dflags rdr_env simpl_env = mkSimplEnv mode active_rule = activeRule simpl_env do_iteration :: UniqSupply -> Int -- Counts iterations -> [SimplCount] -- Counts from earlier iterations, reversed -> CoreProgram -- Bindings in -> [CoreRule] -- and orphan rules -> IO (String, Int, SimplCount, ModGuts) do_iteration us iteration_no counts_so_far binds rules -- iteration_no is the number of the iteration we are -- about to begin, with '1' for the first | iteration_no > max_iterations -- Stop if we've run out of iterations = WARN( debugIsOn && (max_iterations > 2) , hang (text "Simplifier bailing out after" <+> int max_iterations <+> text "iterations" <+> (brackets $ hsep 
$ punctuate comma $ map (int . simplCountN) (reverse counts_so_far))) 2 (text "Size =" <+> ppr (coreBindsStats binds))) -- Subtract 1 from iteration_no to get the -- number of iterations we actually completed return ( "Simplifier baled out", iteration_no - 1 , totalise counts_so_far , guts { mg_binds = binds, mg_rules = rules } ) -- Try and force thunks off the binds; significantly reduces -- space usage, especially with -O. JRS, 000620. | let sz = coreBindsSize binds , () <- sz `seq` () -- Force it = do { -- Occurrence analysis let { -- Note [Vectorisation declarations and occurrences] -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- During the 'InitialPhase' (i.e., before vectorisation), we need to make sure -- that the right-hand sides of vectorisation declarations are taken into -- account during occurrence analysis. After the 'InitialPhase', we need to ensure -- that the binders representing variable vectorisation declarations are kept alive. -- (In contrast to automatically vectorised variables, their unvectorised versions -- don't depend on them.) vectVars = mkVarSet $ catMaybes [ fmap snd $ lookupVarEnv (vectInfoVar (mg_vect_info guts)) bndr | Vect bndr _ <- mg_vect_decls guts] ++ catMaybes [ fmap snd $ lookupVarEnv (vectInfoVar (mg_vect_info guts)) bndr | bndr <- bindersOfBinds binds] -- FIXME: This second comprehensions is only needed as long as we -- have vectorised bindings where we get "Could NOT call -- vectorised from original version". 
; (maybeVects, maybeVectVars) = case sm_phase mode of InitialPhase -> (mg_vect_decls guts, vectVars) _ -> ([], vectVars) ; tagged_binds = {-# SCC "OccAnal" #-} occurAnalysePgm this_mod active_rule rules maybeVects maybeVectVars binds } ; Err.dumpIfSet_dyn dflags Opt_D_dump_occur_anal "Occurrence analysis" (pprCoreBindings tagged_binds); -- Get any new rules, and extend the rule base -- See Note [Overall plumbing for rules] in Rules.hs -- We need to do this regularly, because simplification can -- poke on IdInfo thunks, which in turn brings in new rules -- behind the scenes. Otherwise there's a danger we'll simply -- miss the rules for Ids hidden inside imported inlinings eps <- hscEPS hsc_env ; let { rule_base1 = unionRuleBase hpt_rule_base (eps_rule_base eps) ; rule_base2 = extendRuleBaseList rule_base1 rules ; fam_envs = (eps_fam_inst_env eps, fam_inst_env) ; vis_orphs = this_mod : dep_orphs deps } ; -- Simplify the program ((binds1, rules1), counts1) <- initSmpl dflags (mkRuleEnv rule_base2 vis_orphs) fam_envs us1 sz $ do { env1 <- {-# SCC "SimplTopBinds" #-} simplTopBinds simpl_env tagged_binds -- Apply the substitution to rules defined in this module -- for imported Ids. 
Eg RULE map my_f = blah -- If we have a substitution my_f :-> other_f, we'd better -- apply it to the rule to, or it'll never match ; rules1 <- simplRules env1 Nothing rules ; return (getFloatBinds env1, rules1) } ; -- Stop if nothing happened; don't dump output if isZeroSimplCount counts1 then return ( "Simplifier reached fixed point", iteration_no , totalise (counts1 : counts_so_far) -- Include "free" ticks , guts { mg_binds = binds1, mg_rules = rules1 } ) else do { -- Short out indirections -- We do this *after* at least one run of the simplifier -- because indirection-shorting uses the export flag on *occurrences* -- and that isn't guaranteed to be ok until after the first run propagates -- stuff from the binding site to its occurrences -- -- ToDo: alas, this means that indirection-shorting does not happen at all -- if the simplifier does nothing (not common, I know, but unsavoury) let { binds2 = {-# SCC "ZapInd" #-} shortOutIndirections binds1 } ; -- Dump the result of this iteration dump_end_iteration dflags print_unqual iteration_no counts1 binds2 rules1 ; lintPassResult hsc_env pass binds2 ; -- Loop do_iteration us2 (iteration_no + 1) (counts1:counts_so_far) binds2 rules1 } } | otherwise = panic "do_iteration" where (us1, us2) = splitUniqSupply us -- Remember the counts_so_far are reversed totalise :: [SimplCount] -> SimplCount totalise = foldr (\c acc -> acc `plusSimplCount` c) (zeroSimplCount dflags) simplifyPgmIO _ _ _ _ _ = panic "simplifyPgmIO" ------------------- dump_end_iteration :: DynFlags -> PrintUnqualified -> Int -> SimplCount -> CoreProgram -> [CoreRule] -> IO () dump_end_iteration dflags print_unqual iteration_no counts binds rules = dumpPassResult dflags print_unqual mb_flag hdr pp_counts binds rules where mb_flag | dopt Opt_D_dump_simpl_iterations dflags = Just Opt_D_dump_simpl_iterations | otherwise = Nothing -- Show details if Opt_D_dump_simpl_iterations is on hdr = text "Simplifier iteration=" <> int iteration_no pp_counts = vcat [ text 
"---- Simplifier counts for" <+> hdr , pprSimplCount counts , text "---- End of simplifier counts for" <+> hdr ] {- ************************************************************************ * * Shorting out indirections * * ************************************************************************ If we have this: x_local = <expression> ...bindings... x_exported = x_local where x_exported is exported, and x_local is not, then we replace it with this: x_exported = <expression> x_local = x_exported ...bindings... Without this we never get rid of the x_exported = x_local thing. This save a gratuitous jump (from \tr{x_exported} to \tr{x_local}), and makes strictness information propagate better. This used to happen in the final phase, but it's tidier to do it here. Note [Transferring IdInfo] ~~~~~~~~~~~~~~~~~~~~~~~~~~ We want to propagage any useful IdInfo on x_local to x_exported. STRICTNESS: if we have done strictness analysis, we want the strictness info on x_local to transfer to x_exported. Hence the copyIdInfo call. RULES: we want to *add* any RULES for x_local to x_exported. Note [Messing up the exported Id's RULES] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We must be careful about discarding (obviously) or even merging the RULES on the exported Id. The example that went bad on me at one stage was this one: iterate :: (a -> a) -> a -> [a] [Exported] iterate = iterateList iterateFB c f x = x `c` iterateFB c f (f x) iterateList f x = x : iterateList f (f x) [Not exported] {-# RULES "iterate" forall f x. iterate f x = build (\c _n -> iterateFB c f x) "iterateFB" iterateFB (:) = iterateList #-} This got shorted out to: iterateList :: (a -> a) -> a -> [a] iterateList = iterate iterateFB c f x = x `c` iterateFB c f (f x) iterate f x = x : iterate f (f x) {-# RULES "iterate" forall f x. 
iterate f x = build (\c _n -> iterateFB c f x) "iterateFB" iterateFB (:) = iterate #-} And now we get an infinite loop in the rule system iterate f x -> build (\cn -> iterateFB c f x) -> iterateFB (:) f x -> iterate f x Old "solution": use rule switching-off pragmas to get rid of iterateList in the first place But in principle the user *might* want rules that only apply to the Id he says. And inline pragmas are similar {-# NOINLINE f #-} f = local local = <stuff> Then we do not want to get rid of the NOINLINE. Hence hasShortableIdinfo. Note [Rules and indirection-zapping] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Problem: what if x_exported has a RULE that mentions something in ...bindings...? Then the things mentioned can be out of scope! Solution a) Make sure that in this pass the usage-info from x_exported is available for ...bindings... b) If there are any such RULES, rec-ify the entire top-level. It'll get sorted out next time round Other remarks ~~~~~~~~~~~~~ If more than one exported thing is equal to a local thing (i.e., the local thing really is shared), then we do one only: \begin{verbatim} x_local = .... x_exported1 = x_local x_exported2 = x_local ==> x_exported1 = .... x_exported2 = x_exported1 \end{verbatim} We rely on prior eta reduction to simplify things like \begin{verbatim} x_exported = /\ tyvars -> x_local tyvars ==> x_exported = x_local \end{verbatim} Hence,there's a possibility of leaving unchanged something like this: \begin{verbatim} x_local = .... x_exported1 = x_local Int \end{verbatim} By the time we've thrown away the types in STG land this could be eliminated. But I don't think it's very common and it's dangerous to do this fiddling in STG land because we might elminate a binding that's mentioned in the unfolding for something. Note [Indirection zapping and ticks] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Unfortunately this is another place where we need a special case for ticks. 
The following happens quite regularly: x_local = <expression> x_exported = tick<x> x_local Which we want to become: x_exported = tick<x> <expression> As it makes no sense to keep the tick and the expression on separate bindings. Note however that that this might increase the ticks scoping over the execution of x_local, so we can only do this for floatable ticks. More often than not, other references will be unfoldings of x_exported, and therefore carry the tick anyway. -} type IndEnv = IdEnv (Id, [Tickish Var]) -- Maps local_id -> exported_id, ticks shortOutIndirections :: CoreProgram -> CoreProgram shortOutIndirections binds | isEmptyVarEnv ind_env = binds | no_need_to_flatten = binds' -- See Note [Rules and indirect-zapping] | otherwise = [Rec (flattenBinds binds')] -- for this no_need_to_flatten stuff where ind_env = makeIndEnv binds -- These exported Ids are the subjects of the indirection-elimination exp_ids = map fst $ varEnvElts ind_env exp_id_set = mkVarSet exp_ids no_need_to_flatten = all (null . ruleInfoRules . 
idSpecialisation) exp_ids binds' = concatMap zap binds zap (NonRec bndr rhs) = [NonRec b r | (b,r) <- zapPair (bndr,rhs)] zap (Rec pairs) = [Rec (concatMap zapPair pairs)] zapPair (bndr, rhs) | bndr `elemVarSet` exp_id_set = [] | Just (exp_id, ticks) <- lookupVarEnv ind_env bndr = [(transferIdInfo exp_id bndr, mkTicks ticks rhs), (bndr, Var exp_id)] | otherwise = [(bndr,rhs)] makeIndEnv :: [CoreBind] -> IndEnv makeIndEnv binds = foldr add_bind emptyVarEnv binds where add_bind :: CoreBind -> IndEnv -> IndEnv add_bind (NonRec exported_id rhs) env = add_pair (exported_id, rhs) env add_bind (Rec pairs) env = foldr add_pair env pairs add_pair :: (Id,CoreExpr) -> IndEnv -> IndEnv add_pair (exported_id, exported) env | (ticks, Var local_id) <- stripTicksTop tickishFloatable exported , shortMeOut env exported_id local_id = extendVarEnv env local_id (exported_id, ticks) add_pair _ env = env ----------------- shortMeOut :: IndEnv -> Id -> Id -> Bool shortMeOut ind_env exported_id local_id -- The if-then-else stuff is just so I can get a pprTrace to see -- how often I don't get shorting out because of IdInfo stuff = if isExportedId exported_id && -- Only if this is exported isLocalId local_id && -- Only if this one is defined in this -- module, so that we *can* change its -- binding to be the exported thing! 
not (isExportedId local_id) && -- Only if this one is not itself exported, -- since the transformation will nuke it not (local_id `elemVarEnv` ind_env) -- Only if not already substituted for then if hasShortableIdInfo exported_id then True -- See Note [Messing up the exported Id's IdInfo] else WARN( True, text "Not shorting out:" <+> ppr exported_id ) False else False ----------------- hasShortableIdInfo :: Id -> Bool -- True if there is no user-attached IdInfo on exported_id, -- so we can safely discard it -- See Note [Messing up the exported Id's IdInfo] hasShortableIdInfo id = isEmptyRuleInfo (ruleInfo info) && isDefaultInlinePragma (inlinePragInfo info) && not (isStableUnfolding (unfoldingInfo info)) where info = idInfo id ----------------- transferIdInfo :: Id -> Id -> Id -- See Note [Transferring IdInfo] -- If we have -- lcl_id = e; exp_id = lcl_id -- and lcl_id has useful IdInfo, we don't want to discard it by going -- gbl_id = e; lcl_id = gbl_id -- Instead, transfer IdInfo from lcl_id to exp_id -- Overwriting, rather than merging, seems to work ok. transferIdInfo exported_id local_id = modifyIdInfo transfer exported_id where local_info = idInfo local_id transfer exp_info = exp_info `setStrictnessInfo` strictnessInfo local_info `setUnfoldingInfo` unfoldingInfo local_info `setInlinePragInfo` inlinePragInfo local_info `setRuleInfo` addRuleInfo (ruleInfo exp_info) new_info new_info = setRuleInfoHead (idName exported_id) (ruleInfo local_info) -- Remember to set the function-name field of the -- rules as we transfer them from one function to another
tjakway/ghcjvm
compiler/simplCore/SimplCore.hs
bsd-3-clause
41,072
2
22
13,888
5,493
2,953
2,540
450
7
{-# LANGUAGE QuasiQuotes #-} module HarmLang.QuasiQuoter where import qualified Language.Haskell.TH as TH import qualified Control.Applicative as Ctrl import Language.Haskell.TH.Quote import Text.ParserCombinators.Parsec import Data.Generics import HarmLang.Parser import HarmLang.Types import HarmLang.Expression parserMap :: GenParser Char st a -> (a -> b) -> GenParser Char st b parserMap parser f = f Ctrl.<$> parser hlParse :: GenParser Char st HLExp hlParse = try (parserMap parsePitchClassSingle ExpPitchClass) <|> try (parserMap parsePitchSingle ExpPitch) <|> try (parserMap parseTimedChordSingle ExpTimedChord) <|> try (parserMap parseNoteSingle ExpNote) <|> try (parserMap parseChordSingle ExpChord) <|> try (parserMap parsePitchProgression ExpPitchProgression) <|> try (parserMap parseChordProgression ExpChordProgression) <|> try (parserMap parseNoteProgression ExpNoteProgression) <|> try (parserMap parseTimedChordProgression ExpTimedChordProgression) <|> try (parserMap parseIntervalSingle ExpInterval) <|> -- TODO: stop being sneaky ;) -- also, add parseIntervalProgression -- and pitch class progressions error "Invalid HarmLang expression." ------------------ --The following takes an HLExp and converts it to an ExpQ by unpacking the inner HL type. 
hlExpToExpQ :: HLExp -> TH.ExpQ hlExpToExpQ (ExpPitchClass a) = makeHlExpQ a hlExpToExpQ (ExpInterval a) = makeHlExpQ a hlExpToExpQ (ExpPitch a) = makeHlExpQ a hlExpToExpQ (ExpTimedChord a) = makeHlExpQ a hlExpToExpQ (ExpNote a) = makeHlExpQ a hlExpToExpQ (ExpChord a) = makeHlExpQ a hlExpToExpQ (ExpPitchProgression a) = makeHlExpQ a hlExpToExpQ (ExpChordProgression a) = makeHlExpQ a hlExpToExpQ (ExpTimedChordProgression a) = makeHlExpQ a hlExpToExpQ (ExpNoteProgression a) = makeHlExpQ a --HLExp to PatQ hlExpToPatQ :: HLExp -> TH.PatQ hlExpToPatQ (ExpPitchClass a) = makeHlPatQ a hlExpToPatQ (ExpInterval a) = makeHlPatQ a hlExpToPatQ (ExpPitch a) = makeHlPatQ a hlExpToPatQ (ExpTimedChord a) = makeHlPatQ a hlExpToPatQ (ExpNote a) = makeHlPatQ a hlExpToPatQ (ExpChord a) = makeHlPatQ a hlExpToPatQ (ExpPitchProgression a) = makeHlPatQ a hlExpToPatQ (ExpChordProgression a) = makeHlPatQ a hlExpToPatQ (ExpTimedChordProgression a) = makeHlPatQ a hlExpToPatQ (ExpNoteProgression a) = makeHlPatQ a --Type driven hacks makeHlExpQ :: Data a => a -> TH.ExpQ makeHlExpQ = dataToExpQ (\ a -> Nothing) makeHlPatQ :: Data a => a -> TH.PatQ makeHlPatQ = dataToPatQ (\ a -> Nothing) -----------OK the bad stuff is over. 
hl :: QuasiQuoter hl = QuasiQuoter { quoteExp = quoteHLExp, quotePat = quoteHLPat } hlParseMonad :: Monad m => (String, Int, Int) -> String -> m HLExp hlParseMonad (file, line, col) s = case runParser p () "" s of Left err -> fail $ show err Right e -> return e where p = do pos <- getPosition setPosition $ (flip setSourceName) file $ (flip setSourceLine) line $ (flip setSourceColumn) col $ pos spaces e <- hlParse eof return e quoteHLExp :: String -> TH.ExpQ quoteHLExp s = do loc <- TH.location let pos = (TH.loc_filename loc, fst (TH.loc_start loc), snd (TH.loc_start loc)) hlExp <- hlParseMonad pos s hlExpToExpQ hlExp --dataToExpQ (const Nothing `extQ` defnothing) hlExp quoteHLPat :: String -> TH.PatQ quoteHLPat s = do loc <- TH.location let pos = (TH.loc_filename loc, fst (TH.loc_start loc), snd (TH.loc_start loc)) hlExp <- hlParseMonad pos s hlExpToPatQ hlExp -- Just for testing hlInterpret :: String -> HLExp hlInterpret = let checkResult (Right var) = var checkResult _ = error "Invalid expression." in checkResult . (parse hlParse "")
lrassaby/harmlang
src/HarmLang/QuasiQuoter.hs
mit
4,115
0
17
1,076
1,153
573
580
90
2
{-# Language RebindableSyntax #-} {-# Language TypeOperators #-} {-# Language FlexibleContexts #-} {-# Language ScopedTypeVariables #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-name-shadowing #-} {-# OPTIONS_GHC -fno-warn-unused-do-bind #-} module PingMulti00 where import Prelude hiding ((>>=), (>>), fail, return) import Symmetry.Language import Symmetry.Verify pingServer :: (DSL repr) => repr (Pid RSing -> Process repr ()) pingServer = lam $ \_ -> return tt master :: (DSL repr) => repr (RMulti -> Int -> Process repr ()) master = lam $ \r -> lam $ \n -> do myPid <- self ps <- spawnMany r n (app pingServer myPid) doMany "loop_0" ps (lam $ \p -> send p tt) return tt mainProc :: (DSL repr) => repr (Int -> ()) mainProc = lam $ \n -> exec $ do r <- newRMulti app (app master r) n main :: IO () main = checkerMain (arb |> mainProc)
abakst/symmetry
checker/tests/pos/PingMultiBaby01.hs
mit
924
0
15
205
306
164
142
24
1
module Model where import Prelude import Yesod import Data.Text (Text,pack) import Database.Persist.Quasi import Data.Time -- You can define all of your database entities in the entities file. -- You can find more information on persistent and how to declare entities -- at: -- http://www.yesodweb.com/book/persistent/ share [mkPersist sqlSettings, mkMigrate "migrateAll"] $(persistFileWith lowerCaseSettings "config/models") defaultVotes :: Int defaultVotes = 3 defaultVotesText :: Text defaultVotesText = pack $ show defaultVotes remainingVotesKey :: HackDayId -> Text remainingVotesKey hackDayId = pack $ "remainingVotes" ++ show hackDayId hackDayVotingOpen :: HackDay -> Bool hackDayVotingOpen hackday = not $ hackDayVotingClosed hackday
MaxGabriel/hackvote-yesod
Model.hs
cc0-1.0
751
0
8
102
146
80
66
-1
-1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.Glacier.SetVaultNotifications -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- This operation configures notifications that will be sent when specific -- events happen to a vault. By default, you don\'t get any notifications. -- -- To configure vault notifications, send a PUT request to the -- 'notification-configuration' subresource of the vault. The request -- should include a JSON document that provides an Amazon SNS topic and -- specific events for which you want Amazon Glacier to send notifications -- to the topic. -- -- Amazon SNS topics must grant permission to the vault to be allowed to -- publish notifications to the topic. You can configure a vault to publish -- a notification for the following vault events: -- -- - __ArchiveRetrievalCompleted__ This event occurs when a job that was -- initiated for an archive retrieval is completed (InitiateJob). The -- status of the completed job can be \"Succeeded\" or \"Failed\". The -- notification sent to the SNS topic is the same output as returned -- from DescribeJob. -- - __InventoryRetrievalCompleted__ This event occurs when a job that -- was initiated for an inventory retrieval is completed (InitiateJob). -- The status of the completed job can be \"Succeeded\" or \"Failed\". -- The notification sent to the SNS topic is the same output as -- returned from DescribeJob. -- -- An AWS account has full permission to perform all operations (actions). 
-- However, AWS Identity and Access Management (IAM) users don\'t have any -- permissions by default. You must grant them explicit permission to -- perform specific actions. For more information, see -- <http://docs.aws.amazon.com/amazonglacier/latest/dev/using-iam-with-amazon-glacier.html Access Control Using AWS Identity and Access Management (IAM)>. -- -- For conceptual information and underlying REST API, go to -- <http://docs.aws.amazon.com/amazonglacier/latest/dev/configuring-notifications.html Configuring Vault Notifications in Amazon Glacier> -- and -- <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-vault-notifications-put.html Set Vault Notification Configuration> -- in the /Amazon Glacier Developer Guide/. -- -- /See:/ <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-SetVaultNotifications.html AWS API Reference> for SetVaultNotifications. module Network.AWS.Glacier.SetVaultNotifications ( -- * Creating a Request setVaultNotifications , SetVaultNotifications -- * Request Lenses , svnVaultNotificationConfig , svnAccountId , svnVaultName -- * Destructuring the Response , setVaultNotificationsResponse , SetVaultNotificationsResponse ) where import Network.AWS.Glacier.Types import Network.AWS.Glacier.Types.Product import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response -- | Provides options to configure notifications that will be sent when -- specific events happen to a vault. -- -- /See:/ 'setVaultNotifications' smart constructor. data SetVaultNotifications = SetVaultNotifications' { _svnVaultNotificationConfig :: !(Maybe VaultNotificationConfig) , _svnAccountId :: !Text , _svnVaultName :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'SetVaultNotifications' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'svnVaultNotificationConfig' -- -- * 'svnAccountId' -- -- * 'svnVaultName' setVaultNotifications :: Text -- ^ 'svnAccountId' -> Text -- ^ 'svnVaultName' -> SetVaultNotifications setVaultNotifications pAccountId_ pVaultName_ = SetVaultNotifications' { _svnVaultNotificationConfig = Nothing , _svnAccountId = pAccountId_ , _svnVaultName = pVaultName_ } -- | Provides options for specifying notification configuration. svnVaultNotificationConfig :: Lens' SetVaultNotifications (Maybe VaultNotificationConfig) svnVaultNotificationConfig = lens _svnVaultNotificationConfig (\ s a -> s{_svnVaultNotificationConfig = a}); -- | The 'AccountId' value is the AWS account ID of the account that owns the -- vault. You can either specify an AWS account ID or optionally a single -- apos'-'apos (hyphen), in which case Amazon Glacier uses the AWS account -- ID associated with the credentials used to sign the request. If you use -- an account ID, do not include any hyphens (apos-apos) in the ID. svnAccountId :: Lens' SetVaultNotifications Text svnAccountId = lens _svnAccountId (\ s a -> s{_svnAccountId = a}); -- | The name of the vault. 
svnVaultName :: Lens' SetVaultNotifications Text svnVaultName = lens _svnVaultName (\ s a -> s{_svnVaultName = a}); instance AWSRequest SetVaultNotifications where type Rs SetVaultNotifications = SetVaultNotificationsResponse request = putJSON glacier response = receiveNull SetVaultNotificationsResponse' instance ToHeaders SetVaultNotifications where toHeaders = const mempty instance ToJSON SetVaultNotifications where toJSON SetVaultNotifications'{..} = object (catMaybes [("vaultNotificationConfig" .=) <$> _svnVaultNotificationConfig]) instance ToPath SetVaultNotifications where toPath SetVaultNotifications'{..} = mconcat ["/", toBS _svnAccountId, "/vaults/", toBS _svnVaultName, "/notification-configuration"] instance ToQuery SetVaultNotifications where toQuery = const mempty -- | /See:/ 'setVaultNotificationsResponse' smart constructor. data SetVaultNotificationsResponse = SetVaultNotificationsResponse' deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'SetVaultNotificationsResponse' with the minimum fields required to make a request. -- setVaultNotificationsResponse :: SetVaultNotificationsResponse setVaultNotificationsResponse = SetVaultNotificationsResponse'
fmapfmapfmap/amazonka
amazonka-glacier/gen/Network/AWS/Glacier/SetVaultNotifications.hs
mpl-2.0
6,663
0
11
1,216
572
357
215
74
1
{-| Module : Logic Description : Mathematical logic module for the MPL DSL Copyright : (c) Rohit Jha, 2015 License : BSD2 Maintainer : rohit305jha@gmail.com Stability : Stable Functionality for: * AND * OR * NOT * XOR * NAND * NOR * Logical Implication * Logical Equality * Operators for the above * Operations on list for the above -} module Logic ( and', or', xor, xnor, nand, nor, equals, implies, (/\), (\/), (==>), (<=>), notL, andL, orL, xorL, xnorL, nandL, norL ) where {-| The 'and'' function is for the binary AND operation. For example: >>> let a = True >>> let b = False >>> c = and' a b c = False >>> True `and'` False False -} and' :: Bool -> Bool -> Bool and' a b = a && b {-| The 'or'' function is for the binary OR operation. For example: >>> let a = True >>> let b = False >>> let c = or a b c = True >>> True `or'` False True -} or' :: Bool -> Bool -> Bool or' a b = a || b {-| The 'xor' function is for the binary XOR operation. For example: >>> let a = True >>> let b = False >>> let c = xor a b c = True >>> True `xor` False True -} xor :: Bool -> Bool -> Bool xor a b | a == b = False | otherwise = True {-| The 'xnor' function is for the binary XNOR operation. For example: >>> xnor True False False >>> xnor True True True >>> xnor False True False >>> False `xnor` False True -} xnor :: Bool -> Bool -> Bool xnor a b = not (xor a b) {-| The 'nand' function is for the binary NAND operation. For example: >>> let a = True >>> let b = False >>> let c = nand a b c = True >>> True `nand` False True -} nand :: Bool -> Bool -> Bool nand a b = not (a && b) {-| The 'nor' function is for the binary NOR operation. For example: >>> let a = True >>> let b = False >>> let c = nor a b c = False >>> True `nor` False False -} nor :: Bool -> Bool -> Bool nor a b = not (a || b) {-| The 'equals' function checks for binary logical equality. 
For example: >>> let a = True >>> let b = False >>> let c = equals a b c = False >>> True `equals` True True -} equals :: Bool -> Bool -> Bool equals a b = a == b {-| The 'implies' function checks for binart logical implication. For example: >>> let a = True >>> let b = False >>> let c = implies a b c = False >>> True `implies` False False -} implies :: Bool -> Bool -> Bool implies a b | a && not b = False | otherwise = True {-| The '/\' operator is the binary AND operator. For example: >>> True /\ True True >>> True /\ False False >>> False /\ True False >>> False /\ False False -} (/\) :: Bool -> Bool -> Bool a /\ b = a && b {-| The '\/' operator is the binary OR operator. For example: >>> True \/ True True >>> True \/ False True >>> False \/ True True >>> False \/ False False -} (\/) :: Bool -> Bool -> Bool a \/ b = a || b {-| The '==>' operator is the binary implication operator. This operator can be used instead of the 'implies' function. For example: >>> True ==> False False >>> False ==> False True >>> False ==> True True >>> True ==> True True -} (==>) :: Bool -> Bool -> Bool a ==> b = implies a b {-| The '<=>' operator is the binary equality operator. For example: >>> True <=> True True >>> True <=> False False >>> False <=> True False >>> False <=> False True -} (<=>) :: Bool -> Bool -> Bool a <=> b = a == b {-| The 'notL' function is a unary NOT function that is applied to each element of a Bool list. For example: >>> notL [True, True, False] [False,False,True] >>> notL [True, False, True, True, False] [False,True,False,False,True] >>> notL [True] [False] >>> notL [] [] -} notL :: [Bool] -> [Bool] notL = map not {-| The 'andL' function is an AND operator that is applied on all elements of a Bool list. For example: >>> andL [True, False, True, True, False] False >>> andL [True, True] True >>> andL [False] False -} andL :: [Bool] -> Bool andL = and {-| The 'orL' function is an OR operator that is applied on all elements of a Bool list. 
For example: >>> orL [True, False, True, True, False] True >>> orL [True, False] True >>> orL [False, False] False -} orL :: [Bool] -> Bool orL = or {-| The 'xorL' function is an XOR operator that is applied on all the elements of a Bool list. For example: >>> xorL [True, False, True, True, False] True >>> xorL [False, False] False -} xorL :: [Bool] -> Bool xorL = foldl1 xor {-| The 'nandL' function is a NAND operator that is applied on all the elements of a Bool list. For example: >>> nandL [True, False, True, True, False] True >>> nandL [False, False] True -} nandL :: [Bool] -> Bool nandL = foldl1 nand {-| The 'norL' function is a NOR operator that is applied on all the elements of a Bool list. For example: >>> norL [True, False, True, True, False] True >>> norL [False, False] True -} norL :: [Bool] -> Bool norL = foldl1 nor {-| The 'xnorL' function is an XNOR operator that is applied on all the elements of a Bool list. For example: >>> xnorL [True, False, True, True, False] True >>> xnorL [False, False] True -} xnorL :: [Bool] -> Bool xnorL = foldl1 xnor
rohitjha/DiMPL
src/Logic.hs
bsd-2-clause
5,930
0
9
2,048
617
345
272
63
1
{-| Implementation of command-line functions. This module holds the common command-line related functions for the binaries, separated into this module since "Ganeti.Utils" is used in many other places and this is more IO oriented. -} {- Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} module Ganeti.HTools.CLI ( Options(..) 
, OptType , defaultOptions , Ganeti.HTools.CLI.parseOpts , parseOptsInner , parseYesNo , parseISpecString , shTemplate , maybeSaveCommands , maybePrintNodes , maybePrintInsts , maybeShowWarnings , printKeys , printFinal , setNodeStatus -- * The options , oDataFile , oDiskMoves , oAvoidDiskMoves , oDiskTemplate , oSpindleUse , oDynuFile , oMonD , oMonDDataFile , oMonDXen , oEvacMode , oMonDExitMissing , oFirstJobGroup , oRestrictedMigrate , oExInst , oExTags , oExecJobs , oForce , oFullEvacuation , oGroup , oIAllocSrc , oIgnoreDyn , oIgnoreNonRedundant , oIgnoreSoftErrors , oIndependentGroups , oAcceptExisting , oInstMoves , oJobDelay , genOLuxiSocket , oLuxiSocket , oMachineReadable , oMaxCpu , oMaxSolLength , oMinDisk , oMinGain , oMinGainLim , oMinResources , oMinScore , oNoHeaders , oNoSimulation , oNodeSim , oNodeTags , oOfflineMaintenance , oOfflineNode , oOneStepOnly , oOutputDir , oPrintCommands , oPrintInsts , oPrintMoves , oPrintNodes , oQuiet , oRapiMaster , oReason , oRestrictToNodes , oSaveCluster , oSelInst , oShowHelp , oShowVer , oShowComp , oSkipNonRedundant , oStdSpec , oTargetResources , oTieredSpec , oVerbose , oPriority , oNoCapacityChecks , genericOpts ) where import Control.Monad import Data.Char (toUpper) import Data.Maybe (fromMaybe) import System.Console.GetOpt import System.IO import Text.Printf (printf) import qualified Ganeti.HTools.Container as Container import qualified Ganeti.HTools.Node as Node import qualified Ganeti.Path as Path import Ganeti.HTools.Types import Ganeti.BasicTypes import Ganeti.Common as Common import Ganeti.Types import Ganeti.Utils -- * Data types -- | Command line options structure. 
data Options = Options { optDataFile :: Maybe FilePath -- ^ Path to the cluster data file , optDiskMoves :: Bool -- ^ Allow disk moves , optAvoidDiskMoves :: Double -- ^ Allow only disk moves improving -- cluster score in more than -- optAvoidDiskMoves times , optInstMoves :: Bool -- ^ Allow instance moves , optDiskTemplate :: Maybe DiskTemplate -- ^ Override for the disk template , optSpindleUse :: Maybe Int -- ^ Override for the spindle usage , optDynuFile :: Maybe FilePath -- ^ Optional file with dynamic use data , optIgnoreDynu :: Bool -- ^ Do not use dynamic use data , optIgnoreSoftErrors :: Bool -- ^ Ignore soft errors in balancing moves , optIndependentGroups :: Bool -- ^ consider groups independently , optAcceptExisting :: Bool -- ^ accept existing N+1 violations , optMonD :: Bool -- ^ Query MonDs , optMonDFile :: Maybe FilePath -- ^ Optional file with data provided -- by MonDs , optMonDXen :: Bool -- ^ Should Xen-specific collectors be -- considered (only if MonD is queried) , optMonDExitMissing :: Bool -- ^ If the program should exit on missing -- MonD data , optEvacMode :: Bool -- ^ Enable evacuation mode , optRestrictedMigrate :: Bool -- ^ Disallow replace-primary moves , optExInst :: [String] -- ^ Instances to be excluded , optExTags :: Maybe [String] -- ^ Tags to use for exclusion , optExecJobs :: Bool -- ^ Execute the commands via Luxi , optFirstJobGroup :: Bool -- ^ Only execute the first group of jobs , optForce :: Bool -- ^ Force the execution , optFullEvacuation :: Bool -- ^ Fully evacuate nodes to be rebooted , optGroup :: Maybe GroupID -- ^ The UUID of the group to process , optIAllocSrc :: Maybe FilePath -- ^ The iallocation spec , optIgnoreNonRedundant :: Bool -- ^ Ignore non-redundant instances , optSelInst :: [String] -- ^ Instances to be excluded , optLuxi :: Maybe FilePath -- ^ Collect data from Luxi , optJobDelay :: Double -- ^ Delay before executing first job , optMachineReadable :: Bool -- ^ Output machine-readable format , optMaster 
:: String -- ^ Collect data from RAPI , optMaxLength :: Int -- ^ Stop after this many steps , optMcpu :: Maybe Double -- ^ Override max cpu ratio for nodes , optMdsk :: Double -- ^ Max disk usage ratio for nodes , optMinGain :: Score -- ^ Min gain we aim for in a step , optMinGainLim :: Score -- ^ Limit below which we apply mingain , optMinResources :: Double -- ^ Minimal resources for hsqueeze , optMinScore :: Score -- ^ The minimum score we aim for , optNoHeaders :: Bool -- ^ Do not show a header line , optNoSimulation :: Bool -- ^ Skip the rebalancing dry-run , optNodeSim :: [String] -- ^ Cluster simulation mode , optNodeTags :: Maybe [String] -- ^ List of node tags to restrict to , optOffline :: [String] -- ^ Names of offline nodes , optRestrictToNodes :: Maybe [String] -- ^ if not Nothing, restrict -- allocation to those nodes , optOfflineMaintenance :: Bool -- ^ Pretend all instances are offline , optOneStepOnly :: Bool -- ^ Only do the first step , optOutPath :: FilePath -- ^ Path to the output directory , optPrintMoves :: Bool -- ^ Whether to show the instance moves , optReason :: Maybe String -- ^ The reason to be passed when -- submitting jobs , optSaveCluster :: Maybe FilePath -- ^ Save cluster state to this file , optShowCmds :: Maybe FilePath -- ^ Whether to show the command list , optShowHelp :: Bool -- ^ Just show the help , optShowComp :: Bool -- ^ Just show the completion info , optShowInsts :: Bool -- ^ Whether to show the instance map , optShowNodes :: Maybe [String] -- ^ Whether to show node status , optShowVer :: Bool -- ^ Just show the program version , optSkipNonRedundant :: Bool -- ^ Skip nodes with non-redundant instance , optStdSpec :: Maybe RSpec -- ^ Requested standard specs , optTargetResources :: Double -- ^ Target resources for squeezing , optTestCount :: Maybe Int -- ^ Optional test count override , optTieredSpec :: Maybe RSpec -- ^ Requested specs for tiered mode , optReplay :: Maybe String -- ^ Unittests: RNG state , optVerbose :: 
Int -- ^ Verbosity level , optPriority :: Maybe OpSubmitPriority -- ^ OpCode submit priority , optCapacity :: Bool -- ^ Also do capacity-related checks } deriving Show -- | Default values for the command line options. defaultOptions :: Options defaultOptions = Options { optDataFile = Nothing , optDiskMoves = True , optAvoidDiskMoves = 1.0 , optInstMoves = True , optIndependentGroups = False , optAcceptExisting = False , optDiskTemplate = Nothing , optSpindleUse = Nothing , optIgnoreDynu = False , optIgnoreSoftErrors = False , optDynuFile = Nothing , optMonD = False , optMonDFile = Nothing , optMonDXen = False , optMonDExitMissing = False , optEvacMode = False , optRestrictedMigrate = False , optExInst = [] , optExTags = Nothing , optExecJobs = False , optFirstJobGroup = False , optForce = False , optFullEvacuation = False , optGroup = Nothing , optIAllocSrc = Nothing , optIgnoreNonRedundant = False , optSelInst = [] , optLuxi = Nothing , optJobDelay = 10 , optMachineReadable = False , optMaster = "" , optMaxLength = -1 , optMcpu = Nothing , optMdsk = defReservedDiskRatio , optMinGain = 1e-2 , optMinGainLim = 1e-1 , optMinResources = 2.0 , optMinScore = 1e-9 , optNoHeaders = False , optNoSimulation = False , optNodeSim = [] , optNodeTags = Nothing , optSkipNonRedundant = False , optOffline = [] , optRestrictToNodes = Nothing , optOfflineMaintenance = False , optOneStepOnly = False , optOutPath = "." , optPrintMoves = False , optReason = Nothing , optSaveCluster = Nothing , optShowCmds = Nothing , optShowHelp = False , optShowComp = False , optShowInsts = False , optShowNodes = Nothing , optShowVer = False , optStdSpec = Nothing , optTargetResources = 2.0 , optTestCount = Nothing , optTieredSpec = Nothing , optReplay = Nothing , optVerbose = 1 , optPriority = Nothing , optCapacity = True } -- | Abbreviation for the option type. 
type OptType = GenericOptType Options

instance StandardOptions Options where
  helpRequested = optShowHelp
  verRequested  = optShowVer
  compRequested = optShowComp
  requestHelp opts = opts { optShowHelp = True }
  requestVer  opts = opts { optShowVer  = True }
  requestComp opts = opts { optShowComp = True }

-- * Helper functions

-- | Parse an instance specification given as a comma-separated
-- @disk,ram,cpu[,spindles]@ string; @descr@ is used to label error
-- messages (e.g. \"standard\" or \"tiered\").
parseISpecString :: String -> String -> Result RSpec
parseISpecString descr inp = do
  let fields  = sepSplit ',' inp
      nfields = length fields
      -- Kept polymorphic on purpose: used both as 'Result ()' (below)
      -- and as 'Result RSpec' (in the final case branch).
      badSpec = Bad ("Invalid " ++ descr ++ " specification: '" ++ inp ++
                     "', expected disk,ram,cpu")
      -- One parser per positional field, in order.
      parsers = [ annotateResult (descr ++ " specs disk") . parseUnit
                , annotateResult (descr ++ " specs memory") . parseUnit
                , tryRead (descr ++ " specs cpus")
                , tryRead (descr ++ " specs spindles")
                ]
  unless (nfields == 3 || nfields == 4) badSpec
  parsed <- zipWithM ($) parsers fields
  case parsed of
    -- The spindles field is optional: it is only meaningful when
    -- exclusive storage is enabled, and ignored otherwise.  We default
    -- to 1 so that a forgotten value does not produce a weird spec
    -- when exclusive storage happens to be enabled.
    [dsk, ram, cpu]      -> return $ RSpec cpu ram dsk 1
    [dsk, ram, cpu, spn] -> return $ RSpec cpu ram dsk spn
    _                    -> badSpec

-- | Disk template choices.
optComplDiskTemplate :: OptCompletion optComplDiskTemplate = OptComplChoices $ map diskTemplateToRaw [minBound..maxBound] -- * Command line options oDataFile :: OptType oDataFile = (Option "t" ["text-data"] (ReqArg (\ f o -> Ok o { optDataFile = Just f }) "FILE") "the cluster data FILE", OptComplFile) oDiskMoves :: OptType oDiskMoves = (Option "" ["no-disk-moves"] (NoArg (\ opts -> Ok opts { optDiskMoves = False})) "disallow disk moves from the list of allowed instance changes,\ \ thus allowing only the 'cheap' failover/migrate operations", OptComplNone) oAvoidDiskMoves :: OptType oAvoidDiskMoves = (Option "" ["avoid-disk-moves"] (reqWithConversion (tryRead "disk moves avoiding factor") (\f opts -> Ok opts { optAvoidDiskMoves = f }) "FACTOR") "gain in cluster metrics on each balancing step including disk moves\ \ should be FACTOR times higher than the gain after migrations in order to\ \ admit disk move during the step", OptComplFloat) oMonD :: OptType oMonD = (Option "" ["mond"] (OptArg (\ f opts -> do flag <- parseYesNo True f return $ opts { optMonD = flag }) "CHOICE") "pass either 'yes' or 'no' to query all monDs", optComplYesNo) oMonDDataFile :: OptType oMonDDataFile = (Option "" ["mond-data"] (ReqArg (\ f opts -> Ok opts { optMonDFile = Just f }) "FILE") "Import data provided by MonDs from the given FILE", OptComplFile) oMonDXen :: OptType oMonDXen = (Option "" ["mond-xen"] (NoArg (\ opts -> Ok opts { optMonDXen = True })) "also consider xen-specific collectors in MonD queries", OptComplNone) oMonDExitMissing :: OptType oMonDExitMissing = (Option "" ["exit-on-missing-mond-data"] (NoArg (\ opts -> Ok opts { optMonDExitMissing = True })) "abort if the data available from the monitoring daemons is incomplete", OptComplNone) oDiskTemplate :: OptType oDiskTemplate = (Option "" ["disk-template"] (reqWithConversion diskTemplateFromRaw (\dt opts -> Ok opts { optDiskTemplate = Just dt }) "TEMPLATE") "select the desired disk template", optComplDiskTemplate) oSpindleUse 
:: OptType oSpindleUse = (Option "" ["spindle-use"] (reqWithConversion (tryRead "parsing spindle-use") (\su opts -> do when (su < 0) $ fail "Invalid value of the spindle-use (expected >= 0)" return $ opts { optSpindleUse = Just su }) "SPINDLES") "select how many virtual spindle instances use\ \ [default read from cluster]", OptComplFloat) oSelInst :: OptType oSelInst = (Option "" ["select-instances"] (ReqArg (\ f opts -> Ok opts { optSelInst = sepSplit ',' f }) "INSTS") "only select given instances for any moves", OptComplManyInstances) oInstMoves :: OptType oInstMoves = (Option "" ["no-instance-moves"] (NoArg (\ opts -> Ok opts { optInstMoves = False})) "disallow instance (primary node) moves from the list of allowed,\ \ instance changes, thus allowing only slower, but sometimes\ \ safer, drbd secondary changes", OptComplNone) oDynuFile :: OptType oDynuFile = (Option "U" ["dynu-file"] (ReqArg (\ f opts -> Ok opts { optDynuFile = Just f }) "FILE") "Import dynamic utilisation data from the given FILE", OptComplFile) oIgnoreDyn :: OptType oIgnoreDyn = (Option "" ["ignore-dynu"] (NoArg (\ opts -> Ok opts {optIgnoreDynu = True})) "Ignore any dynamic utilisation information", OptComplNone) oIgnoreSoftErrors :: OptType oIgnoreSoftErrors = (Option "" ["ignore-soft-errors"] (NoArg (\ opts -> Ok opts {optIgnoreSoftErrors = True})) "Ignore any soft restrictions in balancing", OptComplNone) oIndependentGroups :: OptType oIndependentGroups = (Option "" ["independent-groups"] (NoArg (\ opts -> Ok opts {optIndependentGroups = True})) "Consider groups independently", OptComplNone) oAcceptExisting :: OptType oAcceptExisting = (Option "" ["accept-existing-errors"] (NoArg (\ opts -> Ok opts {optAcceptExisting = True})) "Accept existing N+1 violations; just don't add new ones", OptComplNone) oEvacMode :: OptType oEvacMode = (Option "E" ["evac-mode"] (NoArg (\opts -> Ok opts { optEvacMode = True })) "enable evacuation mode, where the algorithm only moves\ \ instances away from offline 
and drained nodes", OptComplNone) oRestrictedMigrate :: OptType oRestrictedMigrate = (Option "" ["restricted-migration"] (NoArg (\opts -> Ok opts { optRestrictedMigrate = True })) "disallow replace-primary moves (aka frf-moves); in evacuation mode, this\ \ will ensure that the only migrations are off the drained nodes", OptComplNone) oExInst :: OptType oExInst = (Option "" ["exclude-instances"] (ReqArg (\ f opts -> Ok opts { optExInst = sepSplit ',' f }) "INSTS") "exclude given instances from any moves", OptComplManyInstances) oExTags :: OptType oExTags = (Option "" ["exclusion-tags"] (ReqArg (\ f opts -> Ok opts { optExTags = Just $ sepSplit ',' f }) "TAG,...") "Enable instance exclusion based on given tag prefix", OptComplString) oExecJobs :: OptType oExecJobs = (Option "X" ["exec"] (NoArg (\ opts -> Ok opts { optExecJobs = True})) "execute the suggested moves via Luxi (only available when using\ \ it for data gathering)", OptComplNone) oReason :: OptType oReason = (Option "" ["reason"] (ReqArg (\ f opts -> Ok opts { optReason = Just f }) "REASON") "The reason to pass to the submitted jobs", OptComplNone) oFirstJobGroup :: OptType oFirstJobGroup = (Option "" ["first-job-group"] (NoArg (\ opts -> Ok opts {optFirstJobGroup = True})) "only execute the first group of jobs", OptComplNone) oForce :: OptType oForce = (Option "f" ["force"] (NoArg (\ opts -> Ok opts {optForce = True})) "force the execution of this program, even if warnings would\ \ otherwise prevent it", OptComplNone) oFullEvacuation :: OptType oFullEvacuation = (Option "" ["full-evacuation"] (NoArg (\ opts -> Ok opts { optFullEvacuation = True})) "fully evacuate the nodes to be rebooted", OptComplNone) oGroup :: OptType oGroup = (Option "G" ["group"] (ReqArg (\ f o -> Ok o { optGroup = Just f }) "ID") "the target node group (name or UUID)", OptComplOneGroup) oIAllocSrc :: OptType oIAllocSrc = (Option "I" ["ialloc-src"] (ReqArg (\ f opts -> Ok opts { optIAllocSrc = Just f }) "FILE") "Specify an iallocator 
spec as the cluster data source", OptComplFile) oIgnoreNonRedundant :: OptType oIgnoreNonRedundant = (Option "" ["ignore-non-redundant"] (NoArg (\ opts -> Ok opts { optIgnoreNonRedundant = True })) "Pretend that there are no non-redundant instances in the cluster", OptComplNone) oJobDelay :: OptType oJobDelay = (Option "" ["job-delay"] (reqWithConversion (tryRead "job delay") (\d opts -> Ok opts { optJobDelay = d }) "SECONDS") "insert this much delay before the execution of repair jobs\ \ to allow the tool to continue processing instances", OptComplFloat) genOLuxiSocket :: String -> OptType genOLuxiSocket defSocket = (Option "L" ["luxi"] (OptArg ((\ f opts -> Ok opts { optLuxi = Just f }) . fromMaybe defSocket) "SOCKET") ("collect data via Luxi, optionally using the given SOCKET path [" ++ defSocket ++ "]"), OptComplFile) oLuxiSocket :: IO OptType oLuxiSocket = liftM genOLuxiSocket Path.defaultQuerySocket oMachineReadable :: OptType oMachineReadable = (Option "" ["machine-readable"] (OptArg (\ f opts -> do flag <- parseYesNo True f return $ opts { optMachineReadable = flag }) "CHOICE") "enable machine readable output (pass either 'yes' or 'no' to\ \ explicitly control the flag, or without an argument defaults to\ \ yes)", optComplYesNo) oMaxCpu :: OptType oMaxCpu = (Option "" ["max-cpu"] (reqWithConversion (tryRead "parsing max-cpu") (\mcpu opts -> do when (mcpu <= 0) $ fail "Invalid value of the max-cpu ratio, expected >0" return $ opts { optMcpu = Just mcpu }) "RATIO") "maximum virtual-to-physical cpu ratio for nodes (from 0\ \ upwards) [default read from cluster]", OptComplFloat) oMaxSolLength :: OptType oMaxSolLength = (Option "l" ["max-length"] (reqWithConversion (tryRead "max solution length") (\i opts -> Ok opts { optMaxLength = i }) "N") "cap the solution at this many balancing or allocation\ \ rounds (useful for very unbalanced clusters or empty\ \ clusters)", OptComplInteger) oMinDisk :: OptType oMinDisk = (Option "" ["min-disk"] (reqWithConversion 
(tryRead "min free disk space") (\n opts -> Ok opts { optMdsk = n }) "RATIO") "minimum free disk space for nodes (between 0 and 1) [0]", OptComplFloat) oMinGain :: OptType oMinGain = (Option "g" ["min-gain"] (reqWithConversion (tryRead "min gain") (\g opts -> Ok opts { optMinGain = g }) "DELTA") "minimum gain to aim for in a balancing step before giving up", OptComplFloat) oMinGainLim :: OptType oMinGainLim = (Option "" ["min-gain-limit"] (reqWithConversion (tryRead "min gain limit") (\g opts -> Ok opts { optMinGainLim = g }) "SCORE") "minimum cluster score for which we start checking the min-gain", OptComplFloat) oMinResources :: OptType oMinResources = (Option "" ["minimal-resources"] (reqWithConversion (tryRead "minimal resources") (\d opts -> Ok opts { optMinResources = d}) "FACTOR") "minimal resources to be present on each in multiples of\ \ the standard allocation for not onlining standby nodes", OptComplFloat) oMinScore :: OptType oMinScore = (Option "e" ["min-score"] (reqWithConversion (tryRead "min score") (\e opts -> Ok opts { optMinScore = e }) "EPSILON") "mininum excess to the N+1 limit to aim for", OptComplFloat) oNoHeaders :: OptType oNoHeaders = (Option "" ["no-headers"] (NoArg (\ opts -> Ok opts { optNoHeaders = True })) "do not show a header line", OptComplNone) oNoSimulation :: OptType oNoSimulation = (Option "" ["no-simulation"] (NoArg (\opts -> Ok opts {optNoSimulation = True})) "do not perform rebalancing simulation", OptComplNone) oNodeSim :: OptType oNodeSim = (Option "" ["simulate"] (ReqArg (\ f o -> Ok o { optNodeSim = f:optNodeSim o }) "SPEC") "simulate an empty cluster, given as\ \ 'alloc_policy,num_nodes,disk,ram,cpu'", OptComplString) oNodeTags :: OptType oNodeTags = (Option "" ["node-tags"] (ReqArg (\ f opts -> Ok opts { optNodeTags = Just $ sepSplit ',' f }) "TAG,...") "Restrict to nodes with the given tags", OptComplString) oOfflineMaintenance :: OptType oOfflineMaintenance = (Option "" ["offline-maintenance"] (NoArg (\ opts -> Ok 
opts {optOfflineMaintenance = True})) "Schedule offline maintenance, i.e., pretend that all instance are\ \ offline.", OptComplNone) oOfflineNode :: OptType oOfflineNode = (Option "O" ["offline"] (ReqArg (\ n o -> Ok o { optOffline = n:optOffline o }) "NODE") "set node as offline", OptComplOneNode) oRestrictToNodes :: OptType oRestrictToNodes = (Option "" ["restrict-allocation-to"] (ReqArg (\ ns o -> Ok o { optRestrictToNodes = Just $ sepSplit ',' ns }) "NODE,...") "Restrict allocations to the given set of nodes", OptComplManyNodes) oOneStepOnly :: OptType oOneStepOnly = (Option "" ["one-step-only"] (NoArg (\ opts -> Ok opts {optOneStepOnly = True})) "Only do the first step", OptComplNone) oOutputDir :: OptType oOutputDir = (Option "d" ["output-dir"] (ReqArg (\ d opts -> Ok opts { optOutPath = d }) "PATH") "directory in which to write output files", OptComplDir) oPrintCommands :: OptType oPrintCommands = (Option "C" ["print-commands"] (OptArg ((\ f opts -> Ok opts { optShowCmds = Just f }) . fromMaybe "-") "FILE") "print the ganeti command list for reaching the solution,\ \ if an argument is passed then write the commands to a\ \ file named as such", OptComplNone) oPrintInsts :: OptType oPrintInsts = (Option "" ["print-instances"] (NoArg (\ opts -> Ok opts { optShowInsts = True })) "print the final instance map", OptComplNone) oPrintMoves :: OptType oPrintMoves = (Option "" ["print-moves"] (NoArg (\ opts -> Ok opts { optPrintMoves = True })) "print the moves of the instances", OptComplNone) oPrintNodes :: OptType oPrintNodes = (Option "p" ["print-nodes"] (OptArg ((\ f opts -> let (prefix, realf) = case f of '+':rest -> (["+"], rest) _ -> ([], f) splitted = prefix ++ sepSplit ',' realf in Ok opts { optShowNodes = Just splitted }) . 
fromMaybe []) "FIELDS") "print the final node list", OptComplNone) oQuiet :: OptType oQuiet = (Option "q" ["quiet"] (NoArg (\ opts -> Ok opts { optVerbose = optVerbose opts - 1 })) "decrease the verbosity level", OptComplNone) oRapiMaster :: OptType oRapiMaster = (Option "m" ["master"] (ReqArg (\ m opts -> Ok opts { optMaster = m }) "ADDRESS") "collect data via RAPI at the given ADDRESS", OptComplHost) oSaveCluster :: OptType oSaveCluster = (Option "S" ["save"] (ReqArg (\ f opts -> Ok opts { optSaveCluster = Just f }) "FILE") "Save cluster state at the end of the processing to FILE", OptComplNone) oSkipNonRedundant :: OptType oSkipNonRedundant = (Option "" ["skip-non-redundant"] (NoArg (\ opts -> Ok opts { optSkipNonRedundant = True })) "Skip nodes that host a non-redundant instance", OptComplNone) oStdSpec :: OptType oStdSpec = (Option "" ["standard-alloc"] (ReqArg (\ inp opts -> do tspec <- parseISpecString "standard" inp return $ opts { optStdSpec = Just tspec } ) "STDSPEC") "enable standard specs allocation, given as 'disk,ram,cpu'", OptComplString) oTargetResources :: OptType oTargetResources = (Option "" ["target-resources"] (reqWithConversion (tryRead "target resources") (\d opts -> Ok opts { optTargetResources = d}) "FACTOR") "target resources to be left on each node after squeezing in\ \ multiples of the standard allocation", OptComplFloat) oTieredSpec :: OptType oTieredSpec = (Option "" ["tiered-alloc"] (ReqArg (\ inp opts -> do tspec <- parseISpecString "tiered" inp return $ opts { optTieredSpec = Just tspec } ) "TSPEC") "enable tiered specs allocation, given as 'disk,ram,cpu'", OptComplString) oVerbose :: OptType oVerbose = (Option "v" ["verbose"] (NoArg (\ opts -> Ok opts { optVerbose = optVerbose opts + 1 })) "increase the verbosity level", OptComplNone) oPriority :: OptType oPriority = (Option "" ["priority"] (ReqArg (\ inp opts -> do prio <- parseSubmitPriority inp Ok opts { optPriority = Just prio }) "PRIO") "set the priority of submitted jobs", 
OptComplChoices (map fmtSubmitPriority [minBound..maxBound])) oNoCapacityChecks :: OptType oNoCapacityChecks = (Option "" ["no-capacity-checks"] (NoArg (\ opts -> Ok opts { optCapacity = False})) "disable capacity checks (like global N+1 redundancy)", OptComplNone) -- | Generic options. genericOpts :: [GenericOptType Options] genericOpts = [ oShowVer , oShowHelp , oShowComp ] -- * Functions -- | Wrapper over 'Common.parseOpts' with our custom options. parseOpts :: [String] -- ^ The command line arguments -> String -- ^ The program name -> [OptType] -- ^ The supported command line options -> [ArgCompletion] -- ^ The supported command line arguments -> IO (Options, [String]) -- ^ The resulting options and leftover -- arguments parseOpts = Common.parseOpts defaultOptions -- | A shell script template for autogenerated scripts. shTemplate :: String shTemplate = printf "#!/bin/sh\n\n\ \# Auto-generated script for executing cluster rebalancing\n\n\ \# To stop, touch the file /tmp/stop-htools\n\n\ \set -e\n\n\ \check() {\n\ \ if [ -f /tmp/stop-htools ]; then\n\ \ echo 'Stop requested, exiting'\n\ \ exit 0\n\ \ fi\n\ \}\n\n" -- | Optionally show or save a list of commands maybeSaveCommands :: String -- ^ Informal description -> Options -> String -- ^ commands -> IO () maybeSaveCommands msg opts cmds = case optShowCmds opts of Nothing -> return () Just "-" -> do putStrLn "" putStrLn msg putStr . unlines . map (" " ++) . filter (/= " check") . lines $ cmds Just out_path -> do writeFile out_path (shTemplate ++ cmds) printf "The commands have been written to file '%s'\n" out_path -- | Optionally print the node list. 
maybePrintNodes :: Maybe [String] -- ^ The field list -> String -- ^ Informational message -> ([String] -> String) -- ^ Function to generate the listing -> IO () maybePrintNodes Nothing _ _ = return () maybePrintNodes (Just fields) msg fn = do hPutStrLn stderr "" hPutStrLn stderr (msg ++ " status:") hPutStrLn stderr $ fn fields -- | Optionally print the instance list. maybePrintInsts :: Bool -- ^ Whether to print the instance list -> String -- ^ Type of the instance map (e.g. initial) -> String -- ^ The instance data -> IO () maybePrintInsts do_print msg instdata = when do_print $ do hPutStrLn stderr "" hPutStrLn stderr $ msg ++ " instance map:" hPutStr stderr instdata -- | Function to display warning messages from parsing the cluster -- state. maybeShowWarnings :: [String] -- ^ The warning messages -> IO () maybeShowWarnings fix_msgs = unless (null fix_msgs) $ do hPutStrLn stderr "Warning: cluster has inconsistent data:" hPutStrLn stderr . unlines . map (printf " - %s") $ fix_msgs -- | Format a list of key, value as a shell fragment. printKeys :: String -- ^ Prefix to printed variables -> [(String, String)] -- ^ List of (key, value) pairs to be printed -> IO () printKeys prefix = mapM_ (\(k, v) -> printf "%s_%s=%s\n" prefix (map toUpper k) (ensureQuoted v)) -- | Prints the final @OK@ marker in machine readable output. printFinal :: String -- ^ Prefix to printed variable -> Bool -- ^ Whether output should be machine readable; -- note: if not, there is nothing to print -> IO () printFinal prefix True = -- this should be the final entry printKeys prefix [("OK", "1")] printFinal _ False = return () -- | Potentially set the node as offline based on passed offline list. setNodeOffline :: [Ndx] -> Node.Node -> Node.Node setNodeOffline offline_indices n = if Node.idx n `elem` offline_indices then Node.setOffline n True else n -- | Set node properties based on command line options. 
setNodeStatus :: Options -> Node.List -> IO Node.List setNodeStatus opts fixed_nl = do let offline_passed = optOffline opts all_nodes = Container.elems fixed_nl offline_lkp = map (lookupName (map Node.name all_nodes)) offline_passed offline_wrong = filter (not . goodLookupResult) offline_lkp offline_names = map lrContent offline_lkp offline_indices = map Node.idx $ filter (\n -> Node.name n `elem` offline_names) all_nodes m_cpu = optMcpu opts m_dsk = optMdsk opts unless (null offline_wrong) . exitErr $ printf "wrong node name(s) set as offline: %s\n" (commaJoin (map lrContent offline_wrong)) let setMCpuFn = case m_cpu of Nothing -> id Just new_mcpu -> flip Node.setMcpu new_mcpu let nm = Container.map (setNodeOffline offline_indices . flip Node.setMdsk m_dsk . setMCpuFn) fixed_nl return nm
grnet/snf-ganeti
src/Ganeti/HTools/CLI.hs
bsd-2-clause
32,151
293
16
8,193
6,557
3,727
2,830
755
3
{-# OPTIONS_GHC -cpp #-}
{-# LANGUAGE ForeignFunctionInterface #-}

-- | Concurrency test: a safe foreign call (which re-enters Haskell)
-- and a blocking foreign call must not stall the other threads when
-- the threaded RTS is used.
module Main where

import Control.Concurrent

-- | Exported to C as @hFun@ and then imported back below, so calling
-- 'hFun' round-trips through the FFI.
haskellFun :: Int -> IO ()
haskellFun c = putStrLn $ "Haskell: " ++ show c

foreign export ccall "hFun" haskellFun :: Int -> IO ()
foreign import ccall safe "hFun" hFun :: Int -> IO ()

#if defined(mingw32_HOST_OS)
foreign import stdcall safe "Sleep" _sleepBlock :: Int -> IO ()
-- Win32 Sleep takes milliseconds; expose a seconds-based wrapper to
-- match POSIX sleep(3) below.
sleepBlock secs = _sleepBlock (secs * 1000)
#else
foreign import ccall safe "sleep" sleepBlock :: Int -> IO ()
#endif

main :: IO ()
main = do
  done <- newEmptyMVar
  _ <- forkIO $ do
         putStrLn "newThread started"
         sleepBlock 1          -- blocking safe foreign call
         putStrLn "newThread back again"
         putMVar done "1 sec later"
  -- this will not be blocked in the threaded RTS
  threadDelay 500000
  putStrLn "mainThread"
  _ <- forkIO (hFun 2)         -- neither will this
  msg <- takeMVar done
  putStrLn msg
  putStrLn "\nshutting down"
sdiehl/ghc
testsuite/tests/concurrent/should_run/conc038.hs
bsd-3-clause
898
0
10
199
239
116
123
22
1
-- | A plain two-field integer record.
data Foo = Foo
  { r1 :: Int  -- ^ first component
  , r2 :: Int  -- ^ second component
  }
mpickering/ghc-exactprint
tests/examples/ghc710/HangingRecord.hs
bsd-3-clause
48
0
8
20
22
13
9
3
0
{-# LANGUAGE CPP #-}

-- | Conversions between full 128-bit UUIDs and the 16-bit \"short\"
-- UUIDs that the Bluetooth SIG assigns relative to the Bluetooth base
-- UUID (00000000-0000-1000-8000-00805F9B34FB).
module Network.Bluetooth.UUID
  ( ShortUUID
  , fromShortUUID
  , toShortUUID
  , isReservedUUID
  , randomUUID
  , protocolToUUID
  , serviceClassToUUID
  , byteSwap32
  , baseUUID
  , UUIDProtocol(..)
  , UUIDServiceClass(..)
  , UUIDProfile
  ) where

import Data.Ix
import Data.UUID
import Data.Word (Word16, Word32)

import Network.Bluetooth.Utils

#if defined(mingw32_HOST_OS)
import Network.Bluetooth.Windows.UUID
#elif defined(darwin_HOST_OS)
import Network.Bluetooth.OSX.UUID
#elif defined(linux_HOST_OS)
import Network.Bluetooth.Linux.UUID
#elif defined(freebsd_HOST_OS)
import Network.Bluetooth.FreeBSD.UUID
#endif

import System.Random

-- | A 16-bit SIG-assigned UUID shorthand.
type ShortUUID = Word16

-- | The Bluetooth base UUID.  A short UUID @XXXX@ denotes the full
-- UUID @0000XXXX-0000-1000-8000-00805F9B34FB@.
baseUUID :: UUID
baseUUID = fromWords 0x00000000 baseUUIDWord2 baseUUIDWord3 baseUUIDWord4

-- Words 2-4 of the base UUID, shared by every short-derived UUID.
baseUUIDWord2, baseUUIDWord3, baseUUIDWord4 :: Word32
baseUUIDWord2 = 0x00001000
baseUUIDWord3 = 0x80000080
baseUUIDWord4 = 0x5F9B34FB

-- | Expand a 16-bit short UUID into its full 128-bit form by placing
-- it in the low half of the base UUID's first word.
fromShortUUID :: ShortUUID -> UUID
fromShortUUID su =
  fromWords (fromIntegral su) baseUUIDWord2 baseUUIDWord3 baseUUIDWord4

-- | Truncate a full UUID to its 16-bit short form (the low 16 bits of
-- the first word).  Only meaningful for base-derived UUIDs.
toShortUUID :: UUID -> ShortUUID
toShortUUID uuid = let (w1,_,_,_) = toWords uuid in fromIntegral w1

-- | Generate a random UUID that does not collide with any
-- SIG-reserved short UUID, retrying until one is found.
randomUUID :: IO UUID
randomUUID = do
  uuid <- randomIO
  if isReservedUUID uuid
     then randomUUID
     else return uuid

-- | Whether a UUID is one of the Bluetooth SIG-reserved short UUIDs,
-- i.e. it is derived from 'baseUUID' and its short value falls in one
-- of the assigned protocol\/service-class ranges.
isReservedUUID :: UUID -> Bool
isReservedUUID uuid =
  let (w1,w2,w3,w4) = toWords uuid
      -- w1 < 0x10000 <=> the upper 16 bits of w1 are zero, i.e. w1 is
      -- a plain short UUID.  (Previously expressed via a byteSwap32 +
      -- Word16-truncation trick; this is the same test, stated
      -- directly.)
  in w1 < 0x10000
     && w2 == baseUUIDWord2
     && w3 == baseUUIDWord3
     && w4 == baseUUIDWord4
     && isReserved (fromIntegral w1)
  where
    -- Reserved short-UUID ranges (inclusive), per the Bluetooth
    -- Assigned Numbers document.
    isReserved :: Word16 -> Bool
    isReserved w = any (`inRange` w)
      [ (0x0000, 0x000A), (0x000C, 0x000C), (0x000E, 0x0012)
      , (0x0014, 0x0014), (0x0016, 0x0017), (0x0019, 0x0019)
      , (0x001B, 0x001B), (0x001D, 0x001F), (0x0100, 0x0100)
      , (0x1000, 0x1002), (0x1101, 0x113B), (0x1200, 0x1206)
      , (0x1300, 0x1305), (0x1400, 0x1402), (0x1801, 0x1801)
      , (0x2112, 0x2122)
      ]

-- | The full UUID for a protocol identifier.
protocolToUUID :: UUIDProtocol -> UUID
protocolToUUID = fromShortUUID . cFromEnum

-- | The full UUID for a service class identifier.
serviceClassToUUID :: UUIDServiceClass -> UUID
serviceClassToUUID = fromShortUUID . cFromEnum

-- | Profiles share the service-class identifier space.
type UUIDProfile = UUIDServiceClass

-- NOTE(review): this module previously carried several hundred lines of
-- commented-out 'UUIDProtocol' / 'UUIDServiceClass' enumeration code
-- (with their SIG-assigned 16-bit values).  The live definitions are in
-- the platform-specific modules imported above under CPP; consult the
-- Bluetooth Assigned Numbers document for the value tables.
RyanGlScott/bluetooth
src/Network/Bluetooth/UUID.hs
bsd-3-clause
16,039
0
17
6,029
909
669
240
55
2
{-# language FlexibleContexts #-} {-# language OverloadedLists #-} {-# language OverloadedStrings #-} {-# language QuasiQuotes #-} {-# language TypeFamilies #-} module Planetary.Core.Typecheck.Test ( unitTests , checkTest , emptyTypingEnv ) where import Control.Lens import Control.Unification (freeze, unfreeze) import Control.Unification.IntVar import Data.ByteString (ByteString) import Data.Text (Text) import NeatInterpolation import Network.IPLD import EasyTest import Planetary.Core import Planetary.Library import Planetary.Library.HaskellForeign (intTy, boolTy) import Planetary.Library.FrankExamples as Frank import Planetary.Support.NameResolution import Planetary.Support.Parser checkTest :: Text -> TypingEnvI -> TmI -> UTy IntVar -> Test () checkTest name tables tm ty = scope name $ case runTcM tables (check tm ty) of Right () -> ok other -> fail (show other) inferTest :: Text -> TypingEnvI -> TmI -> Either TcErr (UTy IntVar) -> Test () inferTest name tables tm expected = scope name $ expectEq (freeze <$> runTcM tables (infer tm)) (freeze <$> expected) exampleInterfaces :: InterfaceTableI exampleInterfaces = mempty dataTypeTable :: DataTypeTableI dataTypeTable = mempty ambientAbility :: UTy IntVar ambientAbility = unfreeze emptyAbility emptyTypingEnv :: TypingEnvI emptyTypingEnv = TypingEnv dataTypeTable exampleInterfaces ambientAbility [] mockCid :: ByteString -> Cid mockCid = mkCid unitTests :: Test () unitTests = scope "typechecking" $ tests [ scope "infer variable" $ tests [ let ty = VariableTyU "hippo" env = emptyTypingEnv & varTypes .~ [("x", Left ty)] in inferTest "VAR 1" env (V"x") (Right ty) , inferTest "VAR 2" emptyTypingEnv (V"x") (Left (LookupVarTy "x")) ] , scope "TODO: infer polyvar" $ tests [ ] , scope "infer command" $ tests [ let domTy = DataTy (UidTy (mockCid "domain")) [] codomTy = DataTy (UidTy (mockCid "codomain")) [] cmdUid = mockCid "fire missiles" -- TODO: this duplication between ambient and interfaces is so bad cmdIfaces = [ (cmdUid, 
EffectInterface [] [CommandDeclaration "fire missiles" [domTy] codomTy] ) ] ambient = extendAbility emptyAbility $ Adjustment [ (cmdUid, [TyArgVal domTy]) -- TODO: what does it mean to have an ability here? -- [ (cmdUid, [TyArgVal domTy, TyArgAbility _]) ] tables = emptyTypingEnv & typingInterfaces .~ cmdIfaces & typingAbilities .~ unfreeze ambient cmd = Command cmdUid 0 expected = Right $ unfreeze $ SuspendedTy $ CompTy [domTy] $ Peg ambient codomTy in inferTest "COMMAND" tables cmd expected ] , let dataUid = mockCid "dataUid" v1Id = mockCid "v1" v2Id = mockCid "v2" tm1 = DataConstructor v1Id 0 [] tm2 = DataConstructor v2Id 0 [] ty1, ty2 :: TyFix' ty1 = DataTy (UidTy v1Id) [] ty2 = DataTy (UidTy v2Id) [] ty1ty2vals = [TyArgVal ty1, TyArgVal ty2] constr1 = ConstructorDecl "constr1" [ty1, ty2] constr2 = ConstructorDecl "constr2" [] app fun = AppT fun [tm1, tm2] f = Lambda ["x", "y"] $ DataConstructor dataUid 0 [V"x", V"y"] resultTy = DataTy (UidTy dataUid) ty1ty2vals goodAnnF = Annotation f $ SuspendedTy $ CompTy [ty1, ty2] (Peg emptyAbility resultTy) expected = Right (unfreeze resultTy) baddAnnF = Annotation f $ SuspendedTy $ CompTy [ty1, ty1] (Peg emptyAbility resultTy) ty1Thawed = DataTy_ (unfreeze (UidTy v1Id)) [] ty2Thawed = DataTy_ (unfreeze (UidTy v2Id)) [] expectedBad = Left (MismatchFailure ty1Thawed ty2Thawed) tables = emptyTypingEnv & typingData .~ [ (dataUid, DataTypeInterface [] [constr1 ty1ty2vals]) , (v1Id, DataTypeInterface [] [constr2 []]) , (v2Id, DataTypeInterface [] [constr2 []]) ] in scope "sharing data defns" $ tests [ scope "infer app" $ tests [ inferTest "APP (1)" tables (app goodAnnF) expected , inferTest "APP (2)" tables (app baddAnnF) expectedBad ] , scope "check data" $ tests [ let tables' = emptyTypingEnv & typingData .~ [ (v1Id, DataTypeInterface [] [ConstructorDecl "constr" [] []]) ] in checkTest "DATA (simple)" tables' tm1 (unfreeze ty1) , let tm = DataConstructor dataUid 0 [tm1, tm2] expectedTy = DataTy (UidTy dataUid) ty1ty2vals in 
checkTest "DATA (args)" tables tm (unfreeze expectedTy) ] ] , scope "infer annotation" $ tests [ let cid = mockCid "ty" ty = DataTy (UidTy cid) [] tm = Annotation (DataConstructor cid 0 []) ty env = emptyTypingEnv & typingData .~ [ (cid, DataTypeInterface [] [ ConstructorDecl "constr" [] [] ]) ] in inferTest "COERCE" env tm (Right (unfreeze ty)) ] , scope "TODO: check lambda" $ tests [] , scope "case" $ tests [ do let abcdUid = mockCid "abcd" defgUid = mockCid "123424321432" abcdTy = DataTy (UidTy abcdUid) [] abcdVal = DataConstructor abcdUid 0 [] val = Annotation (DataConstructor defgUid 1 [abcdVal, abcdVal]) (DataTy (UidTy defgUid) []) resolutionState = [ ("abcd", abcdUid) , ("defg", defgUid) ] Right tm <- pure $ resolveTm resolutionState $ fst $ forceTm [text| case val of | <_ x y z> -> x | <_ y z> -> z |] let tm' = substitute "val" val tm -- decls = forceDeclarations [text| -- data abcd = -- | <abcd> -- data defg = -- | <defg1 abcd abcd abcd> -- | <defg2 abcd abcd> -- |] env = emptyTypingEnv & typingData .~ [ (abcdUid, DataTypeInterface [] [ ConstructorDecl "abcd" [] [] ]) , (defgUid, DataTypeInterface [] [ ConstructorDecl "defg1" [abcdTy, abcdTy, abcdTy] [] , ConstructorDecl "defg2" [abcdTy, abcdTy] [] ]) ] expectedTy = unfreeze abcdTy checkTest "CASE" env tm' expectedTy ] , scope "check switch" $ tests [ let tm = V"x" dataUid = mockCid "dataUid" dataTy = unfreeze $ DataTy (UidTy dataUid) [] expectedTy = dataTy env = emptyTypingEnv & varTypes .~ [("x", Left dataTy)] in checkTest "SWITCH" env tm expectedTy ] , scope "check handle" $ tests -- both branches should give us a bool [ do Right tm <- pure $ resolve $ fst $ forceTm [text| handle abort! : [e , <Abort>]Int with Abort: | <aborting -> k> -> x1 | v -> x2 |] Just abortId <- pure $ Frank.resolvedDecls ^? globalCids . 
ix "Abort" let abortAbility = Ability OpenAbility [(abortId, [])] abortTy = SuspendedTy (CompTy [] (Peg abortAbility intTy)) -- XXX generalize env = emptyTypingEnv & typingInterfaces .~ (Frank.resolvedDecls ^. interfaces) & varTypes .~ (Left . unfreeze <$> [ ("abort", abortTy) , ("x1", boolTy) , ("x2", boolTy) ]) -- & varTypes .~ [Left . unfreeze <$> [abortTy, boolTy, boolTy]] expectedTy = unfreeze boolTy checkTest "HANDLE (abort)" env tm expectedTy , do Right tm <- pure $ resolve $ fst $ forceTm [text| handle val : [e, <Send Bool>, <Receive Bool>]Int with Send Bool: | <send y -> s> -> x1 Receive Bool: | <receive -> r> -> x2 | v -> x3 |] let env = emptyTypingEnv & typingInterfaces .~ (Frank.resolvedDecls ^. interfaces) & varTypes .~ (Left . unfreeze <$> [ ("val", intTy) , ("x1", boolTy) , ("x2", boolTy) , ("x3", boolTy) ]) -- & varTypes .~ [Left . unfreeze <$> [intTy, boolTy, boolTy, boolTy]] expectedTy = unfreeze boolTy checkTest "HANDLE (multi)" env tm expectedTy ] , let in scope "polyvar instantiation" $ tests [ ] ]
joelburget/interplanetary-computation
src/Planetary/Core/Typecheck/Test.hs
bsd-3-clause
9,182
0
26
3,434
2,312
1,206
1,106
-1
-1
{- (c) The AQUA Project, Glasgow University, 1994-1998 \section[ErrsUtils]{Utilities for error reporting} -} {-# LANGUAGE CPP #-} module ErrUtils ( MsgDoc, Validity(..), andValid, allValid, isValid, getInvalids, ErrMsg, WarnMsg, Severity(..), Messages, ErrorMessages, WarningMessages, errMsgSpan, errMsgContext, errMsgShortDoc, errMsgExtraInfo, mkLocMessage, pprMessageBag, pprErrMsgBag, pprErrMsgBagWithLoc, pprLocErrMsg, makeIntoWarning, isWarning, errorsFound, emptyMessages, isEmptyMessages, mkErrMsg, mkPlainErrMsg, mkLongErrMsg, mkWarnMsg, mkPlainWarnMsg, printBagOfErrors, warnIsErrorMsg, mkLongWarnMsg, ghcExit, doIfSet, doIfSet_dyn, dumpIfSet, dumpIfSet_dyn, dumpIfSet_dyn_printer, mkDumpDoc, dumpSDoc, -- * Messages during compilation putMsg, printInfoForUser, printOutputForUser, logInfo, logOutput, errorMsg, fatalErrorMsg, fatalErrorMsg', fatalErrorMsg'', compilationProgressMsg, showPass, debugTraceMsg, prettyPrintGhcErrors, ) where #include "HsVersions.h" import Bag ( Bag, bagToList, isEmptyBag, emptyBag ) import Exception import Outputable import Panic import FastString import SrcLoc import DynFlags import System.Directory import System.Exit ( ExitCode(..), exitWith ) import System.FilePath ( takeDirectory, (</>) ) import Data.List import qualified Data.Set as Set import Data.IORef import Data.Ord import Data.Time import Control.Monad import Control.Monad.IO.Class import System.IO ------------------------- type MsgDoc = SDoc ------------------------- data Validity = IsValid -- Everything is fine | NotValid MsgDoc -- A problem, and some indication of why isValid :: Validity -> Bool isValid IsValid = True isValid (NotValid {}) = False andValid :: Validity -> Validity -> Validity andValid IsValid v = v andValid v _ = v allValid :: [Validity] -> Validity -- If they aren't all valid, return the first allValid [] = IsValid allValid (v : vs) = v `andValid` allValid vs getInvalids :: [Validity] -> [MsgDoc] getInvalids vs = [d | NotValid d <- vs] -- 
----------------------------------------------------------------------------- -- Basic error messages: just render a message with a source location. type Messages = (WarningMessages, ErrorMessages) type WarningMessages = Bag WarnMsg type ErrorMessages = Bag ErrMsg data ErrMsg = ErrMsg { errMsgSpan :: SrcSpan, errMsgContext :: PrintUnqualified, errMsgShortDoc :: MsgDoc, -- errMsgShort* should always errMsgShortString :: String, -- contain the same text errMsgExtraInfo :: MsgDoc, errMsgSeverity :: Severity } -- The SrcSpan is used for sorting errors into line-number order type WarnMsg = ErrMsg data Severity = SevOutput | SevDump | SevInteractive | SevInfo | SevWarning | SevError | SevFatal instance Show ErrMsg where show em = errMsgShortString em pprMessageBag :: Bag MsgDoc -> SDoc pprMessageBag msgs = vcat (punctuate blankLine (bagToList msgs)) mkLocMessage :: Severity -> SrcSpan -> MsgDoc -> MsgDoc -- Always print the location, even if it is unhelpful. Error messages -- are supposed to be in a standard format, and one without a location -- would look strange. Better to say explicitly "<no location info>". mkLocMessage severity locn msg = sdocWithDynFlags $ \dflags -> let locn' = if gopt Opt_ErrorSpans dflags then ppr locn else ppr (srcSpanStart locn) in hang (locn' <> colon <+> sev_info) 4 msg where sev_info = case severity of SevWarning -> ptext (sLit "Warning:") _other -> empty -- For warnings, print Foo.hs:34: Warning: -- <the warning message> makeIntoWarning :: ErrMsg -> ErrMsg makeIntoWarning err = err { errMsgSeverity = SevWarning } isWarning :: ErrMsg -> Bool isWarning err | SevWarning <- errMsgSeverity err = True | otherwise = False -- ----------------------------------------------------------------------------- -- Collecting up messages for later ordering and printing. 
mk_err_msg :: DynFlags -> Severity -> SrcSpan -> PrintUnqualified -> MsgDoc -> SDoc -> ErrMsg mk_err_msg dflags sev locn print_unqual msg extra = ErrMsg { errMsgSpan = locn, errMsgContext = print_unqual , errMsgShortDoc = msg , errMsgShortString = showSDoc dflags msg , errMsgExtraInfo = extra , errMsgSeverity = sev } mkLongErrMsg, mkLongWarnMsg :: DynFlags -> SrcSpan -> PrintUnqualified -> MsgDoc -> MsgDoc -> ErrMsg -- A long (multi-line) error message mkErrMsg, mkWarnMsg :: DynFlags -> SrcSpan -> PrintUnqualified -> MsgDoc -> ErrMsg -- A short (one-line) error message mkPlainErrMsg, mkPlainWarnMsg :: DynFlags -> SrcSpan -> MsgDoc -> ErrMsg -- Variant that doesn't care about qualified/unqualified names mkLongErrMsg dflags locn unqual msg extra = mk_err_msg dflags SevError locn unqual msg extra mkErrMsg dflags locn unqual msg = mk_err_msg dflags SevError locn unqual msg empty mkPlainErrMsg dflags locn msg = mk_err_msg dflags SevError locn alwaysQualify msg empty mkLongWarnMsg dflags locn unqual msg extra = mk_err_msg dflags SevWarning locn unqual msg extra mkWarnMsg dflags locn unqual msg = mk_err_msg dflags SevWarning locn unqual msg empty mkPlainWarnMsg dflags locn msg = mk_err_msg dflags SevWarning locn alwaysQualify msg empty ---------------- emptyMessages :: Messages emptyMessages = (emptyBag, emptyBag) isEmptyMessages :: Messages -> Bool isEmptyMessages (warns, errs) = isEmptyBag warns && isEmptyBag errs warnIsErrorMsg :: DynFlags -> ErrMsg warnIsErrorMsg dflags = mkPlainErrMsg dflags noSrcSpan (text "\nFailing due to -Werror.") errorsFound :: DynFlags -> Messages -> Bool errorsFound _dflags (_warns, errs) = not (isEmptyBag errs) printBagOfErrors :: DynFlags -> Bag ErrMsg -> IO () printBagOfErrors dflags bag_of_errors = printMsgBag dflags bag_of_errors pprErrMsgBag :: Bag ErrMsg -> [SDoc] pprErrMsgBag bag = [ sdocWithDynFlags $ \dflags -> let style = mkErrStyle dflags unqual in withPprStyle style (d $$ e) | ErrMsg { errMsgShortDoc = d, errMsgExtraInfo = e, 
errMsgContext = unqual } <- sortMsgBag bag ] pprErrMsgBagWithLoc :: Bag ErrMsg -> [SDoc] pprErrMsgBagWithLoc bag = [ pprLocErrMsg item | item <- sortMsgBag bag ] pprLocErrMsg :: ErrMsg -> SDoc pprLocErrMsg (ErrMsg { errMsgSpan = s , errMsgShortDoc = d , errMsgExtraInfo = e , errMsgSeverity = sev , errMsgContext = unqual }) = sdocWithDynFlags $ \dflags -> withPprStyle (mkErrStyle dflags unqual) (mkLocMessage sev s (d $$ e)) printMsgBag :: DynFlags -> Bag ErrMsg -> IO () printMsgBag dflags bag = sequence_ [ let style = mkErrStyle dflags unqual in log_action dflags dflags sev s style (d $$ e) | ErrMsg { errMsgSpan = s, errMsgShortDoc = d, errMsgSeverity = sev, errMsgExtraInfo = e, errMsgContext = unqual } <- sortMsgBag bag ] sortMsgBag :: Bag ErrMsg -> [ErrMsg] sortMsgBag bag = sortBy (comparing errMsgSpan) $ bagToList bag ghcExit :: DynFlags -> Int -> IO () ghcExit dflags val | val == 0 = exitWith ExitSuccess | otherwise = do errorMsg dflags (text "\nCompilation had errors\n\n") exitWith (ExitFailure val) doIfSet :: Bool -> IO () -> IO () doIfSet flag action | flag = action | otherwise = return () doIfSet_dyn :: DynFlags -> GeneralFlag -> IO () -> IO() doIfSet_dyn dflags flag action | gopt flag dflags = action | otherwise = return () -- ----------------------------------------------------------------------------- -- Dumping dumpIfSet :: DynFlags -> Bool -> String -> SDoc -> IO () dumpIfSet dflags flag hdr doc | not flag = return () | otherwise = log_action dflags dflags SevDump noSrcSpan defaultDumpStyle (mkDumpDoc hdr doc) -- | a wrapper around 'dumpSDoc'. -- First check whether the dump flag is set -- Do nothing if it is unset dumpIfSet_dyn :: DynFlags -> DumpFlag -> String -> SDoc -> IO () dumpIfSet_dyn dflags flag hdr doc = when (dopt flag dflags) $ dumpSDoc dflags alwaysQualify flag hdr doc -- | a wrapper around 'dumpSDoc'. 
-- First check whether the dump flag is set -- Do nothing if it is unset -- -- Unlike 'dumpIfSet_dyn', -- has a printer argument but no header argument dumpIfSet_dyn_printer :: PrintUnqualified -> DynFlags -> DumpFlag -> SDoc -> IO () dumpIfSet_dyn_printer printer dflags flag doc = when (dopt flag dflags) $ dumpSDoc dflags printer flag "" doc mkDumpDoc :: String -> SDoc -> SDoc mkDumpDoc hdr doc = vcat [blankLine, line <+> text hdr <+> line, doc, blankLine] where line = text (replicate 20 '=') -- | Write out a dump. -- If --dump-to-file is set then this goes to a file. -- otherwise emit to stdout. -- -- When hdr is empty, we print in a more compact format (no separators and -- blank lines) -- -- The DumpFlag is used only to choose the filename to use if --dump-to-file is -- used; it is not used to decide whether to dump the output dumpSDoc :: DynFlags -> PrintUnqualified -> DumpFlag -> String -> SDoc -> IO () dumpSDoc dflags print_unqual flag hdr doc = do let mFile = chooseDumpFile dflags flag dump_style = mkDumpStyle print_unqual case mFile of Just fileName -> do let gdref = generatedDumps dflags gd <- readIORef gdref let append = Set.member fileName gd mode = if append then AppendMode else WriteMode when (not append) $ writeIORef gdref (Set.insert fileName gd) createDirectoryIfMissing True (takeDirectory fileName) handle <- openFile fileName mode doc' <- if null hdr then return doc else do t <- getCurrentTime let d = text (show t) $$ blankLine $$ doc return $ mkDumpDoc hdr d defaultLogActionHPrintDoc dflags handle doc' dump_style hClose handle -- write the dump to stdout Nothing -> do let (doc', severity) | null hdr = (doc, SevOutput) | otherwise = (mkDumpDoc hdr doc, SevDump) log_action dflags dflags severity noSrcSpan dump_style doc' -- | Choose where to put a dump file based on DynFlags -- chooseDumpFile :: DynFlags -> DumpFlag -> Maybe String chooseDumpFile dflags flag | gopt Opt_DumpToFile dflags || flag == Opt_D_th_dec_file , Just prefix <- getPrefix = Just 
$ setDir (prefix ++ (beautifyDumpName flag)) | otherwise = Nothing where getPrefix -- dump file location is being forced -- by the --ddump-file-prefix flag. | Just prefix <- dumpPrefixForce dflags = Just prefix -- dump file location chosen by DriverPipeline.runPipeline | Just prefix <- dumpPrefix dflags = Just prefix -- we haven't got a place to put a dump file. | otherwise = Nothing setDir f = case dumpDir dflags of Just d -> d </> f Nothing -> f -- | Build a nice file name from name of a GeneralFlag constructor beautifyDumpName :: DumpFlag -> String beautifyDumpName Opt_D_th_dec_file = "th.hs" beautifyDumpName flag = let str = show flag suff = case stripPrefix "Opt_D_" str of Just x -> x Nothing -> panic ("Bad flag name: " ++ str) dash = map (\c -> if c == '_' then '-' else c) suff in dash -- ----------------------------------------------------------------------------- -- Outputting messages from the compiler -- We want all messages to go through one place, so that we can -- redirect them if necessary. For example, when GHC is used as a -- library we might want to catch all messages that GHC tries to -- output and do something else with them. 
ifVerbose :: DynFlags -> Int -> IO () -> IO () ifVerbose dflags val act | verbosity dflags >= val = act | otherwise = return () errorMsg :: DynFlags -> MsgDoc -> IO () errorMsg dflags msg = log_action dflags dflags SevError noSrcSpan (defaultErrStyle dflags) msg fatalErrorMsg :: DynFlags -> MsgDoc -> IO () fatalErrorMsg dflags msg = fatalErrorMsg' (log_action dflags) dflags msg fatalErrorMsg' :: LogAction -> DynFlags -> MsgDoc -> IO () fatalErrorMsg' la dflags msg = la dflags SevFatal noSrcSpan (defaultErrStyle dflags) msg fatalErrorMsg'' :: FatalMessager -> String -> IO () fatalErrorMsg'' fm msg = fm msg compilationProgressMsg :: DynFlags -> String -> IO () compilationProgressMsg dflags msg = ifVerbose dflags 1 $ logOutput dflags defaultUserStyle (text msg) showPass :: DynFlags -> String -> IO () showPass dflags what = ifVerbose dflags 2 $ logInfo dflags defaultUserStyle (text "***" <+> text what <> colon) debugTraceMsg :: DynFlags -> Int -> MsgDoc -> IO () debugTraceMsg dflags val msg = ifVerbose dflags val $ logInfo dflags defaultDumpStyle msg putMsg :: DynFlags -> MsgDoc -> IO () putMsg dflags msg = logInfo dflags defaultUserStyle msg printInfoForUser :: DynFlags -> PrintUnqualified -> MsgDoc -> IO () printInfoForUser dflags print_unqual msg = logInfo dflags (mkUserStyle print_unqual AllTheWay) msg printOutputForUser :: DynFlags -> PrintUnqualified -> MsgDoc -> IO () printOutputForUser dflags print_unqual msg = logOutput dflags (mkUserStyle print_unqual AllTheWay) msg logInfo :: DynFlags -> PprStyle -> MsgDoc -> IO () logInfo dflags sty msg = log_action dflags dflags SevInfo noSrcSpan sty msg logOutput :: DynFlags -> PprStyle -> MsgDoc -> IO () -- Like logInfo but with SevOutput rather then SevInfo logOutput dflags sty msg = log_action dflags dflags SevOutput noSrcSpan sty msg prettyPrintGhcErrors :: ExceptionMonad m => DynFlags -> m a -> m a prettyPrintGhcErrors dflags = ghandle $ \e -> case e of PprPanic str doc -> pprDebugAndThen dflags panic (text str) doc 
PprSorry str doc -> pprDebugAndThen dflags sorry (text str) doc PprProgramError str doc -> pprDebugAndThen dflags pgmError (text str) doc _ -> liftIO $ throwIO e
forked-upstream-packages-for-ghcjs/ghc
compiler/main/ErrUtils.hs
bsd-3-clause
15,469
0
23
4,480
3,601
1,871
1,730
285
4
-- | -- TH.Lib contains lots of useful helper functions for -- generating and manipulating Template Haskell terms {-# LANGUAGE CPP #-} module Language.Haskell.TH.Lib where -- All of the exports from this module should -- be "public" functions. The main module TH -- re-exports them all. import Language.Haskell.TH.Syntax hiding (Role, InjectivityAnn) import qualified Language.Haskell.TH.Syntax as TH import Control.Monad( liftM, liftM2 ) import Data.Word( Word8 ) ---------------------------------------------------------- -- * Type synonyms ---------------------------------------------------------- type InfoQ = Q Info type PatQ = Q Pat type FieldPatQ = Q FieldPat type ExpQ = Q Exp type TExpQ a = Q (TExp a) type DecQ = Q Dec type DecsQ = Q [Dec] type ConQ = Q Con type TypeQ = Q Type type TyLitQ = Q TyLit type CxtQ = Q Cxt type PredQ = Q Pred type MatchQ = Q Match type ClauseQ = Q Clause type BodyQ = Q Body type GuardQ = Q Guard type StmtQ = Q Stmt type RangeQ = Q Range type SourceStrictnessQ = Q SourceStrictness type SourceUnpackednessQ = Q SourceUnpackedness type BangQ = Q Bang type BangTypeQ = Q BangType type VarBangTypeQ = Q VarBangType type StrictTypeQ = Q StrictType type VarStrictTypeQ = Q VarStrictType type FieldExpQ = Q FieldExp type RuleBndrQ = Q RuleBndr type TySynEqnQ = Q TySynEqn type PatSynDirQ = Q PatSynDir type PatSynArgsQ = Q PatSynArgs -- must be defined here for DsMeta to find it type Role = TH.Role type InjectivityAnn = TH.InjectivityAnn ---------------------------------------------------------- -- * Lowercase pattern syntax functions ---------------------------------------------------------- intPrimL :: Integer -> Lit intPrimL = IntPrimL wordPrimL :: Integer -> Lit wordPrimL = WordPrimL floatPrimL :: Rational -> Lit floatPrimL = FloatPrimL doublePrimL :: Rational -> Lit doublePrimL = DoublePrimL integerL :: Integer -> Lit integerL = IntegerL charL :: Char -> Lit charL = CharL charPrimL :: Char -> Lit charPrimL = CharPrimL stringL :: String -> Lit 
stringL = StringL stringPrimL :: [Word8] -> Lit stringPrimL = StringPrimL rationalL :: Rational -> Lit rationalL = RationalL litP :: Lit -> PatQ litP l = return (LitP l) varP :: Name -> PatQ varP v = return (VarP v) tupP :: [PatQ] -> PatQ tupP ps = do { ps1 <- sequence ps; return (TupP ps1)} unboxedTupP :: [PatQ] -> PatQ unboxedTupP ps = do { ps1 <- sequence ps; return (UnboxedTupP ps1)} conP :: Name -> [PatQ] -> PatQ conP n ps = do ps' <- sequence ps return (ConP n ps') infixP :: PatQ -> Name -> PatQ -> PatQ infixP p1 n p2 = do p1' <- p1 p2' <- p2 return (InfixP p1' n p2') uInfixP :: PatQ -> Name -> PatQ -> PatQ uInfixP p1 n p2 = do p1' <- p1 p2' <- p2 return (UInfixP p1' n p2') parensP :: PatQ -> PatQ parensP p = do p' <- p return (ParensP p') tildeP :: PatQ -> PatQ tildeP p = do p' <- p return (TildeP p') bangP :: PatQ -> PatQ bangP p = do p' <- p return (BangP p') asP :: Name -> PatQ -> PatQ asP n p = do p' <- p return (AsP n p') wildP :: PatQ wildP = return WildP recP :: Name -> [FieldPatQ] -> PatQ recP n fps = do fps' <- sequence fps return (RecP n fps') listP :: [PatQ] -> PatQ listP ps = do ps' <- sequence ps return (ListP ps') sigP :: PatQ -> TypeQ -> PatQ sigP p t = do p' <- p t' <- t return (SigP p' t') viewP :: ExpQ -> PatQ -> PatQ viewP e p = do e' <- e p' <- p return (ViewP e' p') fieldPat :: Name -> PatQ -> FieldPatQ fieldPat n p = do p' <- p return (n, p') ------------------------------------------------------------------------------- -- * Stmt bindS :: PatQ -> ExpQ -> StmtQ bindS p e = liftM2 BindS p e letS :: [DecQ] -> StmtQ letS ds = do { ds1 <- sequence ds; return (LetS ds1) } noBindS :: ExpQ -> StmtQ noBindS e = do { e1 <- e; return (NoBindS e1) } parS :: [[StmtQ]] -> StmtQ parS sss = do { sss1 <- mapM sequence sss; return (ParS sss1) } ------------------------------------------------------------------------------- -- * Range fromR :: ExpQ -> RangeQ fromR x = do { a <- x; return (FromR a) } fromThenR :: ExpQ -> ExpQ -> RangeQ fromThenR x y = do 
{ a <- x; b <- y; return (FromThenR a b) } fromToR :: ExpQ -> ExpQ -> RangeQ fromToR x y = do { a <- x; b <- y; return (FromToR a b) } fromThenToR :: ExpQ -> ExpQ -> ExpQ -> RangeQ fromThenToR x y z = do { a <- x; b <- y; c <- z; return (FromThenToR a b c) } ------------------------------------------------------------------------------- -- * Body normalB :: ExpQ -> BodyQ normalB e = do { e1 <- e; return (NormalB e1) } guardedB :: [Q (Guard,Exp)] -> BodyQ guardedB ges = do { ges' <- sequence ges; return (GuardedB ges') } ------------------------------------------------------------------------------- -- * Guard normalG :: ExpQ -> GuardQ normalG e = do { e1 <- e; return (NormalG e1) } normalGE :: ExpQ -> ExpQ -> Q (Guard, Exp) normalGE g e = do { g1 <- g; e1 <- e; return (NormalG g1, e1) } patG :: [StmtQ] -> GuardQ patG ss = do { ss' <- sequence ss; return (PatG ss') } patGE :: [StmtQ] -> ExpQ -> Q (Guard, Exp) patGE ss e = do { ss' <- sequence ss; e' <- e; return (PatG ss', e') } ------------------------------------------------------------------------------- -- * Match and Clause -- | Use with 'caseE' match :: PatQ -> BodyQ -> [DecQ] -> MatchQ match p rhs ds = do { p' <- p; r' <- rhs; ds' <- sequence ds; return (Match p' r' ds') } -- | Use with 'funD' clause :: [PatQ] -> BodyQ -> [DecQ] -> ClauseQ clause ps r ds = do { ps' <- sequence ps; r' <- r; ds' <- sequence ds; return (Clause ps' r' ds') } --------------------------------------------------------------------------- -- * Exp -- | Dynamically binding a variable (unhygenic) dyn :: String -> ExpQ dyn s = return (VarE (mkName s)) varE :: Name -> ExpQ varE s = return (VarE s) conE :: Name -> ExpQ conE s = return (ConE s) litE :: Lit -> ExpQ litE c = return (LitE c) appE :: ExpQ -> ExpQ -> ExpQ appE x y = do { a <- x; b <- y; return (AppE a b)} parensE :: ExpQ -> ExpQ parensE x = do { x' <- x; return (ParensE x') } uInfixE :: ExpQ -> ExpQ -> ExpQ -> ExpQ uInfixE x s y = do { x' <- x; s' <- s; y' <- y; return (UInfixE 
x' s' y') } infixE :: Maybe ExpQ -> ExpQ -> Maybe ExpQ -> ExpQ infixE (Just x) s (Just y) = do { a <- x; s' <- s; b <- y; return (InfixE (Just a) s' (Just b))} infixE Nothing s (Just y) = do { s' <- s; b <- y; return (InfixE Nothing s' (Just b))} infixE (Just x) s Nothing = do { a <- x; s' <- s; return (InfixE (Just a) s' Nothing)} infixE Nothing s Nothing = do { s' <- s; return (InfixE Nothing s' Nothing) } infixApp :: ExpQ -> ExpQ -> ExpQ -> ExpQ infixApp x y z = infixE (Just x) y (Just z) sectionL :: ExpQ -> ExpQ -> ExpQ sectionL x y = infixE (Just x) y Nothing sectionR :: ExpQ -> ExpQ -> ExpQ sectionR x y = infixE Nothing x (Just y) lamE :: [PatQ] -> ExpQ -> ExpQ lamE ps e = do ps' <- sequence ps e' <- e return (LamE ps' e') -- | Single-arg lambda lam1E :: PatQ -> ExpQ -> ExpQ lam1E p e = lamE [p] e lamCaseE :: [MatchQ] -> ExpQ lamCaseE ms = sequence ms >>= return . LamCaseE tupE :: [ExpQ] -> ExpQ tupE es = do { es1 <- sequence es; return (TupE es1)} unboxedTupE :: [ExpQ] -> ExpQ unboxedTupE es = do { es1 <- sequence es; return (UnboxedTupE es1)} condE :: ExpQ -> ExpQ -> ExpQ -> ExpQ condE x y z = do { a <- x; b <- y; c <- z; return (CondE a b c)} multiIfE :: [Q (Guard, Exp)] -> ExpQ multiIfE alts = sequence alts >>= return . 
MultiIfE letE :: [DecQ] -> ExpQ -> ExpQ letE ds e = do { ds2 <- sequence ds; e2 <- e; return (LetE ds2 e2) } caseE :: ExpQ -> [MatchQ] -> ExpQ caseE e ms = do { e1 <- e; ms1 <- sequence ms; return (CaseE e1 ms1) } doE :: [StmtQ] -> ExpQ doE ss = do { ss1 <- sequence ss; return (DoE ss1) } compE :: [StmtQ] -> ExpQ compE ss = do { ss1 <- sequence ss; return (CompE ss1) } arithSeqE :: RangeQ -> ExpQ arithSeqE r = do { r' <- r; return (ArithSeqE r') } listE :: [ExpQ] -> ExpQ listE es = do { es1 <- sequence es; return (ListE es1) } sigE :: ExpQ -> TypeQ -> ExpQ sigE e t = do { e1 <- e; t1 <- t; return (SigE e1 t1) } recConE :: Name -> [Q (Name,Exp)] -> ExpQ recConE c fs = do { flds <- sequence fs; return (RecConE c flds) } recUpdE :: ExpQ -> [Q (Name,Exp)] -> ExpQ recUpdE e fs = do { e1 <- e; flds <- sequence fs; return (RecUpdE e1 flds) } stringE :: String -> ExpQ stringE = litE . stringL fieldExp :: Name -> ExpQ -> Q (Name, Exp) fieldExp s e = do { e' <- e; return (s,e') } -- | @staticE x = [| static x |]@ staticE :: ExpQ -> ExpQ staticE = fmap StaticE unboundVarE :: Name -> ExpQ unboundVarE s = return (UnboundVarE s) -- ** 'arithSeqE' Shortcuts fromE :: ExpQ -> ExpQ fromE x = do { a <- x; return (ArithSeqE (FromR a)) } fromThenE :: ExpQ -> ExpQ -> ExpQ fromThenE x y = do { a <- x; b <- y; return (ArithSeqE (FromThenR a b)) } fromToE :: ExpQ -> ExpQ -> ExpQ fromToE x y = do { a <- x; b <- y; return (ArithSeqE (FromToR a b)) } fromThenToE :: ExpQ -> ExpQ -> ExpQ -> ExpQ fromThenToE x y z = do { a <- x; b <- y; c <- z; return (ArithSeqE (FromThenToR a b c)) } ------------------------------------------------------------------------------- -- * Dec valD :: PatQ -> BodyQ -> [DecQ] -> DecQ valD p b ds = do { p' <- p ; ds' <- sequence ds ; b' <- b ; return (ValD p' b' ds') } funD :: Name -> [ClauseQ] -> DecQ funD nm cs = do { cs1 <- sequence cs ; return (FunD nm cs1) } tySynD :: Name -> [TyVarBndr] -> TypeQ -> DecQ tySynD tc tvs rhs = do { rhs1 <- rhs; return (TySynD tc tvs 
rhs1) } dataD :: CxtQ -> Name -> [TyVarBndr] -> Maybe Kind -> [ConQ] -> CxtQ -> DecQ dataD ctxt tc tvs ksig cons derivs = do ctxt1 <- ctxt cons1 <- sequence cons derivs1 <- derivs return (DataD ctxt1 tc tvs ksig cons1 derivs1) newtypeD :: CxtQ -> Name -> [TyVarBndr] -> Maybe Kind -> ConQ -> CxtQ -> DecQ newtypeD ctxt tc tvs ksig con derivs = do ctxt1 <- ctxt con1 <- con derivs1 <- derivs return (NewtypeD ctxt1 tc tvs ksig con1 derivs1) classD :: CxtQ -> Name -> [TyVarBndr] -> [FunDep] -> [DecQ] -> DecQ classD ctxt cls tvs fds decs = do decs1 <- sequence decs ctxt1 <- ctxt return $ ClassD ctxt1 cls tvs fds decs1 instanceD :: CxtQ -> TypeQ -> [DecQ] -> DecQ instanceD = instanceWithOverlapD Nothing instanceWithOverlapD :: Maybe Overlap -> CxtQ -> TypeQ -> [DecQ] -> DecQ instanceWithOverlapD o ctxt ty decs = do ctxt1 <- ctxt decs1 <- sequence decs ty1 <- ty return $ InstanceD o ctxt1 ty1 decs1 sigD :: Name -> TypeQ -> DecQ sigD fun ty = liftM (SigD fun) $ ty forImpD :: Callconv -> Safety -> String -> Name -> TypeQ -> DecQ forImpD cc s str n ty = do ty' <- ty return $ ForeignD (ImportF cc s str n ty') infixLD :: Int -> Name -> DecQ infixLD prec nm = return (InfixD (Fixity prec InfixL) nm) infixRD :: Int -> Name -> DecQ infixRD prec nm = return (InfixD (Fixity prec InfixR) nm) infixND :: Int -> Name -> DecQ infixND prec nm = return (InfixD (Fixity prec InfixN) nm) pragInlD :: Name -> Inline -> RuleMatch -> Phases -> DecQ pragInlD name inline rm phases = return $ PragmaD $ InlineP name inline rm phases pragSpecD :: Name -> TypeQ -> Phases -> DecQ pragSpecD n ty phases = do ty1 <- ty return $ PragmaD $ SpecialiseP n ty1 Nothing phases pragSpecInlD :: Name -> TypeQ -> Inline -> Phases -> DecQ pragSpecInlD n ty inline phases = do ty1 <- ty return $ PragmaD $ SpecialiseP n ty1 (Just inline) phases pragSpecInstD :: TypeQ -> DecQ pragSpecInstD ty = do ty1 <- ty return $ PragmaD $ SpecialiseInstP ty1 pragRuleD :: String -> [RuleBndrQ] -> ExpQ -> ExpQ -> Phases -> DecQ pragRuleD 
n bndrs lhs rhs phases = do bndrs1 <- sequence bndrs lhs1 <- lhs rhs1 <- rhs return $ PragmaD $ RuleP n bndrs1 lhs1 rhs1 phases pragAnnD :: AnnTarget -> ExpQ -> DecQ pragAnnD target expr = do exp1 <- expr return $ PragmaD $ AnnP target exp1 pragLineD :: Int -> String -> DecQ pragLineD line file = return $ PragmaD $ LineP line file dataInstD :: CxtQ -> Name -> [TypeQ] -> Maybe Kind -> [ConQ] -> CxtQ -> DecQ dataInstD ctxt tc tys ksig cons derivs = do ctxt1 <- ctxt tys1 <- sequence tys cons1 <- sequence cons derivs1 <- derivs return (DataInstD ctxt1 tc tys1 ksig cons1 derivs1) newtypeInstD :: CxtQ -> Name -> [TypeQ] -> Maybe Kind -> ConQ -> CxtQ -> DecQ newtypeInstD ctxt tc tys ksig con derivs = do ctxt1 <- ctxt tys1 <- sequence tys con1 <- con derivs1 <- derivs return (NewtypeInstD ctxt1 tc tys1 ksig con1 derivs1) tySynInstD :: Name -> TySynEqnQ -> DecQ tySynInstD tc eqn = do eqn1 <- eqn return (TySynInstD tc eqn1) dataFamilyD :: Name -> [TyVarBndr] -> Maybe Kind -> DecQ dataFamilyD tc tvs kind = return $ DataFamilyD tc tvs kind openTypeFamilyD :: Name -> [TyVarBndr] -> FamilyResultSig -> Maybe InjectivityAnn -> DecQ openTypeFamilyD tc tvs res inj = return $ OpenTypeFamilyD (TypeFamilyHead tc tvs res inj) closedTypeFamilyD :: Name -> [TyVarBndr] -> FamilyResultSig -> Maybe InjectivityAnn -> [TySynEqnQ] -> DecQ closedTypeFamilyD tc tvs result injectivity eqns = do eqns1 <- sequence eqns return (ClosedTypeFamilyD (TypeFamilyHead tc tvs result injectivity) eqns1) -- These were deprecated in GHC 8.0 with a plan to remove them in 8.2. If you -- remove this check please also: -- 1. remove deprecated functions -- 2. remove CPP language extension from top of this module -- 3. remove the FamFlavour data type from Syntax module -- 4. 
make sure that all references to FamFlavour are gone from DsMeta, -- Convert, TcSplice (follows from 3) #if __GLASGOW_HASKELL__ >= 802 #error Remove deprecated familyNoKindD, familyKindD, closedTypeFamilyNoKindD and closedTypeFamilyKindD #endif {-# DEPRECATED familyNoKindD, familyKindD "This function will be removed in the next stable release. Use openTypeFamilyD/dataFamilyD instead." #-} familyNoKindD :: FamFlavour -> Name -> [TyVarBndr] -> DecQ familyNoKindD flav tc tvs = case flav of TypeFam -> return $ OpenTypeFamilyD (TypeFamilyHead tc tvs NoSig Nothing) DataFam -> return $ DataFamilyD tc tvs Nothing familyKindD :: FamFlavour -> Name -> [TyVarBndr] -> Kind -> DecQ familyKindD flav tc tvs k = case flav of TypeFam -> return $ OpenTypeFamilyD (TypeFamilyHead tc tvs (KindSig k) Nothing) DataFam -> return $ DataFamilyD tc tvs (Just k) {-# DEPRECATED closedTypeFamilyNoKindD, closedTypeFamilyKindD "This function will be removed in the next stable release. Use closedTypeFamilyD instead." #-} closedTypeFamilyNoKindD :: Name -> [TyVarBndr] -> [TySynEqnQ] -> DecQ closedTypeFamilyNoKindD tc tvs eqns = do eqns1 <- sequence eqns return (ClosedTypeFamilyD (TypeFamilyHead tc tvs NoSig Nothing) eqns1) closedTypeFamilyKindD :: Name -> [TyVarBndr] -> Kind -> [TySynEqnQ] -> DecQ closedTypeFamilyKindD tc tvs kind eqns = do eqns1 <- sequence eqns return (ClosedTypeFamilyD (TypeFamilyHead tc tvs (KindSig kind) Nothing) eqns1) roleAnnotD :: Name -> [Role] -> DecQ roleAnnotD name roles = return $ RoleAnnotD name roles standaloneDerivD :: CxtQ -> TypeQ -> DecQ standaloneDerivD ctxtq tyq = do ctxt <- ctxtq ty <- tyq return $ StandaloneDerivD ctxt ty defaultSigD :: Name -> TypeQ -> DecQ defaultSigD n tyq = do ty <- tyq return $ DefaultSigD n ty -- | Pattern synonym declaration patSynD :: Name -> PatSynArgsQ -> PatSynDirQ -> PatQ -> DecQ patSynD name args dir pat = do args' <- args dir' <- dir pat' <- pat return (PatSynD name args' dir' pat') -- | Pattern synonym type signature patSynSigD 
:: Name -> TypeQ -> DecQ patSynSigD nm ty = do ty' <- ty return $ PatSynSigD nm ty' tySynEqn :: [TypeQ] -> TypeQ -> TySynEqnQ tySynEqn lhs rhs = do lhs1 <- sequence lhs rhs1 <- rhs return (TySynEqn lhs1 rhs1) cxt :: [PredQ] -> CxtQ cxt = sequence normalC :: Name -> [BangTypeQ] -> ConQ normalC con strtys = liftM (NormalC con) $ sequence strtys recC :: Name -> [VarBangTypeQ] -> ConQ recC con varstrtys = liftM (RecC con) $ sequence varstrtys infixC :: Q (Bang, Type) -> Name -> Q (Bang, Type) -> ConQ infixC st1 con st2 = do st1' <- st1 st2' <- st2 return $ InfixC st1' con st2' forallC :: [TyVarBndr] -> CxtQ -> ConQ -> ConQ forallC ns ctxt con = liftM2 (ForallC ns) ctxt con gadtC :: [Name] -> [StrictTypeQ] -> TypeQ -> ConQ gadtC cons strtys ty = liftM2 (GadtC cons) (sequence strtys) ty recGadtC :: [Name] -> [VarStrictTypeQ] -> TypeQ -> ConQ recGadtC cons varstrtys ty = liftM2 (RecGadtC cons) (sequence varstrtys) ty ------------------------------------------------------------------------------- -- * Type forallT :: [TyVarBndr] -> CxtQ -> TypeQ -> TypeQ forallT tvars ctxt ty = do ctxt1 <- ctxt ty1 <- ty return $ ForallT tvars ctxt1 ty1 varT :: Name -> TypeQ varT = return . VarT conT :: Name -> TypeQ conT = return . 
ConT infixT :: TypeQ -> Name -> TypeQ -> TypeQ infixT t1 n t2 = do t1' <- t1 t2' <- t2 return (InfixT t1' n t2') uInfixT :: TypeQ -> Name -> TypeQ -> TypeQ uInfixT t1 n t2 = do t1' <- t1 t2' <- t2 return (UInfixT t1' n t2') parensT :: TypeQ -> TypeQ parensT t = do t' <- t return (ParensT t') appT :: TypeQ -> TypeQ -> TypeQ appT t1 t2 = do t1' <- t1 t2' <- t2 return $ AppT t1' t2' arrowT :: TypeQ arrowT = return ArrowT listT :: TypeQ listT = return ListT litT :: TyLitQ -> TypeQ litT l = fmap LitT l tupleT :: Int -> TypeQ tupleT i = return (TupleT i) unboxedTupleT :: Int -> TypeQ unboxedTupleT i = return (UnboxedTupleT i) sigT :: TypeQ -> Kind -> TypeQ sigT t k = do t' <- t return $ SigT t' k equalityT :: TypeQ equalityT = return EqualityT wildCardT :: TypeQ wildCardT = return WildCardT {-# DEPRECATED classP "As of template-haskell-2.10, constraint predicates (Pred) are just types (Type), in keeping with ConstraintKinds. Please use 'conT' and 'appT'." #-} classP :: Name -> [Q Type] -> Q Pred classP cla tys = do tysl <- sequence tys return (foldl AppT (ConT cla) tysl) {-# DEPRECATED equalP "As of template-haskell-2.10, constraint predicates (Pred) are just types (Type), in keeping with ConstraintKinds. Please see 'equalityT'." #-} equalP :: TypeQ -> TypeQ -> PredQ equalP tleft tright = do tleft1 <- tleft tright1 <- tright eqT <- equalityT return (foldl AppT eqT [tleft1, tright1]) promotedT :: Name -> TypeQ promotedT = return . 
PromotedT promotedTupleT :: Int -> TypeQ promotedTupleT i = return (PromotedTupleT i) promotedNilT :: TypeQ promotedNilT = return PromotedNilT promotedConsT :: TypeQ promotedConsT = return PromotedConsT noSourceUnpackedness, sourceNoUnpack, sourceUnpack :: SourceUnpackednessQ noSourceUnpackedness = return NoSourceUnpackedness sourceNoUnpack = return SourceNoUnpack sourceUnpack = return SourceUnpack noSourceStrictness, sourceLazy, sourceStrict :: SourceStrictnessQ noSourceStrictness = return NoSourceStrictness sourceLazy = return SourceLazy sourceStrict = return SourceStrict {-# DEPRECATED isStrict ["Use 'bang'. See https://ghc.haskell.org/trac/ghc/wiki/Migration/8.0. ", "Example usage: 'bang noSourceUnpackedness sourceStrict'"] #-} {-# DEPRECATED notStrict ["Use 'bang'. See https://ghc.haskell.org/trac/ghc/wiki/Migration/8.0. ", "Example usage: 'bang noSourceUnpackedness noSourceStrictness'"] #-} {-# DEPRECATED unpacked ["Use 'bang'. See https://ghc.haskell.org/trac/ghc/wiki/Migration/8.0. ", "Example usage: 'bang sourceUnpack sourceStrict'"] #-} isStrict, notStrict, unpacked :: Q Strict isStrict = bang noSourceUnpackedness sourceStrict notStrict = bang noSourceUnpackedness noSourceStrictness unpacked = bang sourceUnpack sourceStrict bang :: SourceUnpackednessQ -> SourceStrictnessQ -> BangQ bang u s = do u' <- u s' <- s return (Bang u' s') bangType :: BangQ -> TypeQ -> BangTypeQ bangType = liftM2 (,) varBangType :: Name -> BangTypeQ -> VarBangTypeQ varBangType v bt = do (b, t) <- bt return (v, b, t) {-# DEPRECATED strictType "As of @template-haskell-2.11.0.0@, 'StrictType' has been replaced by 'BangType'. Please use 'bangType' instead." #-} strictType :: Q Strict -> TypeQ -> StrictTypeQ strictType = bangType {-# DEPRECATED varStrictType "As of @template-haskell-2.11.0.0@, 'VarStrictType' has been replaced by 'VarBangType'. Please use 'varBangType' instead." 
#-} varStrictType :: Name -> StrictTypeQ -> VarStrictTypeQ varStrictType = varBangType -- * Type Literals numTyLit :: Integer -> TyLitQ numTyLit n = if n >= 0 then return (NumTyLit n) else fail ("Negative type-level number: " ++ show n) strTyLit :: String -> TyLitQ strTyLit s = return (StrTyLit s) ------------------------------------------------------------------------------- -- * Kind plainTV :: Name -> TyVarBndr plainTV = PlainTV kindedTV :: Name -> Kind -> TyVarBndr kindedTV = KindedTV varK :: Name -> Kind varK = VarT conK :: Name -> Kind conK = ConT tupleK :: Int -> Kind tupleK = TupleT arrowK :: Kind arrowK = ArrowT listK :: Kind listK = ListT appK :: Kind -> Kind -> Kind appK = AppT starK :: Kind starK = StarT constraintK :: Kind constraintK = ConstraintT ------------------------------------------------------------------------------- -- * Type family result noSig :: FamilyResultSig noSig = NoSig kindSig :: Kind -> FamilyResultSig kindSig = KindSig tyVarSig :: TyVarBndr -> FamilyResultSig tyVarSig = TyVarSig ------------------------------------------------------------------------------- -- * Injectivity annotation injectivityAnn :: Name -> [Name] -> InjectivityAnn injectivityAnn = TH.InjectivityAnn ------------------------------------------------------------------------------- -- * Role nominalR, representationalR, phantomR, inferR :: Role nominalR = NominalR representationalR = RepresentationalR phantomR = PhantomR inferR = InferR ------------------------------------------------------------------------------- -- * Callconv cCall, stdCall, cApi, prim, javaScript :: Callconv cCall = CCall stdCall = StdCall cApi = CApi prim = Prim javaScript = JavaScript ------------------------------------------------------------------------------- -- * Safety unsafe, safe, interruptible :: Safety unsafe = Unsafe safe = Safe interruptible = Interruptible ------------------------------------------------------------------------------- -- * FunDep funDep :: [Name] -> [Name] -> 
FunDep funDep = FunDep ------------------------------------------------------------------------------- -- * FamFlavour typeFam, dataFam :: FamFlavour typeFam = TypeFam dataFam = DataFam ------------------------------------------------------------------------------- -- * RuleBndr ruleVar :: Name -> RuleBndrQ ruleVar = return . RuleVar typedRuleVar :: Name -> TypeQ -> RuleBndrQ typedRuleVar n ty = ty >>= return . TypedRuleVar n ------------------------------------------------------------------------------- -- * AnnTarget valueAnnotation :: Name -> AnnTarget valueAnnotation = ValueAnnotation typeAnnotation :: Name -> AnnTarget typeAnnotation = TypeAnnotation moduleAnnotation :: AnnTarget moduleAnnotation = ModuleAnnotation ------------------------------------------------------------------------------- -- * Pattern Synonyms (sub constructs) unidir, implBidir :: PatSynDirQ unidir = return Unidir implBidir = return ImplBidir explBidir :: [ClauseQ] -> PatSynDirQ explBidir cls = do cls' <- sequence cls return (ExplBidir cls') prefixPatSyn :: [Name] -> PatSynArgsQ prefixPatSyn args = return $ PrefixPatSyn args recordPatSyn :: [Name] -> PatSynArgsQ recordPatSyn sels = return $ RecordPatSyn sels infixPatSyn :: Name -> Name -> PatSynArgsQ infixPatSyn arg1 arg2 = return $ InfixPatSyn arg1 arg2 -------------------------------------------------------------- -- * Useful helper function appsE :: [ExpQ] -> ExpQ appsE [] = error "appsE []" appsE [x] = x appsE (x:y:zs) = appsE ( (appE x y) : zs ) -- | Return the Module at the place of splicing. Can be used as an -- input for 'reifyModule'. thisModule :: Q Module thisModule = do loc <- location return $ Module (mkPkgName $ loc_package loc) (mkModName $ loc_module loc)
vikraman/ghc
libraries/template-haskell/Language/Haskell/TH/Lib.hs
bsd-3-clause
25,510
0
13
6,283
8,280
4,258
4,022
-1
-1
module Main where import System.Posix.CircularBuffer main :: IO () main = do wb <- createBuffer "/abuf" "/abuf" 4 384 :: IO (WriteBuffer Int) mapM_ (putBuffer wb) [1..16] removeBuffer wb
smunix/shared-buffer
tests/Writer.hs
bsd-3-clause
201
0
10
43
78
39
39
7
1
import Data.List import System.Process import System.IO import System.Exit import Text.Printf import Control.Applicative import Control.Concurrent import Control.Concurrent.Spawn -- mass 1 - 50 -- radius 0.05 - 1.0 -- orbit time 1 - 5 -- sim time 1 - 5 bin = "./milkyway_nbody" outFile = "runtime_results" inFile = "arst.js" histogram = "histogram" nthreads = 4 --nbodySets = [ 1024, 2048, 3072, 4096, 8192, 10000, 15000 ] manyNbodySets = [ 100, 200, 300, 500, 750, 1000, 2000, 3000, 4000, 5000, 8000, 10000, 12000, 15000, 20000, 30000, 40000, 50000, 75000, 100000 ] simpleArguments = arguments 1234 inFile histogram data FitParams = FitParams { mass :: Double, radius :: Double, reverseTime :: Double, forwardTime :: Double } deriving (Eq, Show) data Workunit = Workunit { nbody :: Int, fitParams :: FitParams } deriving (Eq, Show) high = FitParams { mass = 50.0, radius = 1.0, reverseTime = 5.0, forwardTime = 5.0 } low = FitParams { mass = 1.0, radius = 0.05, reverseTime = 1.0, forwardTime = 1.0 } steps = FitParams { mass = 5.0, radius = 0.1, reverseTime = 1.0, forwardTime = 1.0 } fpRange :: (FitParams -> Double) -> [Double] fpRange f = [f low, f low + f steps .. 
f high] sample = FitParams { mass = 15.0, radius = 0.2, reverseTime = 3.945, forwardTime = 4.0 } findWorkunit :: Int -> [Workunit] findWorkunit n = map (Workunit n) fps where fps = [ FitParams m r 4.0 4.0 | m <- fpRange mass, r <- fpRange radius ] -- Workunits with different numbers of bodies, each with the same set of bodies findWorkunits :: [Int] -> [Workunit] findWorkunits ns = concatMap findWorkunit ns -- One workunit with different numbers of bodiesp differentBodies :: FitParams -> [Int] -> [Workunit] differentBodies fp = map (flip Workunit fp) arguments :: Int -> FilePath -> FilePath -> Workunit -> [String] arguments seed file histogram wu = [ "-t", "-f", file, "-h", histogram, "-e", show seed, "-u", show (nbody wu), "-np", show (length params), "-p" ] ++ params where params = map show [ mass fp, radius fp, reverseTime fp, forwardTime fp ] fp = fitParams wu -- assumes only thing written to stdout is "<run_time> %g </run_time>\n" -- worst function ever readTimeTag :: String -> Double readTimeTag str | Just rest <- stripPrefix begin str = let numLen = length rest - endlen in if end `isSuffixOf` rest then read $ take numLen rest else err | otherwise = err where endlen = length end begin = "<run_time> " end = " </run_time>\n" err = (-1.0) readResult :: Handle -> IO Double readResult h = do !x <- hGetContents h return (readTimeTag x) wuString :: Workunit -> Double -> String wuString wu val = let fp = fitParams wu in printf "%d, %g, %g, %g, %g, %g\n" (nbody wu) (mass fp) (radius fp) (reverseTime fp) (forwardTime fp) val runWorkunit :: Chan String -> Workunit -> IO () runWorkunit results wu = do (_, pout, _, h) <- runInteractiveProcess bin (simpleArguments wu) Nothing Nothing rc <- waitForProcess h rtime <- readResult pout hClose pout let !resString = if rc /= ExitSuccess then printf "Failed to run process: %s\n" (show rc) else wuString wu rtime hPrintf stdout "Completed workunit in %g: %s\n" rtime (show wu) writeChan results resString runWorkunits results runPool 
wus = do mapM (spawn . runPool . runWorkunit results) wus readChanN :: Int -> Chan a -> IO [a] readChanN n chan = take n <$> getChanContents chan main = do results <- newChan :: IO (Chan String) runPool <- pool nthreads let wus = differentBodies sample manyNbodySets --wus = findWorkunits nbodySets n = length wus hPrintf stdout "Running %d workunits\n" n runWorkunits results runPool wus out <- concat <$> readChanN n results putStrLn out writeFile outFile out
MarchM4/Milkyway-home-server-expansion
tools/nbody_run_times/FindNBodyTimes.hs
gpl-3.0
5,010
0
13
2,004
1,265
671
594
-1
-1
module Main (main) where -- This module test whether joining two maps with equal keys works correctly. import FiniteMaps (FiniteMap, unitFM, toListFM, joinFM, joinCombFM, addToFM) main :: IO () main = let map1 = addToFM 1 () (unitFM 2 ()) map2 = addToFM 1 () (unitFM 3 ()) -- finalmap = joinCombFM const map1 map2 finalmap = joinFM map1 map2 in putStr (maptostring finalmap) maptostring :: (FiniteMap Int ()) -> String maptostring fmap = let list = toListFM fmap in concat (map (show . fst) list) ++ "\n"
phischu/gtk2hs
tools/c2hs/base/general/tests/doubles.hs
lgpl-3.0
555
0
12
138
185
97
88
14
1
addMaybe1 :: Maybe Int addMaybe1 = Just 4 >>= \x -> Just 5 >>= \y -> return (x+y) addMaybe2 :: Maybe Int addMaybe2 = do x <- Just 4 y <- Just 5 return (x+y) profileSetup :: IO String profileSetup = do putStrLn "What is your name?" name <- getLine putStrLn "What is your game?" game <- getLine return (name ++ "'s name & " ++ game ++ "'s my game") profileSetup' :: IO String profileSetup' = do let q1 = "What is your name?" let q2 = "What is your game?" putStrLn q1 name <- getLine putStrLn q2 game <- getLine return (name ++ "'s name & " ++ game ++ "'s my game") {- Bind Operator f :: a -> M b bind (M a ) f = M b λ> λ> (Just 2) `bind` (f 10) `bind` (f 0) Nothing λ> (Just 2) `bind` (f 0) `bind` (f 3) Nothing λ> -} bind Nothing f = Nothing bind (Just x) f = case (f x) of Nothing -> Nothing Just a -> Just a -- findCustomer :: (Eq a, Num a) => a -> Either [Char] [Char] findCustomerByName :: Num b => [Char] -> Either [Char] b findCustomerByName "John Galt" = Right 0 findCustomerByName "Ayn Rand" = Right 1 findCustomerByName "Abrahan Lincol" = Right 2 findCustomerByName "James Madison" = Right 3 findCustomerByName _ = Left "Customer Not Found" findCustomerByID :: (Eq a, Num a) => a -> Either [Char] [Char] findCustomerByID 0 = Right "John Galt" findCustomerByID 1 = Right "Ayn Rand" findCustomerByID 2 = Right "Abrahan Lincol" findCustomerByID 3 = Right "James Madison" findCustomerByID 4 = Right "Richard Feyman" findCustomerByID _ = Left "Customer ID doesn't exist" findOrder 0 = Right (1, "Pizza") findOrder 1 = Right (2, "Cake") findOrder 2 = Right (1, "Cake") findOrder 3 = Right (3, "Donuts") findOrder 4 = Right (0, "Ice Cream") findOrder 5 = Right (0, "Soda Pop") findOrder 6 = Right (10, "Soda Pop") findOrder _ = Left "Order not Found" greetCustomer :: (Eq a, Num a) => a -> IO () greetCustomer customerID = case findCustomerByID customerID of Right customer -> print ("Hello " ++ customer) Left errorMessage -> print ("Erro: " ++ errorMessage)
junnf/Functional-Programming
codes/Algebra.hs
unlicense
2,217
0
11
619
683
331
352
57
2
{-# LANGUAGE OverloadedStrings #-} module Database.Persist.Sql.Util ( parseEntityValues , entityColumnNames , entityColumnCount , isIdField , hasCompositeKey , dbIdColumns , dbIdColumnsEsc , dbColumns ) where import Data.Maybe (isJust) import Data.Monoid ((<>)) import Data.Text (Text, pack) import Database.Persist ( Entity(Entity), EntityDef, EntityField, HaskellName(HaskellName) , PersistEntity, PersistValue , keyFromValues, fromPersistValues, fieldDB, entityId, entityPrimary , entityFields, entityKeyFields, fieldHaskell, compositeFields, persistFieldDef , DBName) import Database.Persist.Sql.Types (Sql, SqlBackend, connEscapeName) entityColumnNames :: EntityDef -> SqlBackend -> [Sql] entityColumnNames ent conn = (if hasCompositeKey ent then [] else [connEscapeName conn $ fieldDB (entityId ent)]) <> map (connEscapeName conn . fieldDB) (entityFields ent) entityColumnCount :: EntityDef -> Int entityColumnCount e = length (entityFields e) + if hasCompositeKey e then 0 else 1 hasCompositeKey :: EntityDef -> Bool hasCompositeKey = isJust . entityPrimary dbIdColumns :: SqlBackend -> EntityDef -> [Text] dbIdColumns conn = dbIdColumnsEsc (connEscapeName conn) dbIdColumnsEsc :: (DBName -> Text) -> EntityDef -> [Text] dbIdColumnsEsc esc t = map (esc . fieldDB) $ entityKeyFields t dbColumns :: SqlBackend -> EntityDef -> [Text] dbColumns conn t = case entityPrimary t of Just _ -> flds Nothing -> escapeDB (entityId t) : flds where escapeDB = connEscapeName conn . fieldDB flds = map escapeDB (entityFields t) parseEntityValues :: PersistEntity record => EntityDef -> [PersistValue] -> Either Text (Entity record) parseEntityValues t vals = case entityPrimary t of Just pdef -> let pks = map fieldHaskell $ compositeFields pdef keyvals = map snd . filter ((`elem` pks) . 
fst) $ zip (map fieldHaskell $ entityFields t) vals in fromPersistValuesComposite' keyvals vals Nothing -> fromPersistValues' vals where fromPersistValues' (kpv:xs) = -- oracle returns Double case fromPersistValues xs of Left e -> Left e Right xs' -> case keyFromValues [kpv] of Left _ -> error $ "fromPersistValues': keyFromValues failed on " ++ show kpv Right k -> Right (Entity k xs') fromPersistValues' xs = Left $ pack ("error in fromPersistValues' xs=" ++ show xs) fromPersistValuesComposite' keyvals xs = case fromPersistValues xs of Left e -> Left e Right xs' -> case keyFromValues keyvals of Left _ -> error "fromPersistValuesComposite': keyFromValues failed" Right key -> Right (Entity key xs') isIdField :: PersistEntity record => EntityField record typ -> Bool isIdField f = fieldHaskell (persistFieldDef f) == HaskellName "Id"
jasonzoladz/persistent
persistent/Database/Persist/Sql/Util.hs
mit
2,980
0
17
722
861
447
414
66
7
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="pt-BR"> <title>Network Add-on</title> <maps> <homeID>addon.network</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
thc202/zap-extensions
addOns/network/src/main/javahelp/help_pt_BR/helpset_pt_BR.hs
apache-2.0
969
77
67
156
413
209
204
-1
-1
module Oops where -- highlight >x +< in f, then select IntroNewDef main = print ((f 1) 2, gaga True) where f x y = g + y where g = x gaga h = ("g: " ++) (show h)
kmate/HaRe
old/testing/introNewDef/Oops_TokOut.hs
bsd-3-clause
197
0
9
75
75
40
35
5
1
{-# LANGUAGE DeriveDataTypeable #-} -- $Id: HsGuardsStruct.hs,v 1.1 2001/07/25 01:15:30 moran Exp $ module HsGuardsStruct where import Data.Generics import SrcLoc1 data HsAlt e p ds = HsAlt SrcLoc p (HsRhs e) {-where-} ds deriving (Ord,Read, Eq, Show, Data, Typeable) data HsRhs e = HsBody e | HsGuard [(SrcLoc, e, e)] deriving (Ord,Read, Eq, Show, Data, Typeable)
kmate/HaRe
old/tools/base/AST/HsGuardsStruct.hs
bsd-3-clause
396
0
8
87
120
70
50
11
0
module LiftOneLevel.WhereIn6 where --A definition can be lifted from a where or let into the surrounding binding group. --Lifting a definition widens the scope of the definition. --In this example, lift 'pow' defined in 'sq' sumSquares x y = sq x + sq y where sq::Int->Int sq 0 = 0 sq z = z^pow where pow=2 anotherFun 0 y = sq y where sq x=x^2
RefactoringTools/HaRe
test/testdata/LiftOneLevel/WhereIn6.hs
bsd-3-clause
437
0
8
158
94
49
45
8
2
module Fixme where import Language.Haskell.Liquid.Prelude {-@ LIQUID "--no-termination" @-} {-@ measure containsV @-} {-@ measure binderContainsV @-} binderContainsV :: Binder n -> Bool binderContainsV B = True binderContainsV (M x) = containsV x data Binder n = B | M (TT n) data TT n = V Int | Other | Bind (Binder n) (TT n) containsV :: TT n -> Bool containsV (V i) = True containsV (Bind b body) = (binderContainsV b) || (containsV body) -- containsV (App f arg) = (containsV f) || (containsV arg) -- containsV (Proj tm i) = containsV tm containsV _ = False prop1 = liquidAssert (containsV $ V 7) prop2 = liquidAssert (containsV $ Bind (M (V 5)) Other)
mightymoose/liquidhaskell
tests/pos/MeasureContains.hs
bsd-3-clause
706
0
12
166
220
118
102
13
1
{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TemplateHaskell #-} module Yesod.Default.Main ( defaultMain , defaultMainLog , defaultRunner , defaultDevelApp , LogFunc ) where import Yesod.Default.Config import Network.Wai (Application) import Network.Wai.Handler.Warp (runSettings, defaultSettings, setPort, setHost, setOnException) import qualified Network.Wai.Handler.Warp as Warp import System.Directory (doesDirectoryExist, removeDirectoryRecursive) import Network.Wai.Middleware.Gzip (gzip, GzipFiles (GzipCacheFolder), gzipFiles, def) import Network.Wai.Middleware.Autohead (autohead) import Network.Wai.Middleware.Jsonp (jsonp) import Control.Monad (when) import System.Environment (getEnvironment) import Data.Maybe (fromMaybe) import Safe (readMay) import Control.Monad.Logger (Loc, LogSource, LogLevel (LevelError), liftLoc) import System.Log.FastLogger (LogStr, toLogStr) import Language.Haskell.TH.Syntax (qLocation) #ifndef WINDOWS import qualified System.Posix.Signals as Signal import Control.Concurrent (forkIO, killThread) import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar) #endif -- | Run your app, taking environment and port settings from the -- commandline. -- -- @'fromArgs'@ helps parse a custom configuration -- -- > main :: IO () -- > main = defaultMain (fromArgs parseExtra) makeApplication -- defaultMain :: (Show env, Read env) => IO (AppConfig env extra) -> (AppConfig env extra -> IO Application) -> IO () defaultMain load getApp = do config <- load app <- getApp config runSettings ( setPort (appPort config) $ setHost (appHost config) $ defaultSettings ) app type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO () -- | Same as @defaultMain@, but gets a logging function back as well as an -- @Application@ to install Warp exception handlers. 
-- -- Since 1.2.5 defaultMainLog :: (Show env, Read env) => IO (AppConfig env extra) -> (AppConfig env extra -> IO (Application, LogFunc)) -> IO () defaultMainLog load getApp = do config <- load (app, logFunc) <- getApp config runSettings ( setPort (appPort config) $ setHost (appHost config) $ setOnException (const $ \e -> when (shouldLog' e) $ logFunc $(qLocation >>= liftLoc) "yesod" LevelError (toLogStr $ "Exception from Warp: " ++ show e)) $ defaultSettings ) app where shouldLog' = Warp.defaultShouldDisplayException -- | Run your application continously, listening for SIGINT and exiting -- when received -- -- > withYourSite :: AppConfig DefaultEnv -> Logger -> (Application -> IO a) -> IO () -- > withYourSite conf logger f = do -- > Settings.withConnectionPool conf $ \p -> do -- > runConnectionPool (runMigration yourMigration) p -- > defaultRunner f $ YourSite conf logger p defaultRunner :: (Application -> IO ()) -> Application -> IO () defaultRunner f app = do -- clear the .static-cache so we don't have stale content exists <- doesDirectoryExist staticCache when exists $ removeDirectoryRecursive staticCache #ifdef WINDOWS f (middlewares app) #else tid <- forkIO $ f (middlewares app) >> return () flag <- newEmptyMVar _ <- Signal.installHandler Signal.sigINT (Signal.CatchOnce $ do putStrLn "Caught an interrupt" killThread tid putMVar flag ()) Nothing takeMVar flag #endif where middlewares = gzip gset . jsonp . 
autohead gset = def { gzipFiles = GzipCacheFolder staticCache } staticCache = ".static-cache" -- | Run your development app using a custom environment type and loader -- function defaultDevelApp :: (Show env, Read env) => IO (AppConfig env extra) -- ^ A means to load your development @'AppConfig'@ -> (AppConfig env extra -> IO Application) -- ^ Get your @Application@ -> IO (Int, Application) defaultDevelApp load getApp = do conf <- load env <- getEnvironment let p = fromMaybe (appPort conf) $ lookup "PORT" env >>= readMay pdisplay = fromMaybe p $ lookup "DISPLAY_PORT" env >>= readMay putStrLn $ "Devel application launched: http://localhost:" ++ show pdisplay app <- getApp conf return (p, app)
pikajude/yesod
yesod/Yesod/Default/Main.hs
mit
4,442
0
19
1,014
943
519
424
87
1
{-# LANGUAGE TypeFamilies #-} module ShouldCompile where class C a where data Sd a :: * data Sn a :: * type St a :: * instance C Int where data Sd Int = SdC Char newtype Sn Int = SnC Char type St Int = Char
urbanslug/ghc
testsuite/tests/indexed-types/should_compile/Simple1.hs
bsd-3-clause
228
0
7
68
77
44
33
10
0
module LilRender.Image.Immutable ( Image(..) , ImageIndexType , (<!>) , ImageConvertible , toImage , fromImage , makeImage ) where import Control.DeepSeq import qualified Data.Vector.Storable as V import GHC.Generics (Generic) import LilRender.Color import LilRender.Math.Geometry data Image = Image { _storage :: V.Vector RGBColor , _width :: Int , _height :: Int } deriving (Eq, Generic) instance Show Image where show (Image _ w h) = "Image (" ++ show w ++ "x" ++ show h ++ ")\n" instance NFData Image type ImageIndexType = Screen (Point2 Int) {-# INLINE (<!>) #-} (<!>) :: Image -> ImageIndexType -> RGBColor Image { _storage = storage, _width = width } <!> (Screen (Point2 x y)) = storage `V.unsafeIndex` (width * y + x) class ImageConvertible a where toImage :: a -> Image fromImage :: Image -> a makeImage :: Int -> Int -> RGBColor -> Image makeImage width height color = Image { _storage = V.replicate (width * height) color , _width = width , _height = height }
SASinestro/lil-render
src/LilRender/Image/Immutable.hs
isc
1,109
0
10
301
349
198
151
33
1
-- | Thin wrappers around the Cabal ("Distribution.*") build machinery:
-- autogen-file generation, preprocessing, and GHC flag extraction.
module Shaker.CabalInterface
 where

import Control.Monad.Reader
import Control.Arrow
import Distribution.PackageDescription
import Distribution.Simple.Build
import Distribution.Simple.GHC(ghcOptions)
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.PreProcess
import Distribution.Verbosity
import Shaker.Type
import System.FilePath

-- | Write Cabal's autogenerated files (e.g. Paths_*) for the package.
generateAutogenFiles :: LocalBuildInfo -> IO ()
generateAutogenFiles lbi = writeAutogenFiles normal (localPkgDescr lbi) lbi

-- | Run Cabal's standard preprocessors over the package sources, using
-- the 'LocalBuildInfo' from the Shaker reader environment.
applyPreprocessSources :: Shaker IO ()
applyPreprocessSources = do
  lbi <- asks shakerLocalBuildInfo
  let pkgDescription = localPkgDescr lbi
  lift $ preprocessSources pkgDescription lbi False normal knownSuffixHandlers

-- | Directory where Cabal places preprocessed sources for an executable:
-- @dist-build-dir/exeName/exeName-tmp@.
getPreprocessorDirectory :: LocalBuildInfo -> Executable -> FilePath
getPreprocessorDirectory lbi Executable {exeName = exeName'}= buildDir lbi </> exeName' </> exeName' ++ "-tmp"

-- | GHC flags for building an executable, plus an @-i@ include path for
-- its preprocessor output directory.
getCompileFlagsForExecutable :: LocalBuildInfo -> Executable -> ComponentLocalBuildInfo -> [String]
getCompileFlagsForExecutable lbi executable componentLocalBuildInfo = ghcOptions lbi (buildInfo executable) componentLocalBuildInfo defaultDistDir ++ preprocessLocation
  where preprocessLocation = ["-i" ++ getPreprocessorDirectory lbi executable]

-- | GHC flags for building the library component.
--
-- NOTE(review): uses 'head' on the package's executables to pick the
-- preprocessor directory — this is partial and will crash on a package
-- with no executables; presumably Shaker only targets packages that
-- have at least one. TODO confirm.
getCompileFlagsForLibrary :: LocalBuildInfo -> Library -> ComponentLocalBuildInfo -> [String]
getCompileFlagsForLibrary lbi lib componentLocalBuildInfo = preprocessLocation : ghcOptions lbi (libBuildInfo lib) componentLocalBuildInfo defaultDistDir
  where preprocessLocation = "-i" ++getPreprocessorDirectory lbi (localPkgDescr >>> executables >>> head $ lbi)
bonnefoa/Shaker
src/Shaker/CabalInterface.hs
isc
1,603
0
12
176
357
187
170
29
1
module Network.Gravatar ( gravatar -- * Options , GravatarOptions(..) , Size(..) , DefaultImg(..) , ForceDefault(..) , Rating(..) , Scheme(..) , def , defaultConfig ) where import Data.Default (Default(..)) import Data.Digest.Pure.MD5 (md5) import Data.List (intercalate) import Data.Maybe (catMaybes) import Data.Text (Text) import Network.HTTP.Base (urlEncode) import qualified Data.ByteString.Lazy.Char8 as C8 import qualified Data.Text as T class GravatarParam a where toParam :: a -> Maybe (String, String) -- | Size in pixels newtype Size = Size Int instance GravatarParam Size where toParam (Size i) = Just ("s", show i) -- | Always show the default image newtype ForceDefault = ForceDefault Bool instance GravatarParam ForceDefault where toParam (ForceDefault True) = Just ("f", "y") toParam (ForceDefault False) = Nothing -- | Image to show when an avatar is not available data DefaultImg = Custom String -- ^ supply your own url | NotFound -- ^ do not load an image return a 404 | MM -- ^ mystery man | Identicon -- ^ geometric pattern based on the hash | MonsterId -- ^ a generated monster | Wavatar -- ^ generated faces | Retro -- ^ generated, 8-bit arcade style pixelated face instance GravatarParam DefaultImg where toParam (Custom s) = Just ("d", urlEncode s) toParam NotFound = Just ("d", "404") toParam MM = Just ("d", "mm") toParam Identicon = Just ("d", "identicon") toParam MonsterId = Just ("d", "monsterid") toParam Wavatar = Just ("d", "wavatar") toParam Retro = Just ("d", "retro") -- | Limit the returned images by rating data Rating = G | PG | R | X instance GravatarParam Rating where toParam G = Just ("r", "g") toParam PG = Just ("r", "pg") toParam R = Just ("r", "r") toParam X = Just ("r", "x") data GravatarOptions = GravatarOptions { gSize :: Maybe Size -- ^ default @Nothing@ , gDefault :: Maybe DefaultImg -- ^ default @Nothing@ , gForceDefault :: ForceDefault -- ^ default @False@ , gRating :: Maybe Rating -- ^ default @Nothing@ , gScheme :: Scheme -- ^ default 
@Https@ } -- | Scheme to use for image URLs data Scheme = Http -- ^ @http://@ | Https -- ^ @https://@ | None -- ^ @//@ instance Show Scheme where show Http = "http://" show Https = "https://" show None = "//" instance Default GravatarOptions where def = defaultConfig -- | Available for backwards compatability, using @def@ is advised defaultConfig :: GravatarOptions defaultConfig = GravatarOptions { gSize = Nothing , gDefault = Nothing , gForceDefault = ForceDefault False , gRating = Nothing , gScheme = Https } -- | Return the avatar for the given email using the provided options -- -- >>> gravatar def "pbrisbin@gmail.com" -- "https://www.gravatar.com/avatar/2be502055b6c21ff470730beead2a998" -- -- Whitespace is trimmed. -- -- >>> gravatar def " pbrisbin@gmail.com " -- "https://www.gravatar.com/avatar/2be502055b6c21ff470730beead2a998" -- -- Case is ignored. -- -- >>> gravatar def "PBrisbin@GMAIL.com" -- "https://www.gravatar.com/avatar/2be502055b6c21ff470730beead2a998" -- -- Options are supported. -- -- >>> :{ -- let opts = GravatarOptions -- { gSize = Just $ Size 24 -- , gDefault = Just NotFound -- , gForceDefault = ForceDefault True -- , gRating = Just G -- , gScheme = Http -- } -- in gravatar opts "pbrisbin@gmail.com" -- :} -- "http://www.gravatar.com/avatar/2be502055b6c21ff470730beead2a998?s=24&d=404&f=y&r=g" -- gravatar :: GravatarOptions -> Text -> String gravatar opts e = concat [ show $ gScheme opts , "www.gravatar.com/avatar/" , hashEmail e , queryString opts ] -- | <http://en.gravatar.com/site/implement/hash/> hashEmail :: Text -> String hashEmail = show . md5 . C8.pack . T.unpack . T.toLower . T.strip queryString :: GravatarOptions -> String queryString opts = case queryParts of [] -> "" ps -> "?" 
++ intercalate "&" (map queryPart ps) where queryParts :: [(String, String)] queryParts = catMaybes [ toParam =<< gSize opts , toParam =<< gDefault opts , toParam $ gForceDefault opts , toParam =<< gRating opts ] queryPart :: (String, String) -> String queryPart (k, v) = k ++ "=" ++ v
pbrisbin/gravatar
src/Network/Gravatar.hs
mit
4,468
0
11
1,082
970
567
403
92
2
-- | GHCJS bindings for Atom's command registry: register editor commands
-- from Haskell callbacks.
module Atom.CommandRegistry where

import Control.Monad
import Data.Text (Text)
import GHCJS.Foreign
import GHCJS.Types

import Atom.TextEditor (TextEditor)

-- Opaque phantom type for DOM/Atom command events.
data Event_
type Event = JSRef Event_

-- Raw binding to @atom.commands.add(target, commandName, callback)@.
foreign import javascript unsafe "atom.commands.add($1, $2, $3)"
  js_addCommand :: JSString -> JSString -> JSFun (Event -> IO ()) -> IO ()

-- Extracts the 'TextEditor' model from the event's target element.
foreign import javascript unsafe "$1.target.getModel()"
  js_getTextEditor :: Event -> IO TextEditor

-- | CSS-style selector for the elements the command applies to.
type Target = Text
-- | Command name, e.g. @"my-package:do-thing"@.
type CommandName = Text

-- | Register an Atom command that receives the active 'TextEditor'.
--
-- NOTE(review): the callback is created with 'AlwaysRetain', so it is
-- never released; fine for process-lifetime commands, but a leak if
-- called repeatedly — confirm intended usage.
addCommand :: Target -> CommandName -> (TextEditor -> IO ()) -> IO ()
addCommand target commandName action = do
  let wrappedAction event = action =<< js_getTextEditor event
  callback <- asyncCallback1 AlwaysRetain wrappedAction
  js_addCommand (toJSString target) (toJSString commandName) callback
CRogers/stack-ide-atom
haskell/src/Atom/CommandRegistry.hs
mit
783
15
9
116
227
121
106
-1
-1
{-# LANGUAGE TypeOperators #-}

-- | Servant route definitions for the todo service: CRUD on a single
-- todo plus a list-all endpoint.
module Api where

import Data.Proxy
import Database.Persist (Entity)
import Models (Todo, TodoId)
import Servant.API ((:<|>), (:>), Capture, Delete, Get, JSON, NoContent,
                    Post, Put, ReqBody)

type Api =
        -- create: POST /todo with a JSON 'Todo' body, returns the new key
        "todo" :> ReqBody '[JSON] Todo :> Post '[JSON] TodoId
        -- read: GET /todo/:key, 'Nothing' when the key does not exist
   :<|> "todo" :> Capture "key" TodoId :> Get '[JSON] (Maybe (Entity Todo))
        -- update: PUT /todo/:key replaces the stored todo
   :<|> "todo" :> Capture "key" TodoId :> ReqBody '[JSON] Todo :> Put '[JSON] NoContent
        -- delete: DELETE /todo/:id
   :<|> "todo" :> Capture "id" TodoId :> Delete '[JSON] NoContent
        -- all: GET /todos returns every stored todo
   :<|> "todos" :> Get '[JSON] [Entity Todo]

-- | Value-level witness of 'Api', passed to servant's @serve@/@client@.
api :: Proxy Api
api = Proxy
sectore/haskell-elm-todo-app
server/src/Api.hs
mit
823
0
21
300
255
142
113
-1
-1
{-# htermination unzip :: [(a,b)] -> ([a],[b]) #-}
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/Prelude_unzip_1.hs
mit
51
0
2
8
3
2
1
1
0
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NoMonomorphismRestriction #-} module Test.Hspec.Wai.JSONSpec (main, spec) where import Test.Hspec import Data.String import Test.Hspec.Wai hiding (pending) import Test.Hspec.Wai.JSON main :: IO () main = hspec spec spec :: Spec spec = do describe "json" $ do context "when matching body" $ do let MatchBody matcher = matchBody [json|{foo: 23}|] it "ignores whitespace" $ do let actual = fromString $ unlines [ "{" , show ("foo" :: String) ++ " : 23" , "}" ] matcher [] actual `shouldBe` Nothing it "rejects bodies that are not equal" $ do matcher [] [json|{foo: 42}|] `shouldBe` Just (unlines [ "body mismatch:" , " expected: {\"foo\":23}" , " but got: {\"foo\":42}" ]) context "when matching Content-Type header" $ do let body = [json|{foo: 23}|] [MatchHeader matcher] = matchHeaders body match = (`matcher` body) it "accepts 'application/json'" $ do match [("Content-Type", "application/json")] `shouldBe` Nothing it "ignores 'charset=utf-8'" $ do match [("Content-Type", "application/json;charset=utf-8")] `shouldBe` Nothing it "ignores whitespace" $ do match [("Content-Type", "application/json ; charset=utf-8")] `shouldBe` Nothing it "requires a Content-Type header" $ do match [] `shouldBe` (Just. unlines) [ "missing header:" , " Content-Type: application/json" ] it "rejects other values for Content-Type" $ do match [("Content-Type", "foobar")] `shouldBe` (Just . unlines) [ "missing header:" , " Content-Type: application/json" ]
hspec/hspec-wai
hspec-wai-json/test/Test/Hspec/Wai/JSONSpec.hs
mit
1,893
0
24
588
458
244
214
46
1
{-# htermination ap :: Maybe (a -> b) -> Maybe a -> Maybe b #-} import Monad
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/Monad_ap_3.hs
mit
77
0
3
17
5
3
2
1
0
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE UndecidableInstances #-} module AI where import qualified Data.Tree.Game_tree.Game_tree as GT import qualified Data.Tree.Game_tree.Negascout as NS import Data.Maybe (fromJust) import Data.List (nubBy, sort) import Yinsh -- | Every AI should provide a function @ai..@, returning an AIFunction. type AIFunction = GameState -> GameState -- | Result of an heuristic evaluation function type AIValue = Int -- | Wrapper class for AI players which encapsules the current game state. class AIPlayer a where -- | Heuristic evaluation function for a game state. Everything is calulated -- from the perspective of the white player. This is sufficient since -- Yinsh is a zero sum game. valueForWhite :: a -> AIValue -- | Number of turns to look ahead in the game tree. getPlies :: a -> Int -- | Unwrap the gamestate inside the AI. getGamestate :: a -> GameState -- | Update AI with new gamestate update :: a -> GameState -> a -- | Make the GameState (wrapped in the AIPlayer) an instance of Game_tree -- (actually rather an instance of a node in the game tree). instance (AIPlayer a) => GT.Game_tree a where is_terminal = terminalState . getGamestate children ai = map (update ai) (gamestates (getGamestate ai)) node_value ai = sign * valueForWhite ai where sign | activePlayer gs == W = 1 | otherwise = -1 gs = getGamestate ai -- | Possible new game states. The input and output game states are guaranteed -- to be in turn mode AddRing, AddMarker, RemoveRun or Wait*. 
gamestates :: GameState -> [GameState] gamestates gs | terminalState gs = [] | otherwise = case turnMode gs of AddRing -> freeCoords >>= newGS gs AddMarker -> rings' >>= newGS gs (RemoveRun _) -> runCoords' >>= newGS gs (WaitRemoveRun _) -> [fromJust (newGameState gs (0, 0))] WaitAddMarker -> [fromJust (newGameState gs (0, 0))] (MoveRing _) -> error "This is not supposed to happen" (RemoveRing _) -> error "This is not supposed to happen" where freeCoords = filter (freeCoord (board gs)) coords -- TODO: factor out, optimize rings' = rings (activePlayer gs) (board gs) runCoords' = removeDups $ filter (partOfRun markers') coords markers' = markers (activePlayer gs) (board gs) removeDups = nubBy (\c1 c2 -> sort (runCoords markers' c1) == sort (runCoords markers' c2)) newGS gs' c = case turnMode nextGS of AddRing -> [nextGS] AddMarker -> [nextGS] (RemoveRun _) -> [nextGS] (WaitRemoveRun _) -> [nextGS] WaitAddMarker -> [nextGS] (MoveRing start) -> ringMoves (board nextGS) start >>= newGS nextGS (RemoveRing _) -> rings (activePlayer nextGS) (board gs') >>= newGS nextGS where nextGS = fromJust $ newGameState gs' c -- | Get new game state after the AI turn. aiTurn :: (AIPlayer ai) => ai -> GameState aiTurn ai = case turnMode gs of (WaitRemoveRun _) -> fromJust $ newGameState gs (0, 0) WaitAddMarker -> fromJust $ newGameState gs (0, 0) _ -> pv !! 1 where pv = aiPV ai gs = getGamestate ai -- | Get the whole principal variation. aiPV :: (AIPlayer ai) => ai -> [GameState] aiPV ai = map getGamestate gss where (gss, _) = NS.negascout ai ply ply = getPlies ai -- TODO: negascout really seems to be the fastest. But test this for more game states -- NS.alpha_beta_search gs ply -- NS.principal_variation_search gs ply -- | A large number for symbolizing a win. -- Very ugly: if this number is higher than 2^31, there is an integer overflow -- in haste/javascript, resulting in the AI playing *very* bad. -- So 2^31 - 1 ~ 2 * 10^9 is our hardcoded magic 'huge' number. 
hugeNumber :: Int hugeNumber = 2147483647
sharkdp/yinsh
src/AI.hs
mit
4,215
0
13
1,287
903
478
425
60
13
-- | String-template helpers for generating Android activity sources.
module Templates.HActivityStringTemplate(
        getActivityStringTemplate,
        loadActivityStringTemplate
        ) where

import Data.ByteString.Lazy (ByteString)
import Text.StringTemplate
import Control.Monad

-- | Render a template with its @name@ attribute set to the given
-- activity name.
--
-- (Simplified from the original @do t <- m; return t@ anti-pattern:
-- the bind/return pair added nothing.)
getActivityStringTemplate nameActivity tpl =
    liftM (render . setAttribute "name" nameActivity) tpl

-- | Load the named template from the template directory.
--
-- Raises a descriptive 'error' (instead of the original bare
-- @error "Error"@) when the template is missing.
--
-- NOTE(review): the template directory is a hard-coded absolute path to
-- a developer machine; this should become configurable.
loadActivityStringTemplate :: String -> IO (StringTemplate ByteString)
loadActivityStringTemplate nameTpl = do
    templates <- directoryGroup "/Users/jrm2k6/Documents/Programming/adom/androidtemplates" :: IO (STGroup ByteString)
    case getStringTemplate nameTpl templates of
        Just t  -> return t
        Nothing -> error ("loadActivityStringTemplate: template not found: " ++ nameTpl)
jrm2k6/adom
Templates/HActivityStringTemplate.hs
mit
754
2
14
181
171
84
87
16
2
{- |
Module    : Orville.PostgreSQL.Internal.Expr.Update
Copyright : Flipstone Technology Partners 2021
License   : MIT

Re-export module collecting the pieces of SQL @UPDATE@ expression
building: individual @SET@ clauses, lists of them, and whole
@UPDATE@ statements.
-}
module Orville.PostgreSQL.Internal.Expr.Update
  ( -- * Lists of SET clauses
    SetClauseList,
    setClauseList,
    -- * Individual SET clauses
    SetClause,
    setColumn,
    -- * Whole UPDATE statements
    UpdateExpr,
    updateExpr,
  )
where

import Orville.PostgreSQL.Internal.Expr.Update.SetClause (SetClause, setColumn)
import Orville.PostgreSQL.Internal.Expr.Update.SetClauseList (SetClauseList, setClauseList)
import Orville.PostgreSQL.Internal.Expr.Update.UpdateExpr (UpdateExpr, updateExpr)
flipstone/orville
orville-postgresql-libpq/src/Orville/PostgreSQL/Internal/Expr/Update.hs
mit
536
0
5
68
82
58
24
10
0
{-# LANGUAGE TemplateHaskell, RecordWildCards, DeriveDataTypeable #-}
-- | Public game API: wraps the internal 'GameState' in an opaque 'Game'
-- newtype (constructor not exported) and exposes the moves.
module Ratscrew.Game (
module Ratscrew.Types,
module Ratscrew.Cards,
attemptSnap,
gameView,
playCard,
newGame,
Game()
) where

import Ratscrew.Cards
import Control.Lens
import Ratscrew.Types
import Data.Typeable
import Ratscrew.Game.Internal
import Ratscrew.Game.Internal.Snapping as Snapping

-- | Opaque game handle; only manipulable through the functions below.
newtype Game = Game {_gameState :: GameState} deriving Typeable

-- Generates the 'gameState' lens used by 'withGameState'.
makeLenses ''Game

-- | A player attempts to snap the current stack.
-- NOTE(review): the @(-1)@ passed to 'Snapping.isSnap' is a magic
-- argument whose meaning is defined in the Snapping module — confirm.
attemptSnap :: Player -> Game -> Game
attemptSnap = withGameState (attemptSnap' (Snapping.isSnap (-1)))

-- | A player plays the top card of their deck.
playCard :: Player -> Game -> Game
playCard = withGameState playCard'

-- | Read-only projection of the game for display.
gameView :: Game -> GameView
gameView (Game g) = gameView' g

-- | Start a fresh game for the given players, dealing from a full deck.
newGame :: [Player] -> Game
newGame = let d = fullDeck in Game . newGameState d

-- Lift an internal GameState transition to operate on the Game wrapper.
withGameState ::(a -> GameState -> GameState) -> a -> Game -> Game
withGameState f p = gameState %~ f p
smobs/Ratscrew
src/Ratscrew/Game.hs
mit
965
0
11
218
265
149
116
33
1
{-# LANGUAGE CPP #-} {- arch-tag: HVFS Combinators Copyright (c) 2004-2011 John Goerzen <jgoerzen@complete.org> All rights reserved. For license and copyright information, see the file LICENSE -} {- | Module : System.IO.HVFS.Combinators Copyright : Copyright (C) 2004-2011 John Goerzen License : BSD3 Maintainer : John Goerzen <jgoerzen@complete.org> Stability : provisional Portability: portable Support for combining different HVFS modules together Copyright (c) 2004-2005 John Goerzen, jgoerzen\@complete.org -} module System.IO.HVFS.Combinators ( -- * Restrictions HVFSReadOnly(..), HVFSChroot, newHVFSChroot) where import System.IO import System.IO.Error import System.IO.HVFS import System.IO.HVFS.InstanceHelpers (getFullPath) #if !(defined(mingw32_HOST_OS) || defined(mingw32_TARGET_OS) || defined(__MINGW32__)) import System.Posix.Files -- This actually needed? -Wall doesn't seem to think -- so, but I'm not sure... #endif import System.Path (secureAbsNormPath) import System.Path.NameManip (normalise_path) import System.FilePath ((</>), pathSeparator, isPathSeparator) ---------------------------------------------------------------------- -- Providing read-only access ---------------------------------------------------------------------- {- | Restrict access to the underlying filesystem to be strictly read-only. Any write-type operations will cause an error. No constructor is required; just say @HVFSReadOnly fs@ to make a new read-only wrapper around the 'HVFS' instance @fs@. 
-} data HVFS a => HVFSReadOnly a = HVFSReadOnly a deriving (Eq, Show) withro :: HVFS a => (a -> b) -> HVFSReadOnly a -> b withro f (HVFSReadOnly x) = f x roerror :: (HVFS a) => HVFSReadOnly a -> IO c roerror h = let err x = vRaiseError x permissionErrorType "Read-only virtual filesystem" Nothing in withro err h instance HVFS a => HVFS (HVFSReadOnly a) where vGetCurrentDirectory = withro vGetCurrentDirectory vSetCurrentDirectory = withro vSetCurrentDirectory vGetDirectoryContents = withro vGetDirectoryContents vDoesFileExist = withro vDoesFileExist vDoesDirectoryExist = withro vDoesDirectoryExist vCreateDirectory h _ = roerror h vRemoveDirectory h _ = roerror h vRenameDirectory h _ _ = roerror h vRenameFile h _ _ = roerror h vGetFileStatus = withro vGetFileStatus vGetSymbolicLinkStatus = withro vGetSymbolicLinkStatus vGetModificationTime = withro vGetModificationTime vRaiseError = withro vRaiseError vCreateSymbolicLink h _ _ = roerror h vReadSymbolicLink = withro vReadSymbolicLink vCreateLink h _ _ = roerror h instance HVFSOpenable a => HVFSOpenable (HVFSReadOnly a) where vOpen fh fp mode = case mode of ReadMode -> withro (\h -> vOpen h fp mode) fh _ -> roerror fh ---------------------------------------------------------------------- -- Restricting to a subdirectory ---------------------------------------------------------------------- {- | Access a subdirectory of a real filesystem as if it was the root of that filesystem. -} data HVFS a => HVFSChroot a = HVFSChroot String a deriving (Eq, Show) {- | Create a new 'HVFSChroot' object. 
-} newHVFSChroot :: HVFS a => a -- ^ The object to pass requests on to -> FilePath -- ^ The path of the directory to make root -> IO (HVFSChroot a) -- ^ The resulting new object newHVFSChroot fh fp = do full <- getFullPath fh fp isdir <- vDoesDirectoryExist fh full if isdir then do let newobj = (HVFSChroot full fh) vSetCurrentDirectory newobj [pathSeparator] return newobj else vRaiseError fh doesNotExistErrorType ("Attempt to instantiate HVFSChroot over non-directory " ++ full) (Just full) {- | Get the embedded object -} dch :: (HVFS t) => HVFSChroot t -> t dch (HVFSChroot _ a) = a {- | Convert a local (chroot) path to a full path. -} dch2fp, fp2dch :: (HVFS t) => HVFSChroot t -> String -> IO String dch2fp mainh@(HVFSChroot fp h) locfp = do full <- (fp ++) `fmap` if isPathSeparator (head locfp) then return locfp else getFullPath mainh locfp case secureAbsNormPath fp full of Nothing -> vRaiseError h doesNotExistErrorType ("Trouble normalizing path in chroot") (Just (fp ++ "," ++ full)) Just x -> return x {- | Convert a full path to a local (chroot) path. 
-} fp2dch (HVFSChroot fp h) locfp = do newpath <- case secureAbsNormPath fp locfp of Nothing -> vRaiseError h doesNotExistErrorType ("Unable to securely normalize path") (Just (fp </> locfp)) Just x -> return x if (take (length fp) newpath /= fp) then vRaiseError h doesNotExistErrorType ("Local path is not subdirectory of parent path") (Just newpath) else let newpath2 = drop (length fp) newpath in return $ normalise_path ([pathSeparator] ++ newpath2) dch2fph :: (HVFS t) => (t -> String -> IO t1) -> HVFSChroot t -> [Char] -> IO t1 dch2fph func fh@(HVFSChroot _ h) locfp = do newfp <- dch2fp fh locfp func h newfp instance HVFS a => HVFS (HVFSChroot a) where vGetCurrentDirectory x = do fp <- vGetCurrentDirectory (dch x) fp2dch x fp vSetCurrentDirectory = dch2fph vSetCurrentDirectory vGetDirectoryContents = dch2fph vGetDirectoryContents vDoesFileExist = dch2fph vDoesFileExist vDoesDirectoryExist = dch2fph vDoesDirectoryExist vCreateDirectory = dch2fph vCreateDirectory vRemoveDirectory = dch2fph vRemoveDirectory vRenameDirectory fh old new = do old' <- dch2fp fh old new' <- dch2fp fh new vRenameDirectory (dch fh) old' new' vRemoveFile = dch2fph vRemoveFile vRenameFile fh old new = do old' <- dch2fp fh old new' <- dch2fp fh new vRenameFile (dch fh) old' new' vGetFileStatus = dch2fph vGetFileStatus vGetSymbolicLinkStatus = dch2fph vGetSymbolicLinkStatus vGetModificationTime = dch2fph vGetModificationTime -- vRaiseError vCreateSymbolicLink fh old new = do old' <- dch2fp fh old new' <- dch2fp fh new vCreateSymbolicLink (dch fh) old' new' vReadSymbolicLink fh fp = do result <- dch2fph vReadSymbolicLink fh fp fp2dch fh result vCreateLink fh old new = do old' <- dch2fp fh old new' <- dch2fp fh new vCreateLink (dch fh) old' new' instance HVFSOpenable a => HVFSOpenable (HVFSChroot a) where vOpen fh fp mode = do newfile <- dch2fp fh fp vOpen (dch fh) newfile mode
haskellbr/missingh
missingh-all/src/System/IO/HVFS/Combinators.hs
mit
7,331
0
15
2,194
1,555
776
779
115
3
module Numeric.Limp.Solvers.Cbc.Solve where
import Numeric.Limp.Canon
import Numeric.Limp.Rep
import Numeric.Limp.Solvers.Cbc.Error
import Numeric.Limp.Solvers.Cbc.MatrixRepr

import qualified Data.Vector.Storable as V

import Numeric.Limp.Solvers.Cbc.Internal.Wrapper
import System.IO.Unsafe

-- | Solve a canonical mixed-integer program with the CBC solver.
--
-- Returns 'Left Infeasible' when CBC proves the problem infeasible,
-- otherwise the optimal 'Assignment'.
--
-- NOTE: implemented with 'unsafePerformIO' over the C wrapper; this is
-- only safe because each call builds its own fresh model and the solve
-- is (assumed) deterministic for a given 'Program' — confirm the
-- wrapper has no global state.
solve :: (Ord z, Ord r) => Program z r IntDouble -> Either Error (Assignment z r IntDouble)
solve p
 = let mr = matrixReprOfProgram p
   in  unsafePerformIO
     $ do   m <- newModel
            setQuiet m
            loadProblem m (_starts mr) (_inds mr) (_vals mr)
                          (_colLs mr) (_colUs mr) (_obj mr)
                          (_rowLs mr) (_rowUs mr)
            -- Mark the integral columns.
            V.forM_ (_ints mr) $ setInteger m
            -- Objective sense 1 = minimise (CBC/CLP convention — confirm).
            setObjSense m 1
            branchAndBound m

            infeasible <- isProvenInfeasible m
            -- TODO get other statuses (unbounded, iteration limit, ...)
            if infeasible
              then return (Left Infeasible)
              -- (case True/False replaced with if; bind+return replaced
              -- with fmap — behavior unchanged.)
              else Right . makeAssignment p <$> getSolution m
amosr/limp-cbc
src/Numeric/Limp/Solvers/Cbc/Solve.hs
mit
1,089
0
16
374
315
162
153
27
2
{-# LANGUAGE OverloadedStrings #-}
-- | CSS for the git-issues web UI, written with the Clay EDSL.
module GitIssues.Web.Styles where

import Clay
import Data.Text.Lazy (Text)
-- Hide Prelude's (**) so Clay's descendant-selector combinator is used.
import Prelude hiding ((**))

-- | The stylesheet rendered to lazy 'Text', ready to serve.
css :: Text
css = render stylesheet

-- | Spacing utility classes plus margins for issue button rows and the
-- buttons ('.btn') nested inside them.
stylesheet :: Css
stylesheet = do
    ".spacing-top" ? marginTop (px 10)
    ".spacing-right" ? marginRight (px 10)
    ".issue-button-row" ? do
        marginTop (px 10)
        marginBottom (px 10)
    ".issue-button-row" ** ".btn" ? marginRight (px 10)
yamadapc/git-issues
src/GitIssues/Web/Styles.hs
mit
500
0
12
156
139
72
67
18
1
module Test.Integration where import Import import Control.Monad.IO.Class (MonadIO(..)) import qualified Data.ByteString as BS import Data.Text.Encoding (decodeUtf8) import Test.Hspec import Base64URL import qualified Station as SN import qualified Station.Implementation as IM import Station.Procedures.Build (buildDeck) import Station.Procedures.General (validateVersion) import Station.JSON import qualified Station.Original as SO import qualified Station.Types as ST import Test.Integration.Schema test :: SpecWith () test = do describe "basic operations" $ do it "work as expected" $ void . SN.runTempDeck $ \paths -> do -- Print diagnostics liftIO (print paths) -- Make sure the meta schema version is valid. -- -- Once we have a way to validate entire decks this will -- automatically be handled by validating the starting deck. startingDeck <- get metaSchemaVersion <- fmap ST._vcVersion . shouldBeJust =<< SN.resolve ST.customSchemaId shouldBeRight $ validateVersion startingDeck (fst <$> metaSchemaVersion) -- Check that the meta schema in the deck is really a canonicalized -- version of the human readable one. -- -- Then do the same for the author schema. deckMetaVC <- shouldBeJust =<< SN.resolve ST.customSchemaId deckMetaBts <- shouldBeJust . fmap (ST._cardInstance . snd) . ST._versionCard . ST._vcVersion $ deckMetaVC humanMeta <- liftIO (BS.readFile "schemas/human-optimized/draft4-modified.json") humanMetaJQ <- liftIO $ fmap encodeUtf8 . convertJQ . decodeUtf8 $ humanMeta liftIO $ deckMetaBts `shouldBe` SO.unsafeStripTrailingEOL humanMetaJQ deckAuthorVC <- shouldBeJust =<< SN.resolve ST.authorSchemaId deckAuthorBts <- shouldBeJust . fmap (ST._cardInstance . snd) . ST._versionCard . ST._vcVersion $ deckAuthorVC humanAuthor <- liftIO (BS.readFile "schemas/human-optimized/author.json") humanAuthorJQ <- liftIO $ fmap encodeUtf8 . convertJQ . decodeUtf8 $ humanAuthor liftIO $ deckAuthorBts `shouldBe` SO.unsafeStripTrailingEOL humanAuthorJQ -- Add schemas void . 
shouldBeLeft =<< SN.newSchema (SN.encodeSchemaBytes invalidSchema) Nothing schNone <- shouldBeRight =<< SN.newSchema (SN.encodeSchemaBytes noneSchema) Nothing schAny <- shouldBeRight =<< SN.newSchema (SN.encodeSchemaBytes anySchema) Nothing schMinimal <- shouldBeRight =<< SN.newSchema (SN.encodeSchemaBytes minimalSchema) Nothing schLink <- shouldBeRight =<< SN.newSchema (SN.encodeSchemaBytes linkSchema) Nothing -- Add instances of those schemas void . shouldBeLeft =<< SN.new (ST.Card schNone (encodeProper Null) Nothing) void . shouldBeRight =<< SN.new (ST.Card schAny (encodeProper Null) Nothing) void . shouldBeRight =<< SN.new (ST.Card schMinimal (encodeProper Null) Nothing) void . shouldBeLeft =<< SN.new (ST.Card schMinimal (encodeProper (Bool True)) Nothing) -- Test that "linkTo" is actually enforced let arrayCard = ST.Card schAny (encodeProper emptyArray) Nothing arrayLink@(ST.Link arrayId _) <- shouldBeRight =<< SN.new arrayCard let c1 = ST.Card schLink (encodeProper (mkLink arrayLink)) Nothing void . shouldBeRight =<< SN.new c1 let emptyHash = ST.VersionHash . ST.Hash . AlreadyBase64URL $ mempty badLink = arrayLink&ST.linkHash.~emptyHash c2 = ST.Card schLink (encodeProper (mkLink badLink)) Nothing void . shouldBeLeft =<< SN.new c2 let objectCard = ST.Card schAny (encodeProper emptyObject) Nothing objectLink <- shouldBeRight =<< SN.new objectCard let c3 = ST.Card schLink (encodeProper (mkLink objectLink)) Nothing void . shouldBeLeft =<< SN.new c3 -- Test updating arrayN <- SN.versionCount arrayId liftIO $ arrayN `shouldBe` 1 newLinkHash <- shouldBeRight =<< SN.update arrayLink objectCard arrayNUpdated <- SN.versionCount arrayId liftIO $ arrayNUpdated `shouldBe` 2 void . shouldBeRight =<< SN.update (arrayLink&ST.linkHash.~newLinkHash) objectCard -- This shouldn't do anything. 
arrayNUnchanged <- SN.versionCount arrayId liftIO $ arrayNUnchanged `shouldBe` 2 -- Test deletion SN.archive objectLink -- Test that the store in memory equals that on disk liveDeck <- get reloaded <- liftIO (buildDeck (IM.plainFilesystem paths)) liftIO $ do ST._deckBytes liveDeck `shouldBe` ST._deckBytes reloaded ST._deckVersions liveDeck `shouldBe` ST._deckVersions reloaded ST._deckIds liveDeck `shouldBe` ST._deckIds reloaded
seagreen/station
test/Test/Integration.hs
mit
5,402
0
22
1,694
1,250
618
632
74
1
module Handler.Home where

import Import

-- | GET handler for the site root: a static landing page that links to
-- the available crypto demos (currently only AES).
getHomeR :: Handler Html
getHomeR = defaultLayout $ do
    setTitle "Crypto demo homepage"
    [whamlet|
        <p>Welcome to the crypto demo site homepage.
        <p>Learn more about:
        <ul>
            <li>
                <a href=@{AesR}>AES
    |]
snoyberg/crypto-demo-site
src/Handler/Home.hs
mit
323
0
8
121
39
22
17
-1
-1
-- | H-99 Problem 7: Flatten a nested list structure.
module Problem7 where

-- | A list whose elements may themselves be arbitrarily nested lists.
data NestedList a = Elem a | List [NestedList a]

-- | Flatten a 'NestedList' into a plain list, preserving left-to-right
-- element order.
flatten :: NestedList a -> [a]
flatten (Elem a) = [a]
flatten (List a) = flattenSubList a

-- | Flatten every element of a list of 'NestedList's and concatenate.
--
-- Simplified to 'concatMap': the original hand-rolled the recursion and
-- carried a redundant special case for a leading 'Elem' (already covered
-- by calling 'flatten' on each element).
flattenSubList :: [NestedList a] -> [a]
flattenSubList = concatMap flatten
Matt-Renfro/haskell
H-99/Problem7.hs
mit
385
0
9
78
161
85
76
9
1
{-- HSFM, a filemanager written in Haskell. Copyright (C) 2016 Julian Ospald This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. --} {-# LANGUAGE RecordWildCards #-} module HSFM.GUI.Gtk.MyView where import Control.Concurrent.MVar ( newEmptyMVar , putMVar , tryTakeMVar ) import Control.Concurrent.STM ( newTVarIO , readTVarIO ) import Control.Monad ( unless , void , when ) import Control.Monad.IO.Class ( liftIO ) import Data.Foldable ( for_ ) import Data.Maybe ( catMaybes , fromJust ) import Data.String ( fromString ) import Graphics.UI.Gtk import {-# SOURCE #-} HSFM.GUI.Gtk.Callbacks (setViewCallbacks) import qualified HPath as P import HSFM.FileSystem.FileType import HSFM.GUI.Glib.GlibString() import HSFM.GUI.Gtk.Data import HSFM.GUI.Gtk.Icons import HSFM.GUI.Gtk.Utils import HSFM.History import HSFM.Utils.IO import Paths_hsfm ( getDataFileName ) import Prelude hiding(readFile) import System.INotify ( addWatch , initINotify , killINotify , EventVariety(..) ) import System.IO.Error ( catchIOError , ioError , isUserError ) import System.Posix.FilePath ( hiddenFile ) -- |Creates a new tab with its own view and refreshes the view. 
newTab :: MyGUI -> Notebook -> IO FMView -> Item -> Int -> IO MyView newTab mygui nb iofmv item pos = do -- create eventbox with label label <- labelNewWithMnemonic (maybe (P.fromAbs $ path item) P.fromRel $ P.basename $ path item) ebox <- eventBoxNew eventBoxSetVisibleWindow ebox False containerAdd ebox label widgetShowAll label myview <- createMyView mygui nb iofmv _ <- notebookInsertPageMenu (notebook myview) (viewBox myview) ebox ebox pos -- set initial history let historySize = 5 putMVar (history myview) (BrowsingHistory [] (path item) [] historySize) notebookSetTabReorderable (notebook myview) (viewBox myview) True catchIOError (refreshView mygui myview item) $ \e -> do file <- pathToFile getFileInfo . fromJust . P.parseAbs . fromString $ "/" refreshView mygui myview file labelSetText label (fromString "/" :: String) unless (isUserError e) (ioError e) -- close callback _ <- ebox `on` buttonPressEvent $ do eb <- eventButton case eb of MiddleButton -> liftIO $ do n <- notebookGetNPages (notebook myview) when (n > 1) $ void $ destroyView myview return True _ -> return False return myview -- |Constructs the initial MyView object with a few dummy models. -- It also initializes the callbacks. 
createMyView :: MyGUI -> Notebook -> IO FMView -> IO MyView createMyView mygui nb iofmv = do inotify <- newEmptyMVar history <- newEmptyMVar builder <- builderNew builderAddFromFile builder =<< getDataFileName "data/Gtk/builder.xml" -- create dummy models, so we don't have to use MVar rawModel <- newTVarIO =<< listStoreNew [] filteredModel <- newTVarIO =<< (\x -> treeModelFilterNew x []) =<< readTVarIO rawModel sortedModel <- newTVarIO =<< treeModelSortNewWithModel =<< readTVarIO filteredModel cwd <- newEmptyMVar view' <- iofmv view <- newTVarIO view' urlBar <- builderGetObject builder castToEntry "urlBar" backViewB <- builderGetObject builder castToButton "backViewB" upViewB <- builderGetObject builder castToButton "upViewB" forwardViewB <- builderGetObject builder castToButton "forwardViewB" homeViewB <- builderGetObject builder castToButton "homeViewB" refreshViewB <- builderGetObject builder castToButton "refreshViewB" scroll <- builderGetObject builder castToScrolledWindow "mainScroll" viewBox <- builderGetObject builder castToBox "viewBox" let notebook = nb let myview = MkMyView {..} -- set the bindings setViewCallbacks mygui myview -- add the treeview to the scroll container let oview = fmViewToContainer view' containerAdd scroll oview widgetShowAll viewBox return myview -- |Switch the existing view in `MyView` with the one that the -- io action returns. switchView :: MyGUI -> MyView -> IO FMView -> IO () switchView mygui myview iofmv = do cwd <- getCurrentDir myview let nb = notebook myview oldpage <- destroyView myview -- create new view and tab page where the previous one was nview <- newTab mygui nb iofmv cwd oldpage page <- fromJust <$> notebookPageNum nb (viewBox nview) notebookSetCurrentPage nb page refreshView mygui nview cwd -- |Destroys the given view by disconnecting the watcher -- and destroying the active FMView container. -- -- Everything that needs to be done in order to forget about a -- view needs to be done here. 
-- -- Returns the page in the tab list this view corresponds to. destroyView :: MyView -> IO Int destroyView myview = do -- disconnect watcher mi <- tryTakeMVar (inotify myview) for_ mi $ \i -> killINotify i page <- fromJust <$> notebookPageNum (notebook myview) (viewBox myview) -- destroy old view and tab page view' <- readTVarIO $ view myview widgetDestroy (fmViewToContainer view') notebookRemovePage (notebook myview) page return page -- |Createss an IconView. createIconView :: IO FMView createIconView = do iconv <- iconViewNew iconViewSetSelectionMode iconv SelectionMultiple iconViewSetColumns iconv (-1) iconViewSetSpacing iconv 2 iconViewSetMargin iconv 0 {- set iconv [ iconViewItemOrientation := OrientationHorizontal ] -} {- set iconv [ iconViewOrientation := OrientationHorizontal ] -} return $ FMIconView iconv -- |Creates a TreeView. createTreeView :: IO FMView createTreeView = do -- create the final view treeView <- treeViewNew -- set selection mode tvs <- treeViewGetSelection treeView treeSelectionSetMode tvs SelectionMultiple -- set drag and drop tl <- targetListNew atom <- atomNew ("HSFM" :: String) targetListAdd tl atom [TargetSameApp] 0 treeViewEnableModelDragDest treeView tl [ActionCopy] treeViewEnableModelDragSource treeView [Button1] tl [ActionCopy] -- create final tree model columns renderTxt <- cellRendererTextNew renderPix <- cellRendererPixbufNew let ct = cellText :: (CellRendererTextClass cr) => Attr cr String cp = cellPixbuf :: (CellRendererPixbufClass self) => Attr self Pixbuf -- filename column cF <- treeViewColumnNew treeViewColumnSetTitle cF ("Filename" :: String) treeViewColumnSetResizable cF True treeViewColumnSetClickable cF True treeViewColumnSetSortColumnId cF 1 cellLayoutPackStart cF renderPix False cellLayoutPackStart cF renderTxt True _ <- treeViewAppendColumn treeView cF cellLayoutAddColumnAttribute cF renderPix cp $ makeColumnIdPixbuf 0 cellLayoutAddColumnAttribute cF renderTxt ct $ makeColumnIdString 1 -- date column cMD <- 
treeViewColumnNew treeViewColumnSetTitle cMD ("Date" :: String) treeViewColumnSetResizable cMD True treeViewColumnSetClickable cMD True treeViewColumnSetSortColumnId cMD 2 cellLayoutPackStart cMD renderTxt True _ <- treeViewAppendColumn treeView cMD cellLayoutAddColumnAttribute cMD renderTxt ct $ makeColumnIdString 2 -- permissions column cP <- treeViewColumnNew treeViewColumnSetTitle cP ("Permission" :: String) treeViewColumnSetResizable cP True treeViewColumnSetClickable cP True treeViewColumnSetSortColumnId cP 3 cellLayoutPackStart cP renderTxt True _ <- treeViewAppendColumn treeView cP cellLayoutAddColumnAttribute cP renderTxt ct $ makeColumnIdString 3 return $ FMTreeView treeView -- |Refreshes the View based on the given directory. -- -- Throws: -- -- - `userError` on inappropriate type refreshView :: MyGUI -> MyView -> Item -> IO () refreshView mygui myview SymLink { sdest = Just d@Dir{} } = refreshView mygui myview d refreshView mygui myview item@Dir{} = do newRawModel <- fileListStore item myview writeTVarIO (rawModel myview) newRawModel view' <- readTVarIO $ view myview _ <- tryTakeMVar (cwd myview) putMVar (cwd myview) item -- get selected items tps <- getSelectedTreePaths mygui myview trs <- catMaybes <$> mapM (treeRowReferenceNew newRawModel) tps constructView mygui myview -- reselect selected items -- TODO: not implemented for icon view yet case view' of FMTreeView treeView -> do tvs <- treeViewGetSelection treeView ntps <- mapM treeRowReferenceGetPath trs mapM_ (treeSelectionSelectPath tvs) ntps _ -> return () refreshView _ _ _ = ioError $ userError "Inappropriate type!" -- |Constructs the visible View with the current underlying mutable models, -- which are retrieved from 'MyGUI'. -- -- This sort of merges the components mygui and myview and fires up -- the actual models. 
constructView :: MyGUI -> MyView -> IO () constructView mygui myview = do settings' <- readTVarIO $ settings mygui -- pix stuff iT <- iconThemeGetDefault folderPix <- getIcon IFolder iT (iconSize settings') folderSymPix <- getSymlinkIcon IFolder iT (iconSize settings') filePix <- getIcon IFile iT (iconSize settings') fileSymPix <- getSymlinkIcon IFile iT (iconSize settings') errorPix <- getIcon IError iT (iconSize settings') let dirtreePix Dir{} = folderPix dirtreePix FileLike{} = filePix dirtreePix DirSym{} = folderSymPix dirtreePix FileLikeSym{} = fileSymPix dirtreePix BrokenSymlink{} = errorPix dirtreePix _ = errorPix view' <- readTVarIO $ view myview cdir <- getCurrentDir myview let cdirp = path cdir -- update urlBar entrySetText (urlBar myview) (P.fromAbs cdirp) rawModel' <- readTVarIO $ rawModel myview -- filtering filteredModel' <- treeModelFilterNew rawModel' [] writeTVarIO (filteredModel myview) filteredModel' treeModelFilterSetVisibleFunc filteredModel' $ \iter -> do hidden <- showHidden <$> readTVarIO (settings mygui) item <- treeModelGetRow rawModel' iter >>= (P.basename . path) if hidden then return True else return . not . hiddenFile . P.fromRel $ item -- sorting sortedModel' <- treeModelSortNewWithModel filteredModel' writeTVarIO (sortedModel myview) sortedModel' treeSortableSetSortFunc sortedModel' 1 $ \iter1 iter2 -> do cIter1 <- treeModelFilterConvertIterToChildIter filteredModel' iter1 cIter2 <- treeModelFilterConvertIterToChildIter filteredModel' iter2 item1 <- treeModelGetRow rawModel' cIter1 item2 <- treeModelGetRow rawModel' cIter2 return $ compare item1 item2 treeSortableSetSortColumnId sortedModel' 1 SortAscending -- set values treeModelSetColumn rawModel' (makeColumnIdPixbuf 0) dirtreePix treeModelSetColumn rawModel' (makeColumnIdString 1) (P.toFilePath . fromJust . P.basename . 
path) treeModelSetColumn rawModel' (makeColumnIdString 2) packModTime treeModelSetColumn rawModel' (makeColumnIdString 3) packPermissions -- update model of view case view' of FMTreeView treeView -> do treeViewSetModel treeView (Just sortedModel') treeViewSetRubberBanding treeView True FMIconView iconView -> do iconViewSetModel iconView (Just sortedModel') iconViewSetPixbufColumn iconView (makeColumnIdPixbuf 0 :: ColumnId item Pixbuf) iconViewSetTextColumn iconView (makeColumnIdString 1 :: ColumnId item String) -- add watcher mi <- tryTakeMVar (inotify myview) for_ mi $ \i -> killINotify i newi <- initINotify _ <- addWatch newi [Move, MoveIn, MoveOut, MoveSelf, Create, Delete, DeleteSelf] (P.fromAbs cdirp) (\_ -> postGUIAsync $ refreshView mygui myview cdir) putMVar (inotify myview) newi return ()
hasufell/hsfm
src/HSFM/GUI/Gtk/MyView.hs
gpl-2.0
12,814
0
21
3,006
2,958
1,408
1,550
283
8
----------------------------------------------------------------------------- -- | -- Module : Application.Layer -- Copyright : (c) Gushcha Anton 2013-2014 -- License : GNU GPLv3 (see the file LICENSE) -- -- Maintainer : ncrashed@gmail.com -- Stability : experimental -- Portability : portable -- -- Module defines application layer of the application. It is the top layer -- in application structure. The layer uses following messages to communicate -- with channel layer: -- -- * incoming \"exit\" - is sent from gui thread (same level), initializes recursive -- shutdown protocol. -- -- * incoming \"message\" - is sent from channel layer. Indicates that other user -- sent a message. Holds username and contents. -- -- * incoming \"info\" - is sent from channel layer and informing about internal events. -- -- * incoming \"error\" - is sent from channel layer and informing about important errors. -- -- * incoming \"options\" - is sent from channel layer. Holds new options to set in gui. -- -- * incoming \"connect\" - is sent from channel layer. Indicates about connecting of a new user, -- holds username. -- -- * incoming \"disconnect\" - is sent from channel layer. Indicates about disconnecting of a user, -- holds username. -- -- * outgoing \"exit\" -- is sent to channel layer when terminating protocol is triggered. -- -- * outgoing \"send\" -- is sent to channel layer when the user finishes to chat a message. Holds only -- message body. -- -- * outgoing \"connect\" -- is sent to channel layer when the user presses connecting button. -- -- * outgoing \"disconnect\" -- is sent to channel layer when the user presses disconnecting button. -- -- * outgoing \"options\" -- is sent to channel layer when the user finishes changing serial port options -- (or user name changes). 
----------------------------------------------------------------------------- module Application.Layer ( initApplicationLayer ) where import Application.Gui import Application.Types import Channel.Layer import Channel.Options import Utility (while, exitMsg) import Event import Control.Distributed.Process import Control.Monad (forever) import Control.Concurrent (yield) -- | Application events that are listened. The events are connecting -- gui callbacks and process monad. data AppEvents = AppEvents { sendEvent :: Event String , connectEvent :: Event () , disconnectEvent :: Event () , optionChangedEvent :: Event (ChannelOptions, ChannelOptions) } -- | Application event initialization. initAppEvents :: IO AppEvents initAppEvents = do sendEvent' <- initEvent "" connectEvent' <- initEvent () disconnectEvent' <- initEvent () optionChangedEvent' <- initEvent (defaultOptions, defaultOptions) return AppEvents { sendEvent = sendEvent' , connectEvent = connectEvent' , disconnectEvent = disconnectEvent' , optionChangedEvent = optionChangedEvent' } -- | Transforms application event to callbacks that rises the events. callbacks :: AppEvents -> GuiCallbacks callbacks events = GuiCallbacks { sendMessageCallback = \msg -> do newEvent <- tag (sendEvent events) msg riseEvent newEvent return () , connectCallback = do riseEvent $ connectEvent events return () , disconnectCallback = do riseEvent $ disconnectEvent events return () , optionChangedCallback = \opt oldopt -> do newEvent <- tag (optionChangedEvent events) (opt, oldopt) riseEvent newEvent return () } -- | Handler for incoming user message. printUserMessage :: GuiApi -> (ProcessId, String, String, String) -> Process Bool printUserMessage api (_, _, user, msg) = do liftIO $ printMessage api user msg return True -- | Handler for incoming system info message. 
printInfoMessage :: GuiApi -> (ProcessId, String, String) -> Process Bool printInfoMessage api (_, _, msg) = do liftIO $ printInfo api msg return True -- | Handler for incoming system error message. printErrorMessage :: GuiApi -> (ProcessId, String, String) -> Process Bool printErrorMessage api (_, _, msg) = do liftIO $ printError api msg return True -- | Handler for incoming serial port options changes. setupOptionsHandler :: GuiApi -> (ProcessId, String, ChannelOptions) -> Process Bool setupOptionsHandler api (_, _, options) = do liftIO $ setupOptions api options return True -- | Handler for incoming remote connecting event. userConnectHandler :: GuiApi -> (ProcessId, String, String) -> Process Bool userConnectHandler api (_, _, name) = do liftIO $ addUser api name return True -- | Handler for incoming remote disconnecting event. userDisconnectHandler :: GuiApi -> (ProcessId, String, String) -> Process Bool userDisconnectHandler api (_, _, name) = do liftIO $ removeUser api name return True -- | Initializes application layer. initApplicationLayer :: FilePath -- ^ Glade file name to load gui from -> Maybe (String, String) -- ^ Optional arguments: serial port name and defualt user name. -> ProcessId -- ^ Parent layer id, for application layer root is is Main thread. 
-> Process () initApplicationLayer gladeFile args rootId = do spawnLocal $ do events <- liftIO initAppEvents (mainWindow, options, api) <- liftIO $ initGui gladeFile args $ callbacks events thisId <- getSelfPid channelId <- initChannelLayer thisId options spawnLocal $ do liftIO $ runGui mainWindow mapM_ (`send` (thisId, "exit")) [thisId, channelId, rootId] spawnLocal $ forever $ do checkEvent (sendEvent events) (\s -> send channelId (thisId, "send", s)) () checkEvent (connectEvent events) (\() -> send channelId (thisId, "connect")) () checkEvent (disconnectEvent events) (\() -> send channelId (thisId, "disconnect")) () checkEvent (optionChangedEvent events) (\(opt, oldopt) -> do liftIO $ removeUser api $ userName oldopt liftIO $ addUser api $ userName opt send channelId (thisId, "options", opt, oldopt)) () liftIO yield while $ receiveWait [ matchIf (\(_, com) -> com == "exit") exitMsg , matchIf (\(_, com, _, _) -> com == "message") $ printUserMessage api , matchIf (\(_, com, _) -> com == "info") $ printInfoMessage api , matchIf (\(_, com, _) -> com == "error") $ printErrorMessage api , matchIf (\(_, com, _) -> com == "options") $ setupOptionsHandler api , matchIf (\(_, com, _) -> com == "connect") $ userConnectHandler api , matchIf (\(_, com, _) -> com == "disconnect") $ userDisconnectHandler api] return ()
NCrashed/PowerCom
src/powercom/Application/Layer.hs
gpl-3.0
7,171
0
20
1,872
1,449
781
668
101
1
{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE RankNTypes #-} module Main (main) where -------------------------------------------------------------------------------- import Control.Exception (SomeException (..), handle) import Data.List (isPrefixOf) import Path.Parse import System.Exit import System.IO import System.Process (runInteractiveProcess, waitForProcess) import qualified Utils.Color as Color import qualified Utils.Icon as Icon import Xmobar -------------------------------------------------------------------------------- newtype Env = Env { envConfigHome :: Path Abs Dir } -------------------------------------------------------------------------------- main :: IO () main = do env <- Env <$> parseDirPath "$XDG_CONFIG_HOME" xmobar $ config env -------------------------------------------------------------------------------- config :: Env -> Config config env = defaultConfig { -- appearance font = "xft:Source Code Pro:size=10,Symbola:size=10,FontAwesome:size=10", border = NoBorder, borderColor = Color.background, bgColor = Color.background, fgColor = Color.textRegular, alpha = 255, position = TopSize C 100 32, -- layout sepChar = "%", -- delineator between plugin names and straight text alignSep = "}{", -- separator between left-right alignment template = concat [ "%StdinReader%", "}{", "%dropbox-status%", " ", "%default:Master%", " ", "%default:Capture%", " ", Icon.static "\x2328" <> " %kbd%", " ", Icon.static "\xf1eb" <> " %wlp0s20f3wi%", " ", "%battery%", " ", "%date%", " ", "%notification-status%", " " ], -- general behavior lowerOnStart = True, -- send to bottom of window stack on start hideOnStart = False, -- start with window unmapped (hidden) allDesktops = True, -- show on all desktops overrideRedirect = True, -- set the Override Redirect flag (Xlib) pickBroadest = False, -- choose widest display (multi-monitor) persistent = True, -- enable/disable hiding (True = disabled) -- icons iconRoot = 
toFilePath $ envConfigHome env </> [reldir|xmonad/icons|], -- plugins commands = [ Run $ Battery [ "--template", "<acstatus>", "--Low", "20", -- units: % "--High", "80", -- units: % "--low", Color.textAlert, "--normal", Color.textWarning, "--high", Color.textRegular, "--", -- battery specific options -- discharging status "-o", Icon.static "\xf242" <> " <left>% (<timeleft>)", -- AC "on" status "-O", Icon.static "\xf0e7" <> " <left>% (<timeleft>)", -- charged status "-i", Icon.static "\xf240" ] 50, Run $ Date dateTemplate "date" 10, Run $ Volume "default" "Master" [ "--template", "<status> <volume>%", "--", "-o", Icon.static "\x1F507", "-O", Icon.static "\x1F50A", "-c", Color.textRegular, "-C", Color.textRegular ] 10, Run $ Volume "default" "Capture" [ "--template", "<status> <volume>%", "--", "-o", Icon.static "\xf131", "-O", Icon.static "\xf130", "-c", Color.textRegular, "-C", Color.textRegular ] 10, Run $ Kbd [], Run $ Wireless "wlp0s20f3" [ "--template", "<ssid>" ] 100, Run $ NotificationStatus 10, Run $ DropboxStatus 100, Run StdinReader ] } dateTemplate :: String dateTemplate = concat [ Icon.static "\xf073", " %F (%a) ", Icon.static "\x23F2", " %T" ] -------------------------------------------------------------------------------- newtype NotificationStatus = NotificationStatus Int deriving (Show, Read) instance Exec NotificationStatus where alias _ = "notification-status" start (NotificationStatus r) callback = pollProg r "notify" ["status"] cb where cb (Just "enabled") = callback $ Icon.static "\xf0f3" cb (Just "disabled") = callback $ Icon.alert "\xf1f6" cb _ = callback "?" -------------------------------------------------------------------------------- newtype DropboxStatus = DropboxStatus Int deriving (Show, Read) instance Exec DropboxStatus where alias _ = "dropbox-status" start (DropboxStatus r) callback = pollProg r "dropbox" ["status"] cb where cb Nothing = callback "?" cb (Just res) | res == "Dropbox isn't running!" 
= callback $ Icon.alert "\xf16b Not running" | res == "Up to date" = callback $ Icon.static "\xf16b" | "Syncing" `isPrefixOf` res = callback $ Icon.static "\xf16b \xf021" | "Connecting" `isPrefixOf` res = callback $ Icon.alert "\xf16b \xf071 No connection" | otherwise = callback $ Icon.alert "\xf16b \xf071 Unknown" -------------------------------------------------------------------------------- pollProg :: Int -> FilePath -> [String] -> (Maybe String -> IO ()) -> IO () pollProg interval prog args cb = if interval > 0 then go else exec >>= cb where go = exec >>= cb >> tenthSeconds interval >> go exec = execProg' prog args execProg' :: FilePath -> [String] -> IO (Maybe String) execProg' prog args = do (i, o, e, p) <- runInteractiveProcess prog args Nothing Nothing exit <- waitForProcess p let closeHandles = hClose o >> hClose i >> hClose e getL = handle (\(SomeException _) -> return "") (hGetLine o) case exit of ExitSuccess -> do str <- getL closeHandles pure (Just str) _ -> closeHandles >> pure Nothing --------------------------------------------------------------------------------
d12frosted/environment
xmonad/xmobar/Main.hs
gpl-3.0
6,758
0
14
2,360
1,337
738
599
174
2
module Control.Monad.Loop where import Control.Monad -- | Monadic loop helper. Runs the loop until True loopM :: (Monad m) => s -> (s -> m (s,Bool)) -> m s loopM s f = do (s', stop) <- f s if stop then return s' else loopM s' f -- | Monadic loop helper. Runs the loop until True. Returns unit. loopM_ :: (Monad m) => s -> (s -> m (s,Bool)) -> m () loopM_ s f = do (s', stop) <- f s if stop then return () else loopM_ s' f
grwlf/vsim
src_r/Control/Monad/Loop.hs
gpl-3.0
456
0
11
127
190
100
90
10
2
module SIRS.SIRSFronend where import qualified Graphics.Gloss as GLO winSizeX :: Int winSizeX = 800 winSizeY :: Int winSizeY = 800 display :: GLO.Display display = (GLO.InWindow "SIRS (Gloss)" (winSizeX, winSizeY) (0, 0)) data RenderCellState = ShadeGreen | ShadeRed | ShadeBlue data RenderCell = RenderCell { renderCellCoord :: (Int, Int), renderCellState :: RenderCellState } renderFrame :: [RenderCell] -> (Int, Int) -> GLO.Picture renderFrame cs (xCells, yCells) = GLO.Pictures $ agentPics where agentPics = map (renderCell (cellWidth, cellHeight)) cs cellWidth = (fromIntegral winSizeX) / (fromIntegral xCells) cellHeight = (fromIntegral winSizeY) / (fromIntegral yCells) renderCell :: (Float, Float) -> RenderCell -> GLO.Picture renderCell (rectWidth, rectHeight) c = GLO.color color $ GLO.translate xPix yPix $ GLO.Polygon (GLO.rectanglePath rectWidth rectHeight) where (x, y) = renderCellCoord c s = renderCellState c xPix = fromRational (toRational (fromIntegral x * rectWidth)) - halfXSize yPix = fromRational (toRational (fromIntegral y * rectHeight)) - halfYSize color = cellColor s halfXSize = fromRational (toRational winSizeX / 2.0) halfYSize = fromRational (toRational winSizeY / 2.0) cellColor :: RenderCellState -> GLO.Color cellColor ShadeGreen = GLO.green cellColor ShadeRed = GLO.red cellColor ShadeBlue = GLO.blue
thalerjonathan/phd
coding/prototyping/haskell/PureAgentsPar/src/SIRS/SIRSFronend.hs
gpl-3.0
1,441
0
13
285
460
250
210
30
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Fitness.Types.Sum -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.Google.Fitness.Types.Sum where import Network.Google.Prelude -- | The type of a bucket signifies how the data aggregation is performed in -- the bucket. data AggregateBucketType = ABTActivitySegment -- ^ @activitySegment@ | ABTActivityType -- ^ @activityType@ | ABTSession -- ^ @session@ | ABTTime -- ^ @time@ | ABTUnknown -- ^ @unknown@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable AggregateBucketType instance FromHttpApiData AggregateBucketType where parseQueryParam = \case "activitySegment" -> Right ABTActivitySegment "activityType" -> Right ABTActivityType "session" -> Right ABTSession "time" -> Right ABTTime "unknown" -> Right ABTUnknown x -> Left ("Unable to parse AggregateBucketType from: " <> x) instance ToHttpApiData AggregateBucketType where toQueryParam = \case ABTActivitySegment -> "activitySegment" ABTActivityType -> "activityType" ABTSession -> "session" ABTTime -> "time" ABTUnknown -> "unknown" instance FromJSON AggregateBucketType where parseJSON = parseJSONText "AggregateBucketType" instance ToJSON AggregateBucketType where toJSON = toJSONText -- | A constant describing the type of this data source. Indicates whether -- this data source produces raw or derived data. 
data DataSourceType = Derived -- ^ @derived@ | Raw -- ^ @raw@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable DataSourceType instance FromHttpApiData DataSourceType where parseQueryParam = \case "derived" -> Right Derived "raw" -> Right Raw x -> Left ("Unable to parse DataSourceType from: " <> x) instance ToHttpApiData DataSourceType where toQueryParam = \case Derived -> "derived" Raw -> "raw" instance FromJSON DataSourceType where parseJSON = parseJSONText "DataSourceType" instance ToJSON DataSourceType where toJSON = toJSONText data BucketByTimePeriodType = Day -- ^ @day@ | Month -- ^ @month@ | Week -- ^ @week@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable BucketByTimePeriodType instance FromHttpApiData BucketByTimePeriodType where parseQueryParam = \case "day" -> Right Day "month" -> Right Month "week" -> Right Week x -> Left ("Unable to parse BucketByTimePeriodType from: " <> x) instance ToHttpApiData BucketByTimePeriodType where toQueryParam = \case Day -> "day" Month -> "month" Week -> "week" instance FromJSON BucketByTimePeriodType where parseJSON = parseJSONText "BucketByTimePeriodType" instance ToJSON BucketByTimePeriodType where toJSON = toJSONText data AggregateRequestFilteredDataQualityStandardItem = DATAQUALITYBLOODGLUCOSEISO151972003 -- ^ @dataQualityBloodGlucoseIso151972003@ | DATAQUALITYBLOODGLUCOSEISO151972013 -- ^ @dataQualityBloodGlucoseIso151972013@ | DataQualityBloodPressureAami -- ^ @dataQualityBloodPressureAami@ | DataQualityBloodPressureBhsAA -- ^ @dataQualityBloodPressureBhsAA@ | DataQualityBloodPressureBhsAB -- ^ @dataQualityBloodPressureBhsAB@ | DataQualityBloodPressureBhsBA -- ^ @dataQualityBloodPressureBhsBA@ | DataQualityBloodPressureBhsBB -- ^ @dataQualityBloodPressureBhsBB@ | DATAQUALITYBLOODPRESSUREESH2002 -- ^ @dataQualityBloodPressureEsh2002@ | DATAQUALITYBLOODPRESSUREESH2010 -- ^ @dataQualityBloodPressureEsh2010@ | DataQualityUnknown -- ^ @dataQualityUnknown@ 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable AggregateRequestFilteredDataQualityStandardItem instance FromHttpApiData AggregateRequestFilteredDataQualityStandardItem where parseQueryParam = \case "dataQualityBloodGlucoseIso151972003" -> Right DATAQUALITYBLOODGLUCOSEISO151972003 "dataQualityBloodGlucoseIso151972013" -> Right DATAQUALITYBLOODGLUCOSEISO151972013 "dataQualityBloodPressureAami" -> Right DataQualityBloodPressureAami "dataQualityBloodPressureBhsAA" -> Right DataQualityBloodPressureBhsAA "dataQualityBloodPressureBhsAB" -> Right DataQualityBloodPressureBhsAB "dataQualityBloodPressureBhsBA" -> Right DataQualityBloodPressureBhsBA "dataQualityBloodPressureBhsBB" -> Right DataQualityBloodPressureBhsBB "dataQualityBloodPressureEsh2002" -> Right DATAQUALITYBLOODPRESSUREESH2002 "dataQualityBloodPressureEsh2010" -> Right DATAQUALITYBLOODPRESSUREESH2010 "dataQualityUnknown" -> Right DataQualityUnknown x -> Left ("Unable to parse AggregateRequestFilteredDataQualityStandardItem from: " <> x) instance ToHttpApiData AggregateRequestFilteredDataQualityStandardItem where toQueryParam = \case DATAQUALITYBLOODGLUCOSEISO151972003 -> "dataQualityBloodGlucoseIso151972003" DATAQUALITYBLOODGLUCOSEISO151972013 -> "dataQualityBloodGlucoseIso151972013" DataQualityBloodPressureAami -> "dataQualityBloodPressureAami" DataQualityBloodPressureBhsAA -> "dataQualityBloodPressureBhsAA" DataQualityBloodPressureBhsAB -> "dataQualityBloodPressureBhsAB" DataQualityBloodPressureBhsBA -> "dataQualityBloodPressureBhsBA" DataQualityBloodPressureBhsBB -> "dataQualityBloodPressureBhsBB" DATAQUALITYBLOODPRESSUREESH2002 -> "dataQualityBloodPressureEsh2002" DATAQUALITYBLOODPRESSUREESH2010 -> "dataQualityBloodPressureEsh2010" DataQualityUnknown -> "dataQualityUnknown" instance FromJSON AggregateRequestFilteredDataQualityStandardItem where parseJSON = parseJSONText "AggregateRequestFilteredDataQualityStandardItem" instance ToJSON 
AggregateRequestFilteredDataQualityStandardItem where toJSON = toJSONText -- | A constant representing the type of the device. data DeviceType = ChestStrap -- ^ @chestStrap@ | HeadMounted -- ^ @headMounted@ | Phone -- ^ @phone@ | Scale -- ^ @scale@ | Tablet -- ^ @tablet@ | Unknown -- ^ @unknown@ | Watch -- ^ @watch@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable DeviceType instance FromHttpApiData DeviceType where parseQueryParam = \case "chestStrap" -> Right ChestStrap "headMounted" -> Right HeadMounted "phone" -> Right Phone "scale" -> Right Scale "tablet" -> Right Tablet "unknown" -> Right Unknown "watch" -> Right Watch x -> Left ("Unable to parse DeviceType from: " <> x) instance ToHttpApiData DeviceType where toQueryParam = \case ChestStrap -> "chestStrap" HeadMounted -> "headMounted" Phone -> "phone" Scale -> "scale" Tablet -> "tablet" Unknown -> "unknown" Watch -> "watch" instance FromJSON DeviceType where parseJSON = parseJSONText "DeviceType" instance ToJSON DeviceType where toJSON = toJSONText data DataSourceDataQualityStandardItem = DSDQSIDATAQUALITYBLOODGLUCOSEISO151972003 -- ^ @dataQualityBloodGlucoseIso151972003@ | DSDQSIDATAQUALITYBLOODGLUCOSEISO151972013 -- ^ @dataQualityBloodGlucoseIso151972013@ | DSDQSIDataQualityBloodPressureAami -- ^ @dataQualityBloodPressureAami@ | DSDQSIDataQualityBloodPressureBhsAA -- ^ @dataQualityBloodPressureBhsAA@ | DSDQSIDataQualityBloodPressureBhsAB -- ^ @dataQualityBloodPressureBhsAB@ | DSDQSIDataQualityBloodPressureBhsBA -- ^ @dataQualityBloodPressureBhsBA@ | DSDQSIDataQualityBloodPressureBhsBB -- ^ @dataQualityBloodPressureBhsBB@ | DSDQSIDATAQUALITYBLOODPRESSUREESH2002 -- ^ @dataQualityBloodPressureEsh2002@ | DSDQSIDATAQUALITYBLOODPRESSUREESH2010 -- ^ @dataQualityBloodPressureEsh2010@ | DSDQSIDataQualityUnknown -- ^ @dataQualityUnknown@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable DataSourceDataQualityStandardItem instance FromHttpApiData 
DataSourceDataQualityStandardItem where parseQueryParam = \case "dataQualityBloodGlucoseIso151972003" -> Right DSDQSIDATAQUALITYBLOODGLUCOSEISO151972003 "dataQualityBloodGlucoseIso151972013" -> Right DSDQSIDATAQUALITYBLOODGLUCOSEISO151972013 "dataQualityBloodPressureAami" -> Right DSDQSIDataQualityBloodPressureAami "dataQualityBloodPressureBhsAA" -> Right DSDQSIDataQualityBloodPressureBhsAA "dataQualityBloodPressureBhsAB" -> Right DSDQSIDataQualityBloodPressureBhsAB "dataQualityBloodPressureBhsBA" -> Right DSDQSIDataQualityBloodPressureBhsBA "dataQualityBloodPressureBhsBB" -> Right DSDQSIDataQualityBloodPressureBhsBB "dataQualityBloodPressureEsh2002" -> Right DSDQSIDATAQUALITYBLOODPRESSUREESH2002 "dataQualityBloodPressureEsh2010" -> Right DSDQSIDATAQUALITYBLOODPRESSUREESH2010 "dataQualityUnknown" -> Right DSDQSIDataQualityUnknown x -> Left ("Unable to parse DataSourceDataQualityStandardItem from: " <> x) instance ToHttpApiData DataSourceDataQualityStandardItem where toQueryParam = \case DSDQSIDATAQUALITYBLOODGLUCOSEISO151972003 -> "dataQualityBloodGlucoseIso151972003" DSDQSIDATAQUALITYBLOODGLUCOSEISO151972013 -> "dataQualityBloodGlucoseIso151972013" DSDQSIDataQualityBloodPressureAami -> "dataQualityBloodPressureAami" DSDQSIDataQualityBloodPressureBhsAA -> "dataQualityBloodPressureBhsAA" DSDQSIDataQualityBloodPressureBhsAB -> "dataQualityBloodPressureBhsAB" DSDQSIDataQualityBloodPressureBhsBA -> "dataQualityBloodPressureBhsBA" DSDQSIDataQualityBloodPressureBhsBB -> "dataQualityBloodPressureBhsBB" DSDQSIDATAQUALITYBLOODPRESSUREESH2002 -> "dataQualityBloodPressureEsh2002" DSDQSIDATAQUALITYBLOODPRESSUREESH2010 -> "dataQualityBloodPressureEsh2010" DSDQSIDataQualityUnknown -> "dataQualityUnknown" instance FromJSON DataSourceDataQualityStandardItem where parseJSON = parseJSONText "DataSourceDataQualityStandardItem" instance ToJSON DataSourceDataQualityStandardItem where toJSON = toJSONText -- | The different supported formats for each field in a data type. 
data DataTypeFieldFormat = Blob -- ^ @blob@ | FloatList -- ^ @floatList@ | FloatPoint -- ^ @floatPoint@ | Integer -- ^ @integer@ | IntegerList -- ^ @integerList@ | Map -- ^ @map@ | String -- ^ @string@ deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable DataTypeFieldFormat instance FromHttpApiData DataTypeFieldFormat where parseQueryParam = \case "blob" -> Right Blob "floatList" -> Right FloatList "floatPoint" -> Right FloatPoint "integer" -> Right Integer "integerList" -> Right IntegerList "map" -> Right Map "string" -> Right String x -> Left ("Unable to parse DataTypeFieldFormat from: " <> x) instance ToHttpApiData DataTypeFieldFormat where toQueryParam = \case Blob -> "blob" FloatList -> "floatList" FloatPoint -> "floatPoint" Integer -> "integer" IntegerList -> "integerList" Map -> "map" String -> "string" instance FromJSON DataTypeFieldFormat where parseJSON = parseJSONText "DataTypeFieldFormat" instance ToJSON DataTypeFieldFormat where toJSON = toJSONText
rueshyna/gogol
gogol-fitness/gen/Network/Google/Fitness/Types/Sum.hs
mpl-2.0
12,095
0
11
2,474
1,718
905
813
224
0
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.DFAReporting.MobileCarriers.List -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Retrieves a list of mobile carriers. -- -- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.mobileCarriers.list@. module Network.Google.Resource.DFAReporting.MobileCarriers.List ( -- * REST Resource MobileCarriersListResource -- * Creating a Request , mobileCarriersList , MobileCarriersList -- * Request Lenses , mclXgafv , mclUploadProtocol , mclAccessToken , mclUploadType , mclProFileId , mclCallback ) where import Network.Google.DFAReporting.Types import Network.Google.Prelude -- | A resource alias for @dfareporting.mobileCarriers.list@ method which the -- 'MobileCarriersList' request conforms to. type MobileCarriersListResource = "dfareporting" :> "v3.5" :> "userprofiles" :> Capture "profileId" (Textual Int64) :> "mobileCarriers" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] MobileCarriersListResponse -- | Retrieves a list of mobile carriers. -- -- /See:/ 'mobileCarriersList' smart constructor. 
data MobileCarriersList = MobileCarriersList' { _mclXgafv :: !(Maybe Xgafv) , _mclUploadProtocol :: !(Maybe Text) , _mclAccessToken :: !(Maybe Text) , _mclUploadType :: !(Maybe Text) , _mclProFileId :: !(Textual Int64) , _mclCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'MobileCarriersList' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'mclXgafv' -- -- * 'mclUploadProtocol' -- -- * 'mclAccessToken' -- -- * 'mclUploadType' -- -- * 'mclProFileId' -- -- * 'mclCallback' mobileCarriersList :: Int64 -- ^ 'mclProFileId' -> MobileCarriersList mobileCarriersList pMclProFileId_ = MobileCarriersList' { _mclXgafv = Nothing , _mclUploadProtocol = Nothing , _mclAccessToken = Nothing , _mclUploadType = Nothing , _mclProFileId = _Coerce # pMclProFileId_ , _mclCallback = Nothing } -- | V1 error format. mclXgafv :: Lens' MobileCarriersList (Maybe Xgafv) mclXgafv = lens _mclXgafv (\ s a -> s{_mclXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). mclUploadProtocol :: Lens' MobileCarriersList (Maybe Text) mclUploadProtocol = lens _mclUploadProtocol (\ s a -> s{_mclUploadProtocol = a}) -- | OAuth access token. mclAccessToken :: Lens' MobileCarriersList (Maybe Text) mclAccessToken = lens _mclAccessToken (\ s a -> s{_mclAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). mclUploadType :: Lens' MobileCarriersList (Maybe Text) mclUploadType = lens _mclUploadType (\ s a -> s{_mclUploadType = a}) -- | User profile ID associated with this request. mclProFileId :: Lens' MobileCarriersList Int64 mclProFileId = lens _mclProFileId (\ s a -> s{_mclProFileId = a}) . 
_Coerce -- | JSONP mclCallback :: Lens' MobileCarriersList (Maybe Text) mclCallback = lens _mclCallback (\ s a -> s{_mclCallback = a}) instance GoogleRequest MobileCarriersList where type Rs MobileCarriersList = MobileCarriersListResponse type Scopes MobileCarriersList = '["https://www.googleapis.com/auth/dfatrafficking"] requestClient MobileCarriersList'{..} = go _mclProFileId _mclXgafv _mclUploadProtocol _mclAccessToken _mclUploadType _mclCallback (Just AltJSON) dFAReportingService where go = buildClient (Proxy :: Proxy MobileCarriersListResource) mempty
brendanhay/gogol
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/MobileCarriers/List.hs
mpl-2.0
4,810
0
18
1,155
726
421
305
107
1
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE OverloadedStrings #-} -- Module : Network.AWS.EC2.Metadata -- Copyright : (c) 2013-2015 Brendan Hay <brendan.g.hay@gmail.com> -- License : This Source Code Form is subject to the terms of -- the Mozilla Public License, v. 2.0. -- A copy of the MPL can be found in the LICENSE file or -- you can obtain it at http://mozilla.org/MPL/2.0/. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : experimental -- Portability : non-portable (GHC extensions) -- | Retrieve an EC2 instance's local metadata. module Network.AWS.EC2.Metadata ( -- * Requests -- ** Running on EC2 isEC2 -- ** Dynamic , Dynamic (..) , dynamic -- ** Metadata , Metadata (..) , Mapping (..) , Info (..) , Interface (..) , metadata -- ** User data , userdata ) where import Control.Applicative import Control.Exception import Control.Monad import Control.Monad.Except import Data.ByteString (ByteString) import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy as LBS import Data.Maybe import Data.Monoid import Data.Text (Text) import qualified Data.Text as Text import Network.AWS.Data import Network.HTTP.Conduit data Dynamic = FWS -- ^ Value showing whether the customer has enabled detailed one-minute -- monitoring in CloudWatch. -- -- Valid values: enabled | disabled. | Document -- ^ JSON containing instance attributes, such as instance-id, -- private IP address, etc. | PKCS7 -- ^ Used to verify the document's authenticity and content against the -- signature. | Signature instance ToPath Dynamic where toPath x = case x of FWS -> "fws/instance-monitoring" Document -> "instance-identity/document" PKCS7 -> "instance-identity/pkcs7" Signature -> "instance-identity/signature" data Metadata = AMIId -- ^ The AMI ID used to launch the instance. | AMILaunchIndex -- ^ If you started more than one instance at the same time, this value -- indicates the order in which the instance was launched. 
-- The value of the first instance launched is 0. | AMIManifestPath -- ^ The path to the AMI's manifest file in Amazon S3. -- If you used an Amazon EBS-backed AMI to launch the instance, -- the returned result is unknown. | AncestorAMIIds -- ^ The AMI IDs of any instances that were rebundled to create this AMI. -- This value will only exist if the AMI manifest file contained an -- ancestor-amis key. | BlockDevice !Mapping -- ^ See: 'Mapping' | Hostname -- ^ The private hostname of the instance. In cases where multiple network -- interfaces are present, this refers to the eth0 device -- (the device for which the device number is 0). | IAM !Info -- ^ See: 'Info' | InstanceAction -- ^ Notifies the instance that it should reboot in preparation for bundling. -- Valid values: none | shutdown | bundle-pending. | InstanceId -- ^ The ID of this instance. | InstanceType -- ^ The type of instance. -- -- See: @InstanceType@ | KernelId -- ^ The ID of the kernel launched with this instance, if applicable. | LocalHostname -- ^ The private DNS hostname of the instance. In cases where multiple -- network interfaces are present, this refers to the eth0 device -- (the device for which the device number is 0). | LocalIPV4 -- ^ The private IP address of the instance. In cases where multiple network -- interfaces are present, this refers to the eth0 device -- (the device for which the device number is 0). | MAC -- ^ The instance's media access control (MAC) address. In cases where -- multiple network interfaces are present, this refers to the eth0 device -- (the device for which the device number is 0). | Network !Text !Interface -- ^ See: 'Interface' | AvailabilityZone -- ^ The Availability Zone in which the instance launched. | ProductCodes -- ^ Product codes associated with the instance, if any. | PublicHostname -- ^ The instance's public DNS. If the instance is in a VPC, this category -- is only returned if the enableDnsHostnames attribute is set to true. 
-- For more information, see Using DNS with Your VPC. | PublicIPV4 -- ^ The public IP address. If an Elastic IP address is associated with the -- instance, the value returned is the Elastic IP address. | OpenSSHKey -- ^ Public key. Only available if supplied at instance launch time. | RAMDiskId -- ^ The ID of the RAM disk specified at launch time, if applicable. | ReservationId -- ^ ID of the reservation. | SecurityGroups -- ^ The names of the security groups applied to the instance. deriving (Eq, Ord, Show) instance ToPath Metadata where toPath x = case x of AMIId -> "ami-id" AMILaunchIndex -> "ami-launch-index" AMIManifestPath -> "ami-manifest-path" AncestorAMIIds -> "ancestor-ami-ids" BlockDevice m -> "block-device-mapping/" <> toPath m Hostname -> "hostname" IAM m -> "iam/" <> toPath m InstanceAction -> "instance-action" InstanceId -> "instance-id" InstanceType -> "instance-type" KernelId -> "kernel-id" LocalHostname -> "local-hostname" LocalIPV4 -> "local-ipv4" MAC -> "mac" Network n m -> "network/interfaces/macs/" <> n <> "/" <> toPath m AvailabilityZone -> "placement/availability-zone" ProductCodes -> "product-codes" PublicHostname -> "public-hostname" PublicIPV4 -> "public-ipv4" OpenSSHKey -> "public-keys/0/openssh-key" RAMDiskId -> "ramdisk-id" ReservationId -> "reservation-id" SecurityGroups -> "security-groups" data Mapping = AMI -- ^ The virtual device that contains the root/boot file system. | EBS !Int -- ^ The virtual devices associated with Amazon EBS volumes, if present. -- This value is only available in metadata if it is present at launch time. -- The N indicates the index of the Amazon EBS volume (such as ebs1 or ebs2). | Ephemeral !Int -- ^ The virtual devices associated with ephemeral devices, if present. -- The N indicates the index of the ephemeral volume. 
| Root -- ^ The virtual devices or partitions associated with the root devices, -- or partitions on the virtual device, where the root (/ or C:) file system -- is associated with the given instance. | Swap -- ^ The virtual devices associated with swap. Not always present. deriving (Eq, Ord, Show) instance ToPath Mapping where toPath x = case x of AMI -> "ami" EBS n -> "ebs" <> toText n Ephemeral n -> "ephemeral" <> toText n Root -> "root" Swap -> "root" data Interface = IDeviceNumber -- ^ The device number associated with that interface. Each interface must -- have a unique device number. The device number serves as a hint to device -- naming in the instance; for example, device-number is 2 for the eth2 device. | IIPV4Associations !Text -- ^ The private IPv4 addresses that are associated with each public-ip -- address and assigned to that interface. | ILocalHostname -- ^ The interface's local hostname. | ILocalIPV4s -- ^ The private IP addresses associated with the interface. | IMAC -- ^ The instance's MAC address. | IOwnerId -- ^ The ID of the owner of the network interface. In multiple-interface -- environments, an interface can be attached by a third party, such as -- Elastic Load Balancing. Traffic on an interface is always billed to -- the interface owner. | IPublicHostname -- ^ The interface's public DNS. If the instance is in a VPC, this category -- is only returned if the enableDnsHostnames attribute is set to true. -- For more information, see Using DNS with Your VPC. | IPublicIPV4s -- ^ The Elastic IP addresses associated with the interface. There may be -- multiple IP addresses on an instance. | ISecurityGroups -- ^ Security groups to which the network interface belongs. Returned only -- for instances launched into a VPC. | ISecurityGroupIds -- ^ IDs of the security groups to which the network interface belongs. -- Returned only for instances launched into a VPC. 
For more information on -- security groups in the EC2-VPC platform, see Security Groups for Your VPC. | ISubnetId -- ^ The ID of the subnet in which the interface resides. Returned only for -- instances launched into a VPC. | ISubnetIPV4_CIDRBlock -- ^ The CIDR block of the subnet in which the interface resides. Returned -- only for instances launched into a VPC. | IVPCId -- ^ The ID of the VPC in which the interface resides. Returned only for -- instances launched into a VPC. | IVPCIPV4_CIDRBlock -- ^ The CIDR block of the VPC in which the interface resides. Returned only -- for instances launched into a VPC. deriving (Eq, Ord, Show) instance ToPath Interface where toPath x = case x of IDeviceNumber -> "device-number" IIPV4Associations ip -> "ipv4-associations/" <> ip ILocalHostname -> "local-hostname" ILocalIPV4s -> "local-ipv4s" IMAC -> "mac" IOwnerId -> "owner-id" IPublicHostname -> "public-hostname" IPublicIPV4s -> "public-ipv4s" ISecurityGroups -> "security-groups" ISecurityGroupIds -> "security-group-ids" ISubnetId -> "subnet-id" ISubnetIPV4_CIDRBlock -> "subnet-ipv4-cidr-block" IVPCId -> "vpc-id" IVPCIPV4_CIDRBlock -> "vpc-ipv4-cidr-block" data Info = Info -- ^ Returns information about the last time the instance profile was updated, -- including the instance's LastUpdated date, InstanceProfileArn, -- and InstanceProfileId. | SecurityCredentials (Maybe Text) -- ^ Where role-name is the name of the IAM role associated with the instance. -- Returns the temporary security credentials. -- -- See: 'Auth' for JSON deserialisation. deriving (Eq, Ord, Show) instance ToPath Info where toPath x = case x of Info -> "info" SecurityCredentials r -> "security-credentials/" <> fromMaybe "" r -- | Test whether the host is running on EC2 by requesting the instance-data. 
isEC2 :: Manager -> IO Bool isEC2 m = liftIO (req `catch` err) where req = do !_ <- request m "http://instance-data/latest" return True err :: HttpException -> IO Bool err = const (return False) dynamic :: MonadIO m => Manager -> Dynamic -> ExceptT HttpException m ByteString dynamic m = get m . mappend "http://169.254.169.254/latest/dynamic/" . toPath metadata :: MonadIO m => Manager -> Metadata -> ExceptT HttpException m ByteString metadata m = get m . mappend "http://169.254.169.254/latest/meta-data/" . toPath userdata :: MonadIO m => Manager -> ExceptT HttpException m (Maybe ByteString) userdata m = Just `liftM` get m "http://169.254.169.254/latest/user-data" `catchError` err where err (StatusCodeException s _ _) | fromEnum s == 404 = return Nothing err e = throwError e get :: MonadIO m => Manager -> Text -> ExceptT HttpException m ByteString get m url = ExceptT . liftIO $ req `catch` err where req = Right . strip <$> request m url strip bs | BS.isSuffixOf "\n" bs = BS.init bs | otherwise = bs err :: HttpException -> IO (Either HttpException a) err = return . Left request :: Manager -> Text -> IO ByteString request m url = do rq <- parseUrl (Text.unpack url) rs <- httpLbs (rq { responseTimeout = Just 2 }) m return . LBS.toStrict $ responseBody rs
dysinger/amazonka
amazonka/src/Network/AWS/EC2/Metadata.hs
mpl-2.0
12,551
0
12
3,549
1,487
835
652
198
2
module Main where import Lib main :: IO () main = print "hi"
shapr/schrabble
app/Main.hs
agpl-3.0
73
0
6
25
25
14
11
4
1
{-# LANGUAGE OverloadedStrings #-} import Language.PureScript.Bridge import Control.Lens import Data.Proxy import GHC.Generics import Servant.PureScript import Gonimo.CodeGen.TypeBridges import Gonimo.Server.Db.Entities import Gonimo.Server.Types import Gonimo.Server.State.Types (SessionId, MessageNumber) import Gonimo.Server.Error import Gonimo.WebAPI import Gonimo.WebAPI.Types as Client data GonimoBridge instance HasBridge GonimoBridge where languageBridge _ = buildBridge gonimoBridge gonimoProxy :: Proxy GonimoBridge gonimoProxy = Proxy data TestTypeConstructor m a = TestTypeConstructor (m a) deriving Generic myTypes :: [SumType 'Haskell] myTypes = [ mkSumType (Proxy :: Proxy Client.AuthData) , mkSumType (Proxy :: Proxy Account) , mkSumType (Proxy :: Proxy Client.InvitationInfo) , mkSumType (Proxy :: Proxy Client.InvitationReply) , mkSumType (Proxy :: Proxy ServerError) , mkSumType (Proxy :: Proxy AuthToken) , mkSumType (Proxy :: Proxy Device) , mkSumType (Proxy :: Proxy Coffee) , mkSumType (Proxy :: Proxy Invitation) , mkSumType (Proxy :: Proxy InvitationDelivery) , mkSumType (Proxy :: Proxy SendInvitation) , mkSumType (Proxy :: Proxy DeviceType) , mkSumType (Proxy :: Proxy Family) , mkSumType (Proxy :: Proxy DeviceInfo) , mkSumType (Proxy :: Proxy SessionId) , mkSumType (Proxy :: Proxy MessageNumber) , mkSumType (Proxy :: Proxy FamilyName) ] mySettings :: Settings mySettings = (addReaderParam "Authorization" defaultSettings & apiModuleName .~ "Gonimo.WebAPI") { _generateSubscriberAPI = True } main :: IO () main = do let gonimoFrontPath = "../gonimo-front/src" writePSTypes gonimoFrontPath (buildBridge gonimoBridge) myTypes writeAPIModuleWithSettings mySettings gonimoFrontPath gonimoProxy gonimoAPI
gonimo/gonimo-back
app/PSGenerator.hs
agpl-3.0
2,043
0
9
519
501
275
226
-1
-1
data Slovo = Samo Char | Crtica Slovo deriving (Eq) instance Show Slovo where show (Samo c) = [c] show (Crtica s) = (show s) ++ "'" data Lambda = Varijabla Slovo | Aplikacija Lambda Lambda | Apstrakcija Slovo Lambda instance Show Lambda where show (Varijabla s) = show s show (Aplikacija funkcija argument) = show funkcija ++ " " ++ show argument show (Apstrakcija slovo povratna_vrijednost) = "(lambda " ++ show slovo ++ " . " ++ show povratna_vrijednost ++ ")" lam (Varijabla v) izraz = Apstrakcija v izraz f # arg = Aplikacija f arg supst :: Lambda -> Lambda -> Lambda -> Lambda supst (Varijabla c) (Varijabla v) cime | c == v = cime | otherwise = (Varijabla c) supst (Aplikacija f arg) sto@(Varijabla v) cime = Aplikacija (supst f sto cime) (supst arg sto cime) supst (Apstrakcija x f) sto@(Varijabla v) cime | x == v = Apstrakcija x f | otherwise = Apstrakcija x (supst f sto cime) -- (\ x -> x + y) [y |-> x] ne radi kako treba problem = beta (lam y (lam x (f # x # y)) # x) -- trebalo bi dati (\ x' -> x' + x) beta (Aplikacija (Apstrakcija x f) y) = supst f (Varijabla x) y x = Varijabla (Samo 'x') x' = Varijabla (Crtica (Samo 'x')) x'' = Varijabla (Crtica (Crtica (Samo 'x'))) y = Varijabla (Samo 'y') y' = Varijabla (Crtica (Samo 'y')) z = Varijabla (Samo 'z') a = Varijabla (Samo 'a') b = Varijabla (Samo 'b') f = Varijabla (Samo 'f')
vedgar/mlr
2016 Kolokvij/Lambde.hs
unlicense
1,481
0
13
398
640
317
323
34
1
module SyntaxOne where x = (+) f :: String -> Int f xs = w `x` 1 where w = length xs
thewoolleyman/haskellbook
04/09/chad/SyntaxOne.hs
unlicense
90
0
7
27
44
26
18
5
1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} -- | Example taken from: http://members.shaw.ca/el.supremo/MagickWand/gel.htm -- "Gel" Effects example -- http://www.imagemagick.org/Usage/advanced/#gel_effects import Control.Exception.Lifted import Control.Monad (void) import Graphics.ImageMagick.MagickWand import Prelude hiding (catch) main :: IO () main = withMagickWandGenesis $ do -- First step is to create the gel shape: {- convert -size 100x60 xc:none \ -fill red -draw 'circle 25,30 10,30' \ -draw 'circle 75,30 90,30' \ -draw 'rectangle 25,15 75,45' \ gel_shape.png -} localGenesis $ do -- Create a wand (_,mw) <- magickWand pw <- pixelWand (_,dw) <- drawingWand setSize mw 100 60 readImage mw "xc:none" pw `setColor` "red" dw `setFillColor` pw drawCircle dw 25 30 10 30 drawCircle dw 75 30 90 30 drawRectangle dw 25 15 75 45 -- Now we draw the Drawing wand on to the Magick Wand drawImage mw dw writeImage mw (Just "gel_shape.png") -- Next step is to create the gel highlight: {- convert gel_shape.png \ \( +clone -fx A +matte -blur 0x12 -shade 110x0 -normalize \ -sigmoidal-contrast 16,60% -evaluate multiply .5 \ -roll +5+10 +clone -compose Screen -composite \) \ -compose In -composite gel_highlight.png -} localGenesis $ do (_,mw) <- magickWand readImage mw "gel_shape.png" (_,mwc) <- cloneMagickWand mw (_,mwf) <- fxImage mwc "A" -- TODO: fails, should we ignore it? ignoreExceptions (mw `setImageAlphaChannel` deactivateAlphaChannel) blurImage mwf 0 12 shadeImage mwf True 110 0 normalizeImage mwf -- The last argument is specified as a percentage on the command line -- but is specified to the function as a percentage of the QuantumRange sigmoidalContrastImage mwf True 16 (0.6 * quantumRange) evaluateImage mwf multiplyEvaluateOperator 0.5 rollImage mwf 5 10 -- The +clone operation copies the original but only so that -- it can be used in the following composite operation, so we don't -- actually need to do a clone, just reference the original image. 
compositeImage mwf mw screenCompositeOp 0 0 compositeImage mw mwf inCompositeOp 0 0 writeImage mw (Just "gel_highlight.png") -- Now create the gel border {- convert gel_highlight.png \ \( +clone -fx A +matte -blur 0x2 -shade 0x90 -normalize \ -blur 0x2 -negate -evaluate multiply .4 -negate -roll -.5-1 \ +clone -compose Multiply -composite \) \ -compose In -composite gel_border.png -} localGenesis $ do (_,mw) <- magickWand readImage mw "gel_highlight.png" (_,mwc) <- cloneMagickWand mw (_,mwf) <- fxImage mwc "A" ignoreExceptions (mwf `setImageAlphaChannel` deactivateAlphaChannel) blurImage mwf 0 2 shadeImage mwf True 0 90 normalizeImage mwf blurImage mwf 0 2 negateImage mwf False evaluateImage mwf multiplyEvaluateOperator 0.4 negateImage mwf False rollImage mwf (-0.5) (-1) compositeImage mwf mw multiplyCompositeOp 0 0 compositeImage mw mwf inCompositeOp 0 0 writeImage mw (Just "gel_border.png") -- and finally the text and shadow effect {- convert gel_border.png \ -font Candice -pointsize 24 -fill white -stroke black \ -gravity Center -annotate 0 "Gel" -trim -repage 0x0+4+4 \ \( +clone -background navy -shadow 80x4+4+4 \) +swap \ -background none -flatten gel_button.png -} localGenesis $ do (_,mw) <- magickWand (_,dw) <- drawingWand pw <- pixelWand readImage mw "gel_border.png" dw `setFont` "Lucida-Handwriting-Italic" dw `setFontSize` 24 pw `setColor` "white" dw `setFillColor` pw pw `setColor` "black" dw `setStrokeColor` pw dw `setGravity` centerGravity -- It is important to notice here that MagickAnnotateImage renders the text on -- to the MagickWand, NOT the DrawingWand. It only uses the DrawingWand for font -- and colour information etc. 
annotateImage mw dw 0 0 0 "Gel" trimImage mw 0 resetImagePage mw (Just "0x0+4+4") (_,mwc) <- cloneMagickWand mw pw `setColor` "navy" mwc `setImageBackgroundColor` pw shadowImage mwc 80 4 4 4 (_,mwf) <- magickWand addImage mwf mwc addImage mwf mw pw `setColor` "none" mwf `setImageBackgroundColor` pw (_,mw') <- mergeImageLayers mwf flattenLayer writeImage mw' (Just "gel_button.png") ignoreExceptions f = catch (void f) (\(_::MagickWandException) -> return ())
flowbox-public/imagemagick
examples/gel.hs
apache-2.0
4,782
0
13
1,257
919
449
470
81
1
{- Control.Exception.Hierarchical -- Template Haskell for defining exceptions Copyright (C) 2014 Galois, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -} {-| "Control.Exception" leverages "Data.Typeable" to fake subtyping and thereby give Haskell support for hierarchies of exceptions. However, defining exception hierarchies requires quite a bit of boilerplate. For instance, to define * a top-level exception, 'TracerException', * a sub-exception, 'TimingFailure', and * a sub-exception, 'WriteFailure', requires several paragraphs of code: > import Control.Exception > import Data.Typeable (Typeable, cast) > > data TracerException = forall e. Exception e => TracerException e > deriving Typeable > > instance Show TracerException where > show (TracerException e) = show e > > instance Exception TracerException > > data TimingFailure = TimingFailure > deriving (Show, Typeable) > > instance Exception TimingFailure where > toException = toException . TracerException > fromException x = do > TracerException a <- fromException x > cast a > > data WriteFailure = WriteFailure > deriving (Show, Typeable) > > instance Exception WriteFailure where > toException = toException . 
TracerException > fromException x = do > TracerException a <- fromException x > cast a Instead of writing this, one could simply write > import Control.Exception (SomeException(SomeException)) > import Control.Exception.Hierarchical > > mkAbstractException 'SomeException "TracerException" > mkException 'TracerException "TimingFailure" > mkException 'TracerException "WriteFailure" and allow Template Haskell to fill in the rest. This libray deals with two types of exceptions: /abstract/ and /concrete/ exceptions. Both types can be caught with 'Control.Exception.catch' and other associated functions; however, only you may only extend abstract exceptions, and you may only throw concrete ones. This is a fundamental limitation of the Haskell exception hierarchy system as it currently exists. -} {-# LANGUAGE TemplateHaskell #-} module Control.Exception.Hierarchical ( mkAbstractException , mkException ) where import Control.Exception (Exception, SomeException(SomeException)) import Control.Monad ((<=<), liftM) import Data.Typeable (Typeable, cast) import Language.Haskell.TH --------------------------- Hierarchies and casting ---------------------------- {-| Creates declarations to make a data type a sub-exception of another exception. This is best illustrated by some examples: > exceptionDeclaration 'SomeException 'MyAbstractException > ======> > instance Exception MyAbstractException > exceptionDeclaration 'MyAbstractException 'MyConcreteException > ======> > instance Exception MyConcreteException where > toException = toException . MyConcreteException > fromException = fromException >=> \(MyAbstractException x) -> cast x Note that exceptions directly under 'SomeException' are special-cased; the default implementation for the 'Exception' type class is sufficient in this case. 
-} exceptionDeclaration :: Name -- ^ the name of the super-exception -> Name -- ^ the name of the sub-exception -> DecsQ exceptionDeclaration super name = one $ instanceD' (cxt []) [t| Exception $(conT name) |] (if super == 'SomeException then {- 'name' is directly under 'SomeException', so use the default implementation for the conversion functions. -} return [] else {- 'name' is directly under some other exception, so explicitly define the conversion functions to set up the hierarchy correctly. -} exceptionHierarchyFunctions super) {-| Creates declarations to implement the 'Exception' instance for a sub-exception. -} exceptionHierarchyFunctions :: Name -- ^ the name of the super-exception -> DecsQ exceptionHierarchyFunctions super = [d| toException = toException . sup where sup = $(conE super) fromException = cast . sub <=< fromException where sub = $(destruct1 super) |] ----------------------------- Abstract exceptions ------------------------------ {-| Creates an /abstract/ sub-exception of an existing exception. As discussed in the introduction, such an exception cannot be thrown; it can only be extended. -} mkAbstractException :: Name -- ^ the name of the super-exception’s data constructor -> String -- ^ the name of the exception to create -> DecsQ mkAbstractException super name = let name' = mkName name in many [ abstractDataDeclaration name' , abstractShowDeclaration name' , exceptionDeclaration super name' ] {-| Defines a new data type suitable for use as an abstract exception. For example, > abstractDataDeclaration (mkName "Name") > ======> > data Name = forall e. Exception e => Name e > deriving Typeable -} abstractDataDeclaration :: Name -> DecsQ abstractDataDeclaration name = one $ dataD (cxt []) name [] [let e = mkName "e" in forallC [PlainTV e] (cxt [classP ''Exception [varT e]]) (normalC name [return (NotStrict, VarT e)])] [''Typeable] {-| Creates an instance declaration for an abstract exception type. 
For example, > abstractShowDeclaration ''Name > ======> > instance Show Name where > show (Name e) = show e -} abstractShowDeclaration :: Name -> DecsQ abstractShowDeclaration name = one $ instanceD' (cxt []) [t| Show $(conT name) |] [d| show = show . $(destruct1 name) |] ----------------------------- Concrete exceptions ----------------------------- {-| Creates a /concrete/ sub-exception of an existing exception. As discussed in the introduction, such an exception cannot be extended; it can only be thrown. -} mkException :: Name -- ^ the name of the super-exception’s data constructor -> String -- ^ the name of the exception to create -> DecsQ mkException super name = let name' = mkName name in many [ dataDeclaration name' , exceptionDeclaration super name' ] {-| Defines a new data type suitable for use as a concrete exception. For example, > dataDeclaration (mkName "Name") > ======> > data Name = Name > deriving (Show, Typeable) -} dataDeclaration :: Name -> DecsQ dataDeclaration name = one $ dataD (cxt []) name [] [normalC name []] [''Show, ''Typeable] ----------------------------------- Utility ----------------------------------- -- | Lifts 'DecQ' to a singleton 'DecsQ'. one :: DecQ -> DecsQ one = liftM (:[]) -- | Concatenates multiple 'DecsQ' values. many :: [DecsQ] -> DecsQ many = liftM concat . sequence -- | Like 'instanceD', but accepts a 'DecsQ' instead of a '[DecQ]'. instanceD' :: CxtQ -> TypeQ -> DecsQ -> DecQ instanceD' c t mkDecs = do decs <- mkDecs instanceD c t $ map return decs -- | Unwraps a single-field constructor. destruct1 :: Name -> ExpQ destruct1 name = do underlying <- newName "underlying" lam1E (conP name [varP underlying]) (varE underlying)
GaloisInc/hierarchical-exceptions
src/Control/Exception/Hierarchical.hs
apache-2.0
7,992
0
16
1,904
681
372
309
-1
-1
{- This code is derived from Andrej Bauer's implementation of the Reals using Dyadics -} {- | This module contains a purely Haskell implementation of dyadic rationals, suitable for interval arithmetic. A faster implementation of dyadic rationals would use a fast arbitrary-precision floating-point library, such as MPFR and the related hmpfr Haskell bindings for it. A dyadic number is a rational whose denominator is a power of 2. -} module Data.Real.Dyadic ( Dyadic(..), ilogb, dnormalize, dToFloat, dinv, dlog2, dshift, ddiv ) where import Data.Bits -- | A dyadic number is of the form @m * 2^e@ where @m@ is the /mantissa/ and @e@ is the /exponent/. data Dyadic = Dyadic { mant :: Integer, expo :: Int } instance Show Dyadic where show (Dyadic m e) = show m ++ "*2^" ++ show e shifted2 :: (Integer -> Integer -> a) -> Dyadic -> Dyadic -> a shifted2 f (Dyadic m1 e1) (Dyadic m2 e2) = case compare e1 e2 of LT -> f m1 (shiftL m2 (e2-e1)) EQ -> f m1 m2 GT -> f (shiftL m1 (e1-e2)) m2 instance Eq Dyadic where a == b = shifted2 (==) a b a /= b = shifted2 (/=) a b instance Ord Dyadic where compare a b = shifted2 compare a b instance Num Dyadic where Dyadic m1 e1 + Dyadic m2 e2 = Dyadic m3 e3 where m3 = if e1 < e2 then m1 + shiftL m2 (e2 - e1) else shiftL m1 (e1 - e2) + m2 e3 = min e1 e2 Dyadic m1 e1 - Dyadic m2 e2 = Dyadic m3 e3 where m3 = if e1 < e2 then m1 - shiftL m2 (e2 - e1) else shiftL m1 (e1 - e2) - m2 e3 = min e1 e2 Dyadic m1 e1 * Dyadic m2 e2 = Dyadic (m1 * m2) (e1 + e2) abs (Dyadic m e) = Dyadic (abs m) e signum (Dyadic m e) = fromInteger (signum m) fromInteger i = Dyadic i 0 -- | This was taken from -- | <http://www.haskell.org/pipermail/haskell-cafe/2008-February/039640.html> -- | and it computes the integral logarithm in given base. 
ilogb :: Integer -> Integer -> Int ilogb b n | n < 0 = ilogb b (- n) | n < b = 0 | otherwise = (up b n 1) - 1 where up b n a = if n < (b ^ a) then bin b (quot a 2) a else up b n (2*a) bin b lo hi = if (hi - lo) <= 1 then hi else let av = quot (lo + hi) 2 in if n < (b ^ av) then bin b lo av else bin b av hi dlog2 (Dyadic m e) = e + ilogb 2 m dnormalize :: Int -> Dyadic -> Dyadic dnormalize s a@(Dyadic m e) = if e < -s then Dyadic (shiftR m (-e - s)) (-s) else a dToFloat (Dyadic m e) = encodeFloat m e dinv :: Int -> Dyadic -> Dyadic dinv _ (Dyadic 0 _) = 0 dinv s (Dyadic m e) = Dyadic (shiftL 1 (s + 1 - e) `div` m) (-(s + 1)) ddiv :: Int -> Dyadic -> Dyadic -> Dyadic ddiv _ _ (Dyadic 0 _) = 0 ddiv s (Dyadic m1 e1) (Dyadic m2 e2) = Dyadic (shiftL m1 (s + 1 + e1 - e2) `div` m2) (-(s + 1)) dshift :: Dyadic -> Int -> Dyadic dshift (Dyadic m e) k = Dyadic m (e + k)
robbertkrebbers/fewdigits
Data/Real/Dyadic.hs
bsd-2-clause
2,990
0
14
946
1,189
610
579
55
4
module NinetyNine where myReverse :: [a] -> [a] myReverse = foldl (\acc x -> (x:acc)) []
naphthalene/haskell-99onthewall
ten/5.hs
bsd-2-clause
90
0
9
17
48
28
20
3
1
-- | Define a position datatype for giving locations in error messages. module Text.XML.HaXml.Posn ( -- * Position type Posn() -- ** Constructors of a new position , posInNewCxt -- :: String -> Maybe Posn -> Posn , noPos -- :: Posn -- ** Strictifier , forcep -- ** Modifiers , addcol, newline, tab, white ) where import Char -- | Source positions contain a filename, line, column, and an -- inclusion point, which is itself another source position, -- recursively. data Posn = Pn String !Int !Int (Maybe Posn) deriving (Eq) -- | Dummy value for generated data, where a true source position does -- not exist. noPos :: Posn noPos = Pn "no recorded position" 0 0 Nothing -- | @posInNewCxt name pos@ creates a new source position from an old one. -- It is used when opening a new file (e.g. a DTD inclusion), to denote -- the start of the file @name@, but retain the stacked information that -- it was included from the old @pos@. posInNewCxt :: String -> Maybe Posn -> Posn posInNewCxt name pos = Pn name 1 1 pos instance Show Posn where showsPrec p (Pn f l c i) = showString f . showString " at line " . shows l . showString " col " . shows c . ( case i of Nothing -> id Just p -> showString "\n used by " . shows p ) -- | Just used to strictify the internal values of a position, to avoid -- space leaks. forcep :: Posn -> Int forcep (Pn f n m i) = m `seq` n -- | Add n character positions to the given position. addcol :: Int -> Posn -> Posn addcol n (Pn f r c i) = Pn f r (c+n) i -- | Add a newline or tab to the given position. newline, tab :: Posn -> Posn newline (Pn f r c i) = Pn f (r+1) 1 i tab (Pn f r c i) = Pn f r (((c`div`8)+1)*8) i -- | Add the given whitespace char to the given position. -- Precondition: @white c | isSpace c = True@ white :: Char -> Posn -> Posn white ' ' = addcol 1 white '\n' = newline white '\r' = id white '\t' = tab white '\xa0' = addcol 1
FranklinChen/hugs98-plus-Sep2006
packages/HaXml/src/Text/XML/HaXml/Posn.hs
bsd-3-clause
2,182
0
12
696
487
263
224
41
1
module Test.Relation where import Data.List.TypeLevel import Data.List.TypeLevel.Union (Union) import Data.Proxy import Data.Proxy.TH import Data.Relation import qualified Data.Relation.Backend as Backend import Data.Tagged.Functor import Data.Text (Text) import Data.Time import qualified Data.Vector.Generic as G import Data.Vinyl hiding (Dict) import Data.Vinyl.DictFun import Data.Vinyl.Functor (Lift (..)) import Data.Vinyl.Functor (Identity (..)) import Data.Vinyl.Lens (rcast) import Data.Vinyl.Named import Test.Framework.Providers.API (Test) import Test.Framework.Providers.HUnit (testCase) import Test.HUnit (Assertion, (@?=)) data TestData a = TestData (forall rs. RelOp a rs -> IO (Backend.Test rs)) (Rec (ListToRelation a) AllTemplates) newtype ToRelation a rs = ToRelation (([Rec (TaggedFunctor Identity) rs] -> a rs) -> RelOp a rs) newtype ListToRelation a rs = ListToRelation (Backend.Test rs -> a rs) relationSpecTests :: forall a. (forall rs. RelOp a rs -> IO (Backend.Test rs)) -> Rec (ListToRelation a) AllTemplates -> [Test] relationSpecTests a b = map ($ TestData a b) [ testCase "Restriction Test 1 (Value Equality)" . testRestriction1 , testCase "Restriction Test 2 (Disjunction)" . testRestriction2 , testCase "Projection 1 (Contains Duplicates)" . testProjection1 , testCase "Projection 2 (Empty Null)" . testProjection2 , testCase "Projection 3 (Nonempty Null)" . testProjection3 , testCase "Natural Join 1" . testNaturalJoin1 , testCase "Natural Join 2" . testNaturalJoin2 , testCase "Natural Join 3 (Pushable Disjunction Afterwards)" . testNaturalJoin3 , testCase "Natural Join 4 (Nonpushable Disjunction Afterwards)" . testNaturalJoin4 , testCase "Natural Join 5 (Nonpushable Disjunction Afterwards, Join on Top)" . testNaturalJoin5 , testCase "Union 1 (Identity)" . 
testUnion1 ] type Person = Sort '[ '("person_name", Text) , '("person_id" , Int) , '("person_age" , Int) ] type Employment = Sort '[ '("person_id", Int) , '("company_id", Int) , '("start_date", Day) ] type Company = Sort '[ '("company_name", Text) , '("company_id", Int) ] tag :: v -> TaggedFunctor Identity '(k,v) tag = TaggedFunctor . Identity -- note: Arnold has not worked anywhere personTemplate :: Lift (->) (ListToRelation a) (RelOp a) Person personTemplate = Lift $ \(ListToRelation f) -> RelTable (reifyDictFun (Proxy :: Proxy MinimalConstraints) (rpure Proxy)) implicitOrdList $ f $ Backend.Test $ map rcast ( [ tag 1 :& tag "Drew" :& tag 24 :& RNil , tag 2 :& tag "Alexa" :& tag 14 :& RNil , tag 3 :& tag "Jordan" :& tag 39 :& RNil , tag 4 :& tag "Mary" :& tag 66 :& RNil , tag 5 :& tag "Arnold" :& tag 14 :& RNil , tag 6 :& tag "Carlos" :& tag 20 :& RNil , tag 7 :& tag "Juan" :& tag 52 :& RNil ] :: [ Rec (TaggedFunctor Identity) '[ '("person_id", Int) , '("person_name", Text) , '("person_age", Int) ] ] ) -- note: no one has worked at company 1002 companyTemplate :: Lift (->) (ListToRelation a) (RelOp a) Company companyTemplate = Lift $ \(ListToRelation f) -> RelTable (reifyDictFun (Proxy :: Proxy MinimalConstraints) (rpure Proxy)) implicitOrdList $ f $ Backend.Test [ tag "Dunder Mifflin" :& tag 1001 :& RNil , tag "Spade and Archer" :& tag 1002 :& RNil , tag "Monks Diner" :& tag 1003 :& RNil , tag "Moes Tavern" :& tag 1004 :& RNil , tag "The Krusty Krab" :& tag 1005 :& RNil ] employmentTemplate :: Lift (->) (ListToRelation a) (RelOp a) Employment employmentTemplate = Lift $ \(ListToRelation f) -> RelTable (reifyDictFun (Proxy :: Proxy MinimalConstraints) (rpure Proxy)) implicitOrdList $ f $ Backend.Test $ map rcast $ ( [ tag 1 :& tag 1004 :& tag (fromGregorian 2002 8 13) :& RNil , tag 2 :& tag 1001 :& tag (fromGregorian 2004 3 23) :& RNil , tag 2 :& tag 1005 :& tag (fromGregorian 2001 10 2) :& RNil , tag 3 :& tag 1004 :& tag (fromGregorian 1998 3 19) :& RNil , tag 4 
:& tag 1004 :& tag (fromGregorian 2001 3 12) :& RNil , tag 4 :& tag 1001 :& tag (fromGregorian 2008 4 9) :& RNil , tag 4 :& tag 1005 :& tag (fromGregorian 2012 3 8) :& RNil , tag 6 :& tag 1003 :& tag (fromGregorian 2002 11 14) :& RNil , tag 7 :& tag 1001 :& tag (fromGregorian 2001 11 7) :& RNil ] :: [ Rec (TaggedFunctor Identity) '[ '("person_id", Int) , '("company_id", Int) , '("start_date", Day) ] ] ) allTemplates :: Rec (Lift (->) (ListToRelation a) (RelOp a)) '[Person,Company,Employment] allTemplates = personTemplate :& companyTemplate :& employmentTemplate :& RNil type AllTemplates = '[Person,Company,Employment] type Fields (super :: [(k,*)]) (sub :: [k]) = [Rec (TaggedFunctor Identity) (SublistLookupManyUnordered super sub)] testRestriction1 :: TestData a -> Assertion testRestriction1 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run (restrict (valEq [pr1|"person_name"|] ("Carlos" :: Text)) person) actual @?= expected where expected = Backend.Test $ map rcast ( [tag 6 :& tag "Carlos" :& tag 20 :& RNil] :: Fields Person '["person_id","person_name","person_age"] ) testRestriction2 :: TestData a -> Assertion testRestriction2 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ flip restrict person $ predOr (valEq [pr1|"person_name"|] ("Carlos" :: Text)) (valEq [pr1|"person_age"|] (14 :: Int)) actual @?= expected where expected = Backend.Test $ map rcast ( [ tag 6 :& tag "Carlos" :& tag 20 :& RNil , tag 5 :& tag "Arnold" :& tag 14 :& RNil , tag 2 :& tag "Alexa" :& tag 14 :& RNil ] :: Fields Person '["person_id","person_name","person_age"] ) testNaturalJoin1 :: TestData a -> Assertion testNaturalJoin1 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ naturalJoin employment $ flip restrict person $ predOr (valEq [pr1|"person_id"|] (1 :: Int)) (valEq [pr1|"person_id"|] (2 :: Int)) actual @?= 
expected where expected = Backend.Test $ map rcast ( [ tag 1 :& tag 1004 :& tag (fromGregorian 2002 8 13) :& tag "Drew" :& tag 24 :& RNil , tag 2 :& tag 1001 :& tag (fromGregorian 2004 3 23) :& tag "Alexa" :& tag 14 :& RNil , tag 2 :& tag 1005 :& tag (fromGregorian 2001 10 2) :& tag "Alexa" :& tag 14 :& RNil ] :: Fields (Union Employment Person) '["person_id","company_id","start_date","person_name","person_age"] ) testNaturalJoin2 :: TestData a -> Assertion testNaturalJoin2 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ restrict ( predOr (valEq [pr1|"person_id"|] (1 :: Int)) (valEq [pr1|"person_id"|] (2 :: Int)) ) $ naturalJoin employment person actual @?= expected where expected = Backend.Test $ map rcast ( [ tag 1 :& tag 1004 :& tag (fromGregorian 2002 8 13) :& tag "Drew" :& tag 24 :& RNil , tag 2 :& tag 1001 :& tag (fromGregorian 2004 3 23) :& tag "Alexa" :& tag 14 :& RNil , tag 2 :& tag 1005 :& tag (fromGregorian 2001 10 2) :& tag "Alexa" :& tag 14 :& RNil ] :: Fields (Union Employment Person) '["person_id","company_id","start_date","person_name","person_age"] ) testNaturalJoin3 :: TestData a -> Assertion testNaturalJoin3 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ restrict ( predOr (valEq [pr1|"person_id"|] (1 :: Int)) (valEq [pr1|"company_id"|] (1003 :: Int)) ) $ naturalJoin employment person actual @?= expected where expected = Backend.Test $ map rcast ( [ tag 1 :& tag 1004 :& tag (fromGregorian 2002 8 13) :& tag "Drew" :& tag 24 :& RNil , tag 6 :& tag 1003 :& tag (fromGregorian 2002 11 14) :& tag "Carlos" :& tag 20 :& RNil ] :: Fields (Union Employment Person) '["person_id","company_id","start_date","person_name","person_age"] ) testNaturalJoin4 :: TestData a -> Assertion testNaturalJoin4 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ restrict ( predOr (valEq 
[pr1|"person_name"|] ("Drew" :: Text)) (valEq [pr1|"start_date"|] (fromGregorian 2002 11 14)) ) $ naturalJoin employment person actual @?= expected where expected = Backend.Test $ map rcast ( [ tag 1 :& tag 1004 :& tag (fromGregorian 2002 8 13) :& tag "Drew" :& tag 24 :& RNil , tag 6 :& tag 1003 :& tag (fromGregorian 2002 11 14) :& tag "Carlos" :& tag 20 :& RNil ] :: Fields (Union Employment Person) '["person_id","company_id","start_date","person_name","person_age"] ) testNaturalJoin5 :: TestData a -> Assertion testNaturalJoin5 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ naturalJoin company $ restrict ( predOr (valEq [pr1|"person_name"|] ("Drew" :: Text)) (valEq [pr1|"start_date"|] (fromGregorian 2002 11 14)) ) $ naturalJoin employment person actual @?= expected where expected = Backend.Test $ map rcast ( [ tag "Moes Tavern" :& tag 1 :& tag 1004 :& tag (fromGregorian 2002 8 13) :& tag "Drew" :& tag 24 :& RNil , tag "Monks Diner" :& tag 6 :& tag 1003 :& tag (fromGregorian 2002 11 14) :& tag "Carlos" :& tag 20 :& RNil ] :: Fields (Union Company (Union Employment Person)) '["company_name", "person_id","company_id","start_date","person_name","person_age"] ) testProjection1 :: TestData a -> Assertion testProjection1 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ project [pr|"person_age"|] person actual @?= expected where expected = Backend.Test $ map rcast ( [ tag 24 :& RNil, tag 14 :& RNil, tag 39 :& RNil , tag 66 :& RNil, tag 20 :& RNil, tag 52 :& RNil ] :: Fields Person '["person_age"] ) testProjection2 :: TestData a -> Assertion testProjection2 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ project (Proxy :: Proxy '[]) $ restrict (valEq [pr1|"person_age"|] (105 :: Int)) $ person actual @?= expected where expected = Backend.Test [] testProjection3 :: TestData a -> Assertion 
testProjection3 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ project (Proxy :: Proxy '[]) $ restrict (valEq [pr1|"person_age"|] (14 :: Int)) $ person actual @?= expected where expected = Backend.Test [RNil] testUnion1 :: TestData a -> Assertion testUnion1 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ union company company actual @?= expected where expected = Backend.Test [ tag "Dunder Mifflin" :& tag 1001 :& RNil , tag "Spade and Archer" :& tag 1002 :& RNil , tag "Monks Diner" :& tag 1003 :& RNil , tag "Moes Tavern" :& tag 1004 :& RNil , tag "The Krusty Krab" :& tag 1005 :& RNil ] testUnion2 :: TestData a -> Assertion testUnion2 (TestData run f) = case rapply allTemplates f of person :& company :& employment :& RNil -> do actual <- run $ union (rename [pr1|"person_id"|] [pr1|"foo"|] (project [pr|"person_id"|] person)) (rename [pr1|"company_id"|] [pr1|"foo"|] (project [pr|"company_id"|] company)) actual @?= expected where expected = Backend.Test [ tag 1001 :& RNil , tag 1002 :& RNil , tag 1003 :& RNil , tag 1004 :& RNil , tag 1005 :& RNil , tag 1 :& RNil , tag 2 :& RNil , tag 3 :& RNil , tag 4 :& RNil , tag 5 :& RNil , tag 6 :& RNil , tag 7 :& RNil ] -- type PersonBig = Sort -- '[ '("person_name", Text) -- , '("person_id" , Int) -- , '("person_age" , Int) -- , '("person_weight" , Int) -- , '("person_height" , Int) -- , '("person_allergies" , Int) -- , '("person_alive" , Bool) -- , '("person_is_weasel" , Bool) -- , '("person_strength" , Int) -- ] -- type Person = -- '[ '("person_name", Text) -- , '("person_id" , Int) -- , '("person_age" , Int) -- ] -- -- type Employment = -- '[ '("start_date", Day) -- , '("person_id", Int) -- , '("company_id", Int) -- ] -- -- type Company = -- '[ '("company_name", Text) -- , '("company_id", Int) -- ]
andrewthad/vinyl-vectors
test/Test/Relation.hs
bsd-3-clause
13,411
0
19
3,671
4,584
2,382
2,202
-1
-1
{-# LANGUAGE RelaxedPolyRec #-} -- Needed for vhdl_ty_either', for some reason... module CLasH.VHDL.VHDLTools where -- Standard modules import qualified Maybe import qualified Data.Either as Either import qualified Data.List as List import qualified Data.Char as Char import qualified Data.Map as Map import qualified Control.Monad as Monad import qualified Data.Accessor.Monad.Trans.StrictState as MonadState -- VHDL Imports import qualified Language.VHDL.AST as AST -- GHC API import qualified CoreSyn import qualified Name import qualified OccName import qualified Var import qualified Id import qualified TyCon import qualified Type import qualified DataCon import qualified CoreSubst import qualified Outputable import qualified Unique import qualified VarSet -- Local imports import CLasH.VHDL.VHDLTypes import CLasH.Translator.TranslatorTypes import CLasH.Utils.Core.CoreTools import CLasH.Utils import CLasH.Utils.Pretty import CLasH.VHDL.Constants ----------------------------------------------------------------------------- -- Functions to generate concurrent statements ----------------------------------------------------------------------------- -- Create an unconditional assignment statement mkUncondAssign :: Either CoreSyn.CoreBndr AST.VHDLName -- ^ The signal to assign to -> AST.Expr -- ^ The expression to assign -> AST.ConcSm -- ^ The resulting concurrent statement mkUncondAssign dst expr = mkAssign dst Nothing expr -- Create a conditional assignment statement mkCondAssign :: Either CoreSyn.CoreBndr AST.VHDLName -- ^ The signal to assign to -> AST.Expr -- ^ The condition -> AST.Expr -- ^ The value when true -> AST.Expr -- ^ The value when false -> AST.ConcSm -- ^ The resulting concurrent statement mkCondAssign dst cond true false = mkAssign dst (Just (cond, true)) false -- Create a conditional or unconditional assignment statement mkAssign :: Either CoreSyn.CoreBndr AST.VHDLName -- ^ The signal to assign to -> Maybe (AST.Expr , AST.Expr) -- ^ Optionally, the 
condition to test for -- and the value to assign when true. -> AST.Expr -- ^ The value to assign when false or no condition -> AST.ConcSm -- ^ The resulting concurrent statement mkAssign dst cond false_expr = let -- I'm not 100% how this assignment AST works, but this gets us what we -- want... whenelse = case cond of Just (cond_expr, true_expr) -> let true_wform = AST.Wform [AST.WformElem true_expr Nothing] in [AST.WhenElse true_wform cond_expr] Nothing -> [] false_wform = AST.Wform [AST.WformElem false_expr Nothing] dst_name = case dst of Left bndr -> AST.NSimple (varToVHDLId bndr) Right name -> name assign = dst_name AST.:<==: (AST.ConWforms whenelse false_wform Nothing) in AST.CSSASm assign mkAltsAssign :: Either CoreSyn.CoreBndr AST.VHDLName -- ^ The signal to assign to -> [AST.Expr] -- ^ The conditions -> [AST.Expr] -- ^ The expressions -> AST.ConcSm -- ^ The Alt assigns mkAltsAssign dst conds exprs | (length conds) /= ((length exprs) - 1) = error "\nVHDLTools.mkAltsAssign: conditions expression mismatch" | otherwise = let whenelses = zipWith mkWhenElse conds exprs false_wform = AST.Wform [AST.WformElem (last exprs) Nothing] dst_name = case dst of Left bndr -> AST.NSimple (varToVHDLId bndr) Right name -> name assign = dst_name AST.:<==: (AST.ConWforms whenelses false_wform Nothing) in AST.CSSASm assign where mkWhenElse :: AST.Expr -> AST.Expr -> AST.WhenElse mkWhenElse cond true_expr = let true_wform = AST.Wform [AST.WformElem true_expr Nothing] in AST.WhenElse true_wform cond mkAssocElems :: [AST.Expr] -- ^ The argument that are applied to function -> AST.VHDLName -- ^ The binder in which to store the result -> Entity -- ^ The entity to map against. 
-> [AST.AssocElem] -- ^ The resulting port maps mkAssocElems args res entity = arg_maps ++ (Maybe.maybeToList res_map_maybe) where arg_ports = ent_args entity res_port_maybe = ent_res entity -- Create an expression of res to map against the output port res_expr = vhdlNameToVHDLExpr res -- Map each of the input ports arg_maps = zipWith mkAssocElem (map fst arg_ports) args -- Map the output port, if present res_map_maybe = fmap (\port -> mkAssocElem (fst port) res_expr) res_port_maybe -- | Create an VHDL port -> signal association mkAssocElem :: AST.VHDLId -> AST.Expr -> AST.AssocElem mkAssocElem port signal = Just port AST.:=>: (AST.ADExpr signal) -- | Create an aggregate signal mkAggregateSignal :: [AST.Expr] -> AST.Expr mkAggregateSignal x = AST.Aggregate (map (\z -> AST.ElemAssoc Nothing z) x) mkComponentInst :: String -- ^ The portmap label -> AST.VHDLId -- ^ The entity name -> [Integer] -- ^ Clock domains -> [AST.AssocElem] -- ^ The port assignments -> AST.ConcSm mkComponentInst label entity_id clockDomains portassigns = AST.CSISm compins where -- We always have a clock port, so no need to map it anywhere but here clkPorts = map (\clkId -> mkAssocElem clkId (idToVHDLExpr clkId)) $ map (AST.unsafeVHDLBasicId . ("clock" ++) . show) clockDomains resetn_port = mkAssocElem resetId (idToVHDLExpr resetId) compins = AST.CompInsSm (mkVHDLExtId label) (AST.IUEntity (AST.NSimple entity_id)) (AST.PMapAspect (portassigns ++ clkPorts ++ [resetn_port])) ----------------------------------------------------------------------------- -- Functions to generate VHDL Exprs ----------------------------------------------------------------------------- varToVHDLExpr :: Var.Var -> TypeSession AST.Expr varToVHDLExpr var = case Id.isDataConWorkId_maybe var of -- This is a dataconstructor. Just dc -> dataconToVHDLExpr dc -- Not a datacon, just another signal. 
Nothing -> return $ AST.PrimName $ AST.NSimple $ varToVHDLId var -- Turn a VHDLName into an AST expression vhdlNameToVHDLExpr = AST.PrimName -- Turn a VHDL Id into an AST expression idToVHDLExpr = vhdlNameToVHDLExpr . AST.NSimple -- Turn a Core expression into an AST expression exprToVHDLExpr core = varToVHDLExpr (exprToVar core) -- Turn a String into a VHDL expr containing an id stringToVHDLExpr :: String -> AST.Expr stringToVHDLExpr = idToVHDLExpr . mkVHDLExtId -- Turn a alternative constructor into an AST expression. For -- dataconstructors, this is only the constructor itself, not any arguments it -- has. Should not be called with a DEFAULT constructor. altconToVHDLExpr :: CoreSyn.AltCon -> TypeSession AST.Expr altconToVHDLExpr (CoreSyn.DataAlt dc) = dataconToVHDLExpr dc altconToVHDLExpr (CoreSyn.LitAlt _) = error "\nVHDL.conToVHDLExpr: Literals not support in case alternatives yet" altconToVHDLExpr CoreSyn.DEFAULT = error "\nVHDL.conToVHDLExpr: DEFAULT alternative should not occur here!" -- Turn a datacon (without arguments!) into a VHDL expression. 
dataconToVHDLExpr :: DataCon.DataCon -> TypeSession AST.Expr dataconToVHDLExpr dc = do typemap <- MonadState.get tsTypes htype_either <- mkHTypeEither (DataCon.dataConRepType dc) case htype_either of -- No errors Right htype -> do let dcname = DataCon.dataConName dc case htype of (BuiltinType "Bit") -> return $ AST.PrimLit $ case Name.getOccString dcname of "High" -> "'1'"; "Low" -> "'0'" (BuiltinType "Bool") -> return $ AST.PrimLit $ case Name.getOccString dcname of "True" -> "true"; "False" -> "false" otherwise -> do let existing_ty = Monad.liftM (fmap fst) $ Map.lookup htype typemap case existing_ty of Just ty -> do let lit = AST.PrimLit $ show $ getConstructorIndex htype $ Name.getOccString dcname return lit Nothing -> error $ "\nVHDLTools.dataconToVHDLExpr: Trying to make value for non-representable DataCon: " ++ pprString dc -- Error when constructing htype Left err -> error err ----------------------------------------------------------------------------- -- Functions dealing with names, variables and ids ----------------------------------------------------------------------------- -- Creates a VHDL Id from a binder varToVHDLId :: CoreSyn.CoreBndr -> AST.VHDLId varToVHDLId var = mkVHDLExtId $ varToUniqString var -- Creates a VHDL Name from a binder varToVHDLName :: CoreSyn.CoreBndr -> AST.VHDLName varToVHDLName = AST.NSimple . varToVHDLId -- Extracts the binder name as a String varToString :: CoreSyn.CoreBndr -> String varToString = OccName.occNameString . Name.nameOccName . Var.varName varToUniqString :: CoreSyn.CoreBndr -> String varToUniqString var = (varToString var ++ varToStringUniq var) -- Get the string version a Var's unique varToStringUniq :: Var.Var -> String varToStringUniq = show . Unique.getKey . 
Var.varUnique -- Extracts the string version of the name nameToString :: Name.Name -> String nameToString name = name' where name'' = OccName.occNameString $ Name.nameOccName name name' = case (filter (`notElem` ",") name'') of "()" -> "Tuple" ++ (show $ (+1) $ length $ filter (`elem` ",") name'') n -> name'' -- Shortcut for Basic VHDL Ids. -- Can only contain alphanumerics and underscores. The supplied string must be -- a valid basic id, otherwise an error value is returned. This function is -- not meant to be passed identifiers from a source file, use mkVHDLExtId for -- that. mkVHDLBasicId :: String -> AST.VHDLId mkVHDLBasicId s = AST.unsafeVHDLBasicId $ (strip_multiscore . strip_leading . strip_invalid) s where -- Strip invalid characters. strip_invalid = filter (`elem` ['A'..'Z'] ++ ['a'..'z'] ++ ['0'..'9'] ++ "_") -- Strip leading numbers and underscores strip_leading = dropWhile (`elem` ['0'..'9'] ++ "_") -- Strip multiple adjacent underscores strip_multiscore = concatMap (\cs -> case cs of ('_':_) -> "_" _ -> cs ) . List.group -- Shortcut for Extended VHDL Id's. These Id's can contain a lot more -- different characters than basic ids, but can never be used to refer to -- basic ids. -- Use extended Ids for any values that are taken from the source file. mkVHDLExtId :: String -> AST.VHDLId mkVHDLExtId s = (AST.unsafeVHDLBasicId . zEncodeString . strip_multiscore . strip_leading . strip_invalid) s where -- Allowed characters, taken from ForSyde's mkVHDLExtId allowed = ['A'..'Z'] ++ ['a'..'z'] ++ ['0'..'9'] ++ " \"#&'()*+,./:;<=>_|!$%@?[]^`{}~-" strip_invalid = filter (`elem` allowed) strip_leading = dropWhile (`elem` ['0'..'9'] ++ "_") strip_multiscore = concatMap (\cs -> case cs of ('_':_) -> "_" _ -> cs ) . List.group -- Create a record field selector that selects the given label from the record -- stored in the given binder. 
mkSelectedName :: AST.VHDLName -> AST.VHDLId -> AST.VHDLName mkSelectedName name label = AST.NSelected $ name AST.:.: (AST.SSimple label) -- Create an indexed name that selects a given element from a vector. mkIndexedName :: AST.VHDLName -> AST.Expr -> AST.VHDLName -- Special case for already indexed names. Just add an index mkIndexedName (AST.NIndexed (AST.IndexedName name indexes)) index = AST.NIndexed (AST.IndexedName name (indexes++[index])) -- General case for other names mkIndexedName name index = AST.NIndexed (AST.IndexedName name [index]) ----------------------------------------------------------------------------- -- Functions dealing with VHDL types ----------------------------------------------------------------------------- builtin_types :: TypeMap builtin_types = Map.fromList [ (BuiltinType "Bit", Just (std_logicTM, Nothing)), (BuiltinType "Bool", Just (booleanTM, Nothing)) -- TysWiredIn.boolTy ] -- Is the given type representable at runtime? isReprType :: Type.Type -> TypeSession Bool isReprType ty = do ty_either <- mkHTypeEither ty return $ case ty_either of Left _ -> False Right _ -> True -- | Turn a Core type into a HType, returning an error using the given -- error string if the type was not representable. mkHType :: (TypedThing t, Outputable.Outputable t) => String -> t -> TypeSession HType mkHType msg ty = do htype_either <- mkHTypeEither ty case htype_either of Right htype -> return htype Left err -> error $ msg ++ err -- | Turn a Core type into a HType. Returns either an error message if -- the type was not representable, or the HType generated. 
mkHTypeEither :: (TypedThing t, Outputable.Outputable t) => t -> TypeSession (Either String HType) mkHTypeEither tything = case getType tything of Nothing -> return $ Left $ "\nVHDLTools.mkHTypeEither: Typed thing without a type: " ++ pprString tything Just ty -> mkHTypeEither' ty mkHTypeEither' :: Type.Type -> TypeSession (Either String HType) mkHTypeEither' ty | ty_has_free_tyvars ty = return $ Left $ "\nVHDLTools.mkHTypeEither': Cannot create type: type has free type variables: " ++ pprString ty | isStateType ty = return $ Right StateType | otherwise = case Type.splitTyConApp_maybe ty of Just (tycon, args) -> do typemap <- MonadState.get tsTypes let name = Name.getOccString (TyCon.tyConName tycon) let builtinTyMaybe = Map.lookup (BuiltinType name) typemap case builtinTyMaybe of (Just x) -> return $ Right $ BuiltinType name Nothing -> case name of "Vector" -> do let el_ty = tfvec_elem ty elem_htype_either <- mkHTypeEither el_ty case elem_htype_either of -- Could create element type Right elem_htype -> do len <- tfp_to_int (tfvec_len_ty ty) return $ Right $ VecType len elem_htype -- Could not create element type Left err -> return $ Left $ "\nVHDLTools.mkHTypeEither': Can not construct vectortype for elementtype: " ++ pprString el_ty ++ err "Unsigned" -> do len <- tfp_to_int (sized_word_len_ty ty) return $ Right $ SizedWType len "Signed" -> do len <- tfp_to_int (sized_word_len_ty ty) return $ Right $ SizedIType len "Index" -> do bound <- tfp_to_int (ranged_word_bound_ty ty) -- Upperbound is exclusive, hence the -1 return $ Right $ RangedWType (bound - 1) "()" -> do return $ Right UnitType "Clock" -> do return $ Left "\nVHDLTools.mkHTypeEither': Clock type is not representable" "Comp" -> do return $ Left "\nVHDLTools.mkHTypeEither': Comp type is not representable" otherwise -> mkTyConHType tycon args Nothing -> return $ Left $ "\nVHDLTools.mkHTypeEither': Do not know what to do with type: " ++ pprString ty mkTyConHType :: TyCon.TyCon -> [Type.Type] -> TypeSession 
(Either String HType) mkTyConHType tycon args = case TyCon.tyConDataCons tycon of -- Not an algebraic type [] -> return $ Left $ "VHDLTools.mkTyConHType: Only custom algebraic types are supported: " ++ pprString tycon dcs -> do let arg_tyss = map DataCon.dataConRepArgTys dcs let enum_ty = EnumType name (map (nameToString . DataCon.dataConName) dcs) case (concat arg_tyss) of -- No arguments, this is just an enumeration type [] -> return (Right enum_ty) -- At least one argument, this becomes an aggregate type _ -> do -- Resolve any type arguments to this type let real_arg_tyss = map (map (CoreSubst.substTy subst)) arg_tyss -- Remove any state type fields let real_arg_tyss_nostate = map (filter (\x -> not (isStateType x))) real_arg_tyss elem_htyss_either <- mapM (mapM mkHTypeEither) real_arg_tyss_nostate let (errors, elem_htyss) = unzip (map Either.partitionEithers elem_htyss_either) case (all null errors) of True -> case (dcs,filter (\x -> x /= UnitType && x /= StateType) $ concat elem_htyss) of -- A single constructor with a single (non-state) field? ([dc], [elem_hty]) -> return $ Right elem_hty -- If we get here, then all of the argument types were state -- types (we check for enumeration types at the top). Not -- sure how to handle this, so error out for now. (_, []) -> return $ Right StateType --error $ "VHDLTools.mkTyConHType: ADT with only State elements (or something like that?) Dunno how to handle this yet. Tycon: " ++ pprString tycon ++ " Arguments: " ++ pprString args -- A full ADT (with multiple fields and one or multiple -- constructors). (_, elem_htys) -> do let (_, fieldss) = List.mapAccumL (List.mapAccumL label_field) labels elem_htyss -- Only put in an enumeration as part of the aggregation -- when there are multiple datacons let enum_ty_part = case dcs of [dc] -> Nothing _ -> Just ("constructor", EnumType (name ++ "Con") (map (nameToString . 
DataCon.dataConName) dcs)) -- Create the AggrType HType return $ Right $ AggrType name enum_ty_part fieldss -- There were errors in element types False -> return $ Left $ "\nVHDLTools.mkTyConHType: Can not construct type for: " ++ pprString tycon ++ "\n because no type can be construced for some of the arguments.\n" ++ (concat $ concat errors) where name = (nameToString (TyCon.tyConName tycon)) tyvars = TyCon.tyConTyVars tycon tyVarArgMap = zip tyvars args dataConTyVars = (concatMap VarSet.varSetElems) $ (map Type.tyVarsOfType) $ (concatMap DataCon.dataConRepArgTys) $ TyCon.tyConDataCons tycon dataConTyVarArg x = (x, snd $ head $ filter (equalTyVarName x) tyVarArgMap) equalTyVarName z (tv,_) = (Name.nameOccName $ Var.varName z) == (Name.nameOccName $ Var.varName tv) subst = CoreSubst.extendTvSubstList CoreSubst.emptySubst $ map dataConTyVarArg dataConTyVars -- Label a field by taking the first available label and returning -- the rest. label_field :: [String] -> HType -> ([String], (String, HType)) label_field (l:ls) htype = (ls, (l, htype)) labels = [ [a,b] | a <- ['A'..'Z'], b <- ['A'..'Z'] ] --map (:[]) ['A'..'Z'] vhdlTy :: (TypedThing t, Outputable.Outputable t) => String -> t -> TypeSession (Maybe AST.TypeMark) vhdlTy msg ty = do htype <- mkHType msg ty vhdlTyMaybe htype -- | Translate a Haskell type to a VHDL type, generating a new type if needed. -- Returns an error value, using the given message, when no type could be -- created. Returns Nothing when the type is valid, but empty. 
vhdlTyMaybe :: HType -> TypeSession (Maybe AST.TypeMark) vhdlTyMaybe htype = do typemap <- MonadState.get tsTypes -- If not a builtin type, try the custom types let existing_ty = Map.lookup htype typemap case existing_ty of -- Found a type, return it Just (Just (t, _)) -> return $ Just t Just (Nothing) -> return Nothing -- No type yet, try to construct it Nothing -> do newty <- (construct_vhdl_ty htype) MonadState.modify tsTypes (Map.insert htype newty) case newty of Just (ty_id, ty_def) -> do MonadState.modify tsTypeDecls (\typedefs -> typedefs ++ [mktydecl (ty_id, ty_def)]) return $ Just ty_id Nothing -> return Nothing -- Construct a new VHDL type for the given Haskell type. Returns an error -- message or the resulting typemark and typedef. construct_vhdl_ty :: HType -> TypeSession TypeMapRec -- State types don't generate VHDL construct_vhdl_ty htype = case htype of StateType -> return Nothing UnitType -> return Nothing (SizedWType w) -> mkUnsignedTy w (SizedIType i) -> mkSignedTy i (RangedWType u) -> mkNaturalTy 0 u (VecType n e) -> mkVectorTy (VecType n e) -- Create a custom type from this tycon otherwise -> mkTyconTy htype -- | Create VHDL type for a custom tycon mkTyconTy :: HType -> TypeSession TypeMapRec mkTyconTy htype = case htype of (AggrType name enum_field_maybe fieldss) -> do let (labelss, elem_htypess) = unzip (map unzip fieldss) elemTyMaybess <- mapM (mapM vhdlTyMaybe) elem_htypess let elem_tyss = map Maybe.catMaybes elemTyMaybess case concat elem_tyss of [] -> -- No non-empty fields return Nothing _ -> do let reclabelss = map (map mkVHDLBasicId) labelss let elemss = zipWith (zipWith AST.ElementDec) reclabelss elem_tyss let elem_names = concatMap (concatMap prettyShow) elem_tyss tyCnt <- MonadState.getAndModify tsTypeCnt (+1) let ty_id = mkVHDLExtId $ name ++ "_" ++ (show tyCnt)-- elem_names -- Find out if we need to add an extra field at the start of -- the record type containing the constructor (only needed -- when there's more than one 
constructor). enum_ty_maybe <- case enum_field_maybe of Nothing -> return Nothing Just (_, enum_htype) -> do enum_ty_maybe' <- vhdlTyMaybe enum_htype case enum_ty_maybe' of Nothing -> error $ "Couldn't translate enumeration type part of AggrType: " ++ show htype -- Note that the first Just means the type is -- translateable, while the second Just means that there -- is a enum_ty at all (e.g., there's multiple -- constructors). Just enum_ty -> return $ Just enum_ty -- Create an record field declaration for the first -- constructor field, if needed. enum_dec_maybe <- case enum_field_maybe of Nothing -> return $ Nothing Just (enum_name, enum_htype) -> do enum_vhdl_ty_maybe <- vhdlTyMaybe enum_htype let enum_vhdl_ty = Maybe.fromMaybe (error $ "\nVHDLTools.mkTyconTy: Enumeration field should not have empty type: " ++ show enum_htype) enum_vhdl_ty_maybe return $ Just $ AST.ElementDec (mkVHDLBasicId enum_name) enum_vhdl_ty -- Turn the maybe into a list, so we can prepend it. let enum_decs = Maybe.maybeToList enum_dec_maybe let enum_tys = Maybe.maybeToList enum_ty_maybe let ty_def = AST.TDR $ AST.RecordTypeDef (enum_decs ++ concat elemss) let aggrshow = case enum_field_maybe of Nothing -> mkTupleShow (enum_tys ++ concat elem_tyss) ty_id Just (conLbl, EnumType tycon dcs) -> mkAdtShow conLbl dcs (map (map fst) fieldss) ty_id MonadState.modify tsTypeFuns $ Map.insert (htype, showIdString) (showId, aggrshow) return $ Just (ty_id, Just $ Left ty_def) (EnumType tycon dcs) -> do let ty_id = mkVHDLExtId tycon let range = AST.SubTypeRange (AST.PrimLit "0") (AST.PrimLit $ show ((length dcs) - 1)) let ty_def = AST.TDI $ AST.IntegerTypeDef range let enumShow = mkEnumShow dcs ty_id MonadState.modify tsTypeFuns $ Map.insert (htype, showIdString) (showId, enumShow) return $ Just (ty_id, Just $ Left ty_def) otherwise -> error $ "\nVHDLTools.mkTyconTy: Called for HType that is neiter a AggrType or EnumType: " ++ show htype -- | Create a VHDL vector type mkVectorTy :: HType -- ^ The Haskell 
type of the Vector -> TypeSession TypeMapRec -- ^ An error message or The typemark created. mkVectorTy (VecType len elHType) = do typesMap <- MonadState.get tsTypes elTyTmMaybe <- vhdlTyMaybe elHType case elTyTmMaybe of (Just elTyTm) -> do let ty_id = mkVHDLExtId $ "vector_"++ (AST.fromVHDLId elTyTm) ++ "_0_to_" ++ (show (len - 1)) let range = AST.ConstraintIndex $ AST.IndexConstraint [AST.ToRange (AST.PrimLit "0") (AST.PrimLit $ show (len - 1))] let existing_uvec_ty = fmap (fmap fst) $ Map.lookup (UVecType elHType) typesMap case existing_uvec_ty of Just (Just t) -> do let ty_def = AST.SubtypeIn t (Just range) return (Just (ty_id, Just $ Right ty_def)) Nothing -> do let vec_id = mkVHDLExtId $ "vector_" ++ (AST.fromVHDLId elTyTm) let vec_def = AST.TDA $ AST.UnconsArrayDef [tfvec_indexTM] elTyTm MonadState.modify tsTypes (Map.insert (UVecType elHType) (Just (vec_id, (Just $ Left vec_def)))) MonadState.modify tsTypeDecls (\typedefs -> typedefs ++ [mktydecl (vec_id, (Just $ Left vec_def))]) let vecShowFuns = mkVectorShow elTyTm vec_id mapM_ (\(id, subprog) -> MonadState.modify tsTypeFuns $ Map.insert (UVecType elHType, id) ((mkVHDLExtId id), subprog)) vecShowFuns let ty_def = AST.SubtypeIn vec_id (Just range) return (Just (ty_id, Just $ Right ty_def)) -- Vector of empty elements becomes empty itself. Nothing -> return Nothing mkVectorTy htype = error $ "\nVHDLTools.mkVectorTy: Called for HType that is not a VecType: " ++ show htype mkNaturalTy :: Int -- ^ The minimum bound (> 0) -> Int -- ^ The maximum bound (> minimum bound) -> TypeSession TypeMapRec -- ^ An error message or The typemark created. 
mkNaturalTy min_bound max_bound = do let bitsize = floor (logBase 2 (fromInteger (toInteger max_bound))) let ty_id = mkVHDLExtId $ "natural_" ++ (show min_bound) ++ "_to_" ++ (show max_bound) let range = AST.ConstraintIndex $ AST.IndexConstraint [AST.DownRange (AST.PrimLit $ show bitsize) (AST.PrimLit $ show min_bound)] let ty_def = AST.SubtypeIn unsignedTM (Just range) return (Just (ty_id, Just $ Right ty_def)) mkUnsignedTy :: Int -- ^ Haskell type of the unsigned integer -> TypeSession TypeMapRec mkUnsignedTy size = do let ty_id = mkVHDLExtId $ "unsigned_" ++ show size let range = AST.ConstraintIndex $ AST.IndexConstraint [AST.DownRange (AST.PrimLit $ show (size - 1)) (AST.PrimLit "0")] let ty_def = AST.SubtypeIn unsignedTM (Just range) return (Just (ty_id, Just $ Right ty_def)) mkSignedTy :: Int -- ^ Haskell type of the signed integer -> TypeSession TypeMapRec mkSignedTy size = do let ty_id = mkVHDLExtId $ "signed_" ++ show size let range = AST.ConstraintIndex $ AST.IndexConstraint [AST.DownRange (AST.PrimLit $ show (size - 1)) (AST.PrimLit "0")] let ty_def = AST.SubtypeIn signedTM (Just range) return (Just (ty_id, Just $ Right ty_def)) -- Finds the field labels and types for aggregation HType. Returns an -- error on other types. getFields :: HType -- ^ The HType to get fields for -> Int -- ^ The constructor to get fields for (e.g., 0 -- for the first constructor, etc.) -> [(String, HType)] -- ^ A list of fields, with their name and type getFields htype dc_i = case htype of (AggrType name _ fieldss) | dc_i >= 0 && dc_i < length fieldss -> fieldss!!dc_i | otherwise -> error $ "VHDLTool.getFields: Invalid constructor index: " ++ (show dc_i) ++ ". No such constructor in HType: " ++ (show htype) _ -> error $ "VHDLTool.getFields: Can't get fields from non-aggregate HType: " ++ show htype -- Finds the field labels for an aggregation type, as VHDLIds. 
getFieldLabels :: HType -- ^ The HType to get field labels for -> Int -- ^ The constructor to get fields for (e.g., 0 -- for the first constructor, etc.) -> [AST.VHDLId] -- ^ The labels getFieldLabels htype dc_i = ((map mkVHDLBasicId) . (map fst)) (getFields htype dc_i) -- Finds the field label for the constructor field, if any. getConstructorFieldLabel :: HType -> Maybe AST.VHDLId getConstructorFieldLabel (AggrType _ (Just con) _) = Just $ mkVHDLBasicId (fst con) getConstructorFieldLabel (AggrType _ Nothing _) = Nothing getConstructorFieldLabel htype = error $ "Can't get constructor field label from non-aggregate HType: " ++ show htype getConstructorIndex :: HType -> String -> Int getConstructorIndex (EnumType etype cons) dc = case List.elemIndex dc cons of Just (index) -> index Nothing -> error $ "VHDLTools.getConstructorIndex: constructor: " ++ show dc ++ " is not part of type: " ++ show etype ++ ", which only has constructors: " ++ show cons getConstructorIndex htype _ = error $ "VHDLTools.getConstructorIndex: Can't get constructor index for non-Enum type: " ++ show htype mktydecl :: (AST.VHDLId, Maybe (Either AST.TypeDef AST.SubtypeIn)) -> Maybe AST.PackageDecItem mktydecl (_, Nothing) = Nothing mktydecl (ty_id, Just (Left ty_def)) = Just $ AST.PDITD $ AST.TypeDec ty_id ty_def mktydecl (ty_id, Just (Right ty_def)) = Just $ AST.PDISD $ AST.SubtypeDec ty_id ty_def mkTupleShow :: [AST.TypeMark] -- ^ type of each tuple element -> AST.TypeMark -- ^ type of the tuple -> AST.SubProgBody mkTupleShow elemTMs tupleTM = AST.SubProgBody showSpec [] [showExpr] where tupPar = AST.unsafeVHDLBasicId "tup" parenPar = AST.unsafeVHDLBasicId "paren" showSpec = AST.Function showId [AST.IfaceVarDec tupPar tupleTM, AST.IfaceVarDec parenPar booleanTM] stringTM showExpr = AST.ReturnSm (Just $ AST.PrimLit "'('" AST.:&: showMiddle AST.:&: AST.PrimLit "')'") where showMiddle = if null elemTMs then AST.PrimLit "''" else foldr1 (\e1 e2 -> e1 AST.:&: AST.PrimLit "','" AST.:&: e2) $ map 
((genExprFCall2 showId) . (\x -> (selectedName tupPar x, AST.PrimLit "false"))) (take tupSize recordlabels) recordlabels = map (\c -> mkVHDLBasicId c) [ [a,b] | a <- ['A'..'Z'], b <- ['A'..'Z'] ] -- ['A'..'Z'] tupSize = length elemTMs selectedName par = (AST.PrimName . AST.NSelected . (AST.NSimple par AST.:.:) . tupVHDLSuffix) mkAdtShow :: String -> [String] -- Constructors -> [[String]] -- Fields for every constructor -> AST.TypeMark -> AST.SubProgBody mkAdtShow conLbl conIds elemIdss adtTM = AST.SubProgBody showSpec [] [showExpr] where adtPar = AST.unsafeVHDLBasicId "adt" parenPar = AST.unsafeVHDLBasicId "paren" showSpec = AST.Function showId [AST.IfaceVarDec adtPar adtTM, AST.IfaceVarDec parenPar booleanTM] stringTM showExpr = AST.CaseSm ((selectedName adtPar) (mkVHDLBasicId conLbl)) [AST.CaseSmAlt [AST.ChoiceE $ AST.PrimLit $ show x] ( if (null (elemIdss!!x)) then [AST.ReturnSm (Just $ ((genExprFCall2 showId) . (\x -> (selectedName adtPar x, AST.PrimLit "false")) $ mkVHDLBasicId conLbl) AST.:&: showFields x)] else [addParens (((genExprFCall2 showId) . (\x -> (selectedName adtPar x, AST.PrimLit "false")) $ mkVHDLBasicId conLbl) AST.:&: showFields x)] ) | x <- [0..(length conIds) -1]] showFields i = if (null (elemIdss!!i)) then AST.PrimLit "\"\"" else foldr1 (\e1 e2 -> e1 AST.:&: e2) $ map ((AST.PrimLit "' '" AST.:&:) . (genExprFCall2 showId) . (\x -> (selectedName adtPar x, AST.PrimLit "true"))) (map mkVHDLBasicId (elemIdss!!i)) selectedName par = (AST.PrimName . AST.NSelected . (AST.NSimple par AST.:.:) . 
tupVHDLSuffix) addParens :: AST.Expr -> AST.SeqSm addParens k = AST.IfSm (AST.PrimName (AST.NSimple parenPar)) [AST.ReturnSm (Just (AST.PrimLit "'('" AST.:&: k AST.:&: AST.PrimLit "')'" ))] [] (Just $ AST.Else [AST.ReturnSm (Just k)]) mkEnumShow :: [String] -> AST.TypeMark -> AST.SubProgBody mkEnumShow elemIds enumTM = AST.SubProgBody showSpec [] [showExpr] where enumPar = AST.unsafeVHDLBasicId "enum" parenPar = AST.unsafeVHDLBasicId "paren" showSpec = AST.Function showId [AST.IfaceVarDec enumPar enumTM, AST.IfaceVarDec parenPar booleanTM] stringTM showExpr = AST.CaseSm (AST.PrimName $ AST.NSimple enumPar) [AST.CaseSmAlt [AST.ChoiceE $ AST.PrimLit $ show x] [AST.ReturnSm (Just $ AST.PrimLit $ '"':(elemIds!!x)++['"'])] | x <- [0..(length elemIds) -1]] mkVectorShow :: AST.TypeMark -- ^ elemtype -> AST.TypeMark -- ^ vectype -> [(String,AST.SubProgBody)] mkVectorShow elemTM vectorTM = [ (headId, AST.SubProgBody headSpec [] [headExpr]) , (tailId, AST.SubProgBody tailSpec [AST.SPVD tailVar] [tailExpr, tailRet]) , (showIdString, AST.SubProgBody showSpec [AST.SPSB doShowDef] [showRet]) ] where vecPar = AST.unsafeVHDLBasicId "vec" resId = AST.unsafeVHDLBasicId "res" parenPar = AST.unsafeVHDLBasicId "paren" headSpec = AST.Function (mkVHDLExtId headId) [AST.IfaceVarDec vecPar vectorTM] elemTM -- return vec(0); headExpr = AST.ReturnSm (Just (AST.PrimName $ AST.NIndexed (AST.IndexedName (AST.NSimple vecPar) [AST.PrimLit "0"]))) vecSlice init last = AST.PrimName (AST.NSlice (AST.SliceName (AST.NSimple vecPar) (AST.ToRange init last))) tailSpec = AST.Function (mkVHDLExtId tailId) [AST.IfaceVarDec vecPar vectorTM] vectorTM -- variable res : fsvec_x (0 to vec'length-2); tailVar = AST.VarDec resId (AST.SubtypeIn vectorTM (Just $ AST.ConstraintIndex $ AST.IndexConstraint [AST.ToRange (AST.PrimLit "0") (AST.PrimName (AST.NAttribute $ AST.AttribName (AST.NSimple vecPar) (AST.NSimple $ mkVHDLBasicId lengthId) Nothing) AST.:-: (AST.PrimLit "2")) ])) Nothing -- res AST.:= vec(1 to 
vec'length-1) tailExpr = AST.NSimple resId AST.:= (vecSlice (AST.PrimLit "1") (AST.PrimName (AST.NAttribute $ AST.AttribName (AST.NSimple vecPar) (AST.NSimple $ mkVHDLBasicId lengthId) Nothing) AST.:-: AST.PrimLit "1")) tailRet = AST.ReturnSm (Just $ AST.PrimName $ AST.NSimple resId) showSpec = AST.Function showId [AST.IfaceVarDec vecPar vectorTM, AST.IfaceVarDec parenPar booleanTM] stringTM doShowId = AST.unsafeVHDLBasicId "doshow" doShowDef = AST.SubProgBody doShowSpec [] [doShowRet] where doShowSpec = AST.Function doShowId [AST.IfaceVarDec vecPar vectorTM] stringTM -- case vec'len is -- when 0 => return ""; -- when 1 => return head(vec); -- when others => return show(head(vec)) & ',' & -- doshow (tail(vec)); -- end case; doShowRet = AST.CaseSm (AST.PrimName (AST.NAttribute $ AST.AttribName (AST.NSimple vecPar) (AST.NSimple $ mkVHDLBasicId lengthId) Nothing)) [AST.CaseSmAlt [AST.ChoiceE $ AST.PrimLit "0"] [AST.ReturnSm (Just $ AST.PrimLit "\"\"")], AST.CaseSmAlt [AST.ChoiceE $ AST.PrimLit "1"] [AST.ReturnSm (Just $ genExprFCall2 showId (genExprFCall (mkVHDLExtId headId) (AST.PrimName $ AST.NSimple vecPar),AST.PrimLit "false") )], AST.CaseSmAlt [AST.Others] [AST.ReturnSm (Just $ genExprFCall2 showId (genExprFCall (mkVHDLExtId headId) (AST.PrimName $ AST.NSimple vecPar), AST.PrimLit "false") AST.:&: AST.PrimLit "','" AST.:&: genExprFCall doShowId (genExprFCall (mkVHDLExtId tailId) (AST.PrimName $ AST.NSimple vecPar)) ) ]] -- return '<' & doshow(vec) & '>'; showRet = AST.ReturnSm (Just $ AST.PrimLit "'<'" AST.:&: genExprFCall doShowId (AST.PrimName $ AST.NSimple vecPar) AST.:&: AST.PrimLit "'>'" ) mkBuiltInShow :: [AST.SubProgBody] mkBuiltInShow = [ AST.SubProgBody showBitSpec [] [showBitExpr] , AST.SubProgBody showBoolSpec [] [showBoolExpr] , AST.SubProgBody showSingedSpec [] [showSignedExpr] , AST.SubProgBody showUnsignedSpec [] [showUnsignedExpr] -- , AST.SubProgBody showNaturalSpec [] [showNaturalExpr] ] where bitPar = AST.unsafeVHDLBasicId "s" boolPar = 
AST.unsafeVHDLBasicId "b" signedPar = AST.unsafeVHDLBasicId "sint" unsignedPar = AST.unsafeVHDLBasicId "uint" parenPar = AST.unsafeVHDLBasicId "paren" -- naturalPar = AST.unsafeVHDLBasicId "nat" showBitSpec = AST.Function showId [AST.IfaceVarDec bitPar std_logicTM, AST.IfaceVarDec parenPar booleanTM] stringTM -- if s = '1' then return "'1'" else return "'0'" showBitExpr = AST.IfSm (AST.PrimName (AST.NSimple bitPar) AST.:=: AST.PrimLit "'1'") [AST.ReturnSm (Just $ AST.PrimLit "\"High\"")] [] (Just $ AST.Else [AST.ReturnSm (Just $ AST.PrimLit "\"Low\"")]) showBoolSpec = AST.Function showId [AST.IfaceVarDec boolPar booleanTM, AST.IfaceVarDec parenPar booleanTM] stringTM -- if b then return "True" else return "False" showBoolExpr = AST.IfSm (AST.PrimName (AST.NSimple boolPar)) [AST.ReturnSm (Just $ AST.PrimLit "\"True\"")] [] (Just $ AST.Else [AST.ReturnSm (Just $ AST.PrimLit "\"False\"")]) showSingedSpec = AST.Function showId [AST.IfaceVarDec signedPar signedTM, AST.IfaceVarDec parenPar booleanTM] stringTM showSignedExpr = AST.ReturnSm (Just $ AST.PrimName $ AST.NAttribute $ AST.AttribName (AST.NSimple integerId) (AST.NIndexed $ AST.IndexedName (AST.NSimple imageId) [signToInt]) Nothing ) where signToInt = genExprFCall (mkVHDLBasicId toIntegerId) (AST.PrimName $ AST.NSimple signedPar) showUnsignedSpec = AST.Function showId [AST.IfaceVarDec unsignedPar unsignedTM, AST.IfaceVarDec parenPar booleanTM] stringTM showUnsignedExpr = AST.ReturnSm (Just $ AST.PrimName $ AST.NAttribute $ AST.AttribName (AST.NSimple integerId) (AST.NIndexed $ AST.IndexedName (AST.NSimple imageId) [unsignToInt]) Nothing ) where unsignToInt = genExprFCall (mkVHDLBasicId toIntegerId) (AST.PrimName $ AST.NSimple unsignedPar) -- showNaturalSpec = AST.Function showId [AST.IfaceVarDec naturalPar naturalTM] stringTM -- showNaturalExpr = AST.ReturnSm (Just $ -- AST.PrimName $ AST.NAttribute $ AST.AttribName (AST.NSimple integerId) -- (AST.NIndexed $ AST.IndexedName (AST.NSimple imageId) [AST.PrimName $ 
AST.NSimple $ naturalPar]) Nothing ) genExprFCall :: AST.VHDLId -> AST.Expr -> AST.Expr genExprFCall fName args = AST.PrimFCall $ AST.FCall (AST.NSimple fName) $ map (\exp -> Nothing AST.:=>: AST.ADExpr exp) [args] genExprFCall2 :: AST.VHDLId -> (AST.Expr, AST.Expr) -> AST.Expr genExprFCall2 fName (arg1, arg2) = AST.PrimFCall $ AST.FCall (AST.NSimple fName) $ map (\exp -> Nothing AST.:=>: AST.ADExpr exp) [arg1,arg2] genExprPCall2 :: AST.VHDLId -> AST.Expr -> AST.Expr -> AST.SeqSm genExprPCall2 entid arg1 arg2 = AST.ProcCall (AST.NSimple entid) $ map (\exp -> Nothing AST.:=>: AST.ADExpr exp) [arg1,arg2] mkSigDec :: CoreSyn.CoreBndr -> TranslatorSession (Maybe AST.SigDec) mkSigDec bndr = do let error_msg = "\nVHDL.mkSigDec: Can not make signal declaration for type: \n" ++ pprString bndr type_mark_maybe <- MonadState.lift tsType $ vhdlTy error_msg (Var.varType bndr) case type_mark_maybe of Just type_mark -> return $ Just (AST.SigDec (varToVHDLId bndr) type_mark Nothing) Nothing -> return Nothing -- | Does the given thing have a non-empty type? hasNonEmptyType :: (TypedThing t, Outputable.Outputable t) => String -> t -> TranslatorSession Bool hasNonEmptyType errMsg thing = MonadState.lift tsType $ isJustM (vhdlTy (errMsg ++ "\nVHDLTools.hasNonEmptyType: Non representable type?") thing)
christiaanb/clash
clash/CLasH/VHDL/VHDLTools.hs
bsd-3-clause
41,122
444
28
10,597
9,978
5,220
4,758
618
11
{-# LANGUAGE OverloadedStrings #-}
-- | Wiki example: draw one open path mixing straight lines, a quadratic
-- curve and a cubic bezier curve, stroke it in blue, and snapshot the
-- canvas for the wiki image gallery.
module Path where

import Graphics.Blank
import Wiki -- (578,200)

main :: IO ()
main = blankCanvas 3000 $ \ context -> do
    -- Single batched draw call; order of commands matters because the
    -- path is built incrementally from the current pen position.
    send context $ do
        beginPath()
        moveTo(100, 20)
        -- line 1
        lineTo(200, 160)
        -- quadratic curve
        quadraticCurveTo(230, 200, 250, 120)
        -- bezier curve
        bezierCurveTo(290, -40, 300, 200, 400, 150)
        -- line 2
        lineTo(500, 90)
        -- Stroke settings are applied before the single stroke() that
        -- renders the whole accumulated path.
        lineWidth 5
        strokeStyle "blue"
        stroke()

    -- Capture the rendered canvas to disk, then shut the session down.
    wiki $ snapShot context "images/Path.png"
    wiki $ close context
ku-fpg/blank-canvas
wiki-suite/Path.hs
bsd-3-clause
595
0
14
194
185
94
91
18
1
{-# LANGUAGE CPP #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE OverloadedStrings #-} module Network.SSH.PrivateKeyFormat where import Network.SSH.Messages import Network.SSH.Protocol import Network.SSH.PubKey import Control.Applicative import Control.Monad import Crypto.Error import Crypto.Number.Basic (numBytes) import qualified Crypto.PubKey.DSA as DSA import qualified Crypto.PubKey.ECC.ECDSA as ECDSA import qualified Crypto.PubKey.ECC.Types as ECC import qualified Crypto.PubKey.Ed25519 as Ed25519 import qualified Crypto.PubKey.RSA as RSA import Data.ByteArray.Encoding import qualified Data.ByteString as S import qualified Data.ByteString.Char8 as S8 import Data.Foldable (traverse_) import Data.Serialize import Data.Word #if !MIN_VERSION_base(4,8,0) import Data.Traversable (traverse) #endif data PrivateKeyFile = PrivateKeyFile { pkfCipherName :: S.ByteString , pkfKdfName :: S.ByteString , pkfKdfOptions :: S.ByteString , pkfPublicKeys :: [S.ByteString] , pkfPrivateKeys :: S.ByteString } deriving Show data PrivateKeyList = PrivateKeyList { checkInt :: Word32 , privateKeys :: [(SshPubCert,PrivateKey,S.ByteString)] } authMagic :: S.ByteString authMagic = "openssh-key-v1\0" armorHeader :: S.ByteString armorHeader = "-----BEGIN OPENSSH PRIVATE KEY-----" armorFooter :: S.ByteString armorFooter = "-----END OPENSSH PRIVATE KEY-----" getPrivateKeyFile :: Get PrivateKeyFile getPrivateKeyFile = do authMagic1 <- label "magic" $ getByteString $ S.length authMagic unless (authMagic == authMagic1) (fail "bad magic value") pkfCipherName <- label "cipherName" getString pkfKdfName <- label "kdfName" getString pkfKdfOptions <- label "ldfOptions" getString n <- label "number of keys" getWord32be pkfPublicKeys <- label "public keys" (replicateM (fromIntegral n) getString) pkfPrivateKeys <- label "private key blob" getString return PrivateKeyFile{..} putPrivateKeyFile :: Putter PrivateKeyFile putPrivateKeyFile pkf = do putByteString authMagic putString "none" putString "none" 
putString "" putWord32be (fromIntegral (length (pkfPublicKeys pkf))) traverse_ putString (pkfPublicKeys pkf) putString (pkfPrivateKeys pkf) getPrivateKeyList :: Int -> Get PrivateKeyList getPrivateKeyList n = do checkInt <- getWord32be checkInt1 <- getWord32be unless (checkInt == checkInt1) (fail "incorrect decryption password") privateKeys <- replicateM n getPrivateKey return PrivateKeyList{..} -- putPrivateKeyList :: Putter PrivateKeyList -- putPrivateKeyList pkl = -- do putWord32be (checkInt pkl) -- traverse_ putPrivateKey (privateKeys pkl) getPrivateKey :: Get (SshPubCert, PrivateKey, S.ByteString) getPrivateKey = label "private key" $ do ty <- label "private key type" getString (pub,priv) <- case ty of "ssh-rsa" -> label "rsa key" $ do public_n <- getMpInt public_e <- getMpInt private_d <- getMpInt private_qinv <- getMpInt private_p <- getMpInt private_q <- getMpInt let private_dP = private_d `mod` (private_p-1) private_dQ = private_d `mod` (private_q-1) public_size = numBytes public_n private_pub = RSA.PublicKey{..} return (SshPubRsa public_e public_n, PrivateRsa RSA.PrivateKey{..}) "ssh-dss" -> label "dsa key" $ do params_p <- getMpInt params_q <- getMpInt params_g <- getMpInt public_y <- getMpInt private_x <- getMpInt let private_params = DSA.Params{..} return ( SshPubDss params_p params_q params_g public_y , PrivateDsa DSA.PrivateKey{..} ) "ssh-ed25519" -> label "ed25519 key" $ do pub1 <- getString priv <- getString let (sec,pub2) = S.splitAt 32 priv guard (pub1 == pub2) case liftA2 (,) (Ed25519.secretKey sec) (Ed25519.publicKey pub1) of CryptoPassed (c,d) -> let c' = Ed25519SecretKey c d' = Ed25519PublicKey d in return (SshPubEd25519 pub1, PrivateEd25519 c' d') _ -> fail "bad ed25519 key" "ecdsa-sha2-nistp256" -> label "ecdsap256 key" $ do (pub, priv) <- getEccPubPriv "nistp256" (ECC.getCurveByName ECC.SEC_p256r1) return (SshPubEcDsaP256 pub, PrivateEcdsa256 priv) "ecdsa-sha2-nistp384" -> label "ecdsap384 key" $ do (pub, priv) <- getEccPubPriv "nistp384" 
(ECC.getCurveByName ECC.SEC_p384r1) return (SshPubEcDsaP384 pub, PrivateEcdsa384 priv) "ecdsa-sha2-nistp521" -> label "ecdsap521 key" $ do (pub, priv) <- getEccPubPriv "nistp521" (ECC.getCurveByName ECC.SEC_p521r1) return (SshPubEcDsaP521 pub, PrivateEcdsa521 priv) _ -> fail "Unknown key type" comment <- getString return (pub, priv, comment) getEccPubPriv :: S.ByteString -> ECC.Curve -> Get (S.ByteString, ECDSA.PrivateKey) getEccPubPriv name curve = do name1 <- getString guard (name == name1) pubBytes <- getString priv <- getMpInt case pointFromBytes curve pubBytes of CryptoFailed e -> fail (show e) CryptoPassed _ -> return () return (pubBytes, ECDSA.PrivateKey curve priv) removePadding :: S.ByteString -> Either String S.ByteString removePadding xs | S.null xs = Left "Attempted to remove padding from empty bytestring" | S.length xs < padLen = Left "Padding incorrect" | otherwise = Right dat where padLen = fromIntegral (S.last xs) :: Int dat = S.take (S.length xs - padLen) xs addPadding :: S.ByteString -> S.ByteString addPadding xs = xs `S.append` pad where padLen = 16 - S.length xs `mod` 16 pad = S.pack [1..fromIntegral padLen] parsePrivateKeyFile :: S.ByteString -> Either String PrivateKeyFile parsePrivateKeyFile xs = do step1 <- case dropWhile (/= armorHeader) (S8.lines xs) of [] -> Left "Missing private key header" _:ys -> Right ys step2 <- case break (== armorFooter) step1 of (_,[]) -> Left "Missing private key footer" (ys,_:_) -> Right ys step3 <- convertFromBase Base64 (S8.concat step2) runGet getPrivateKeyFile step3 extractPK :: PrivateKeyFile -> Either String [(SshPubCert,PrivateKey,S.ByteString)] extractPK pkf = case pkfKdfName pkf of "none" -> go (pkfPrivateKeys pkf) name -> Left ("unknown kdf: " ++ S8.unpack name) where go privBytes = privateKeys <$> runGet (getPrivateKeyList (length (pkfPublicKeys pkf))) privBytes -- | Merge multiple new OpenSSH private key files into a single one -- The file format as defined supports this though openssh doesn't -- 
appear to actually handle it correctly. --mergePrivateKeys :: [S.ByteString] -> Either String S.ByteString --mergePrivateKeys xs = -- do pkfs1 <- traverse parsePrivateKeyFile xs -- pkf <- case pkfs1 of -- [] -> Left "No private key files" -- pkf:_ -> return pkf -- -- priv:privs <- traverse (removePadding . pkfPrivateKeys) pkfs1 -- -- let discardCheckBytes = S.drop 8 -- pkf' = pkf { pkfPublicKeys = pkfPublicKeys =<< pkfs1 -- , pkfPrivateKeys = addPadding -- $ priv -- `S.append` S.concat (map discardCheckBytes privs) -- } -- -- lineLen = 70 -- to match openssh's behavior -- dataLine = convertToBase Base64 (runPut (putPrivateKeyFile pkf')) -- -- return $ S8.unlines $ [ armorHeader ] -- ++ chunks lineLen dataLine -- ++ [ armorFooter ] -- -- chunks :: Int -> S.ByteString -> [S.ByteString] chunks n xs | S.length xs <= n = [xs] | otherwise = a : chunks n b where (a,b) = S.splitAt n xs
glguy/ssh-hans
src/Network/SSH/PrivateKeyFormat.hs
bsd-3-clause
8,188
14
24
2,233
1,907
982
925
158
8
{-# LANGUAGE MultiParamTypeClasses #-}

module Data.Interface.Package
    ( ModuleEnv
    , singleModuleInterface
    , PackageInterface(..)
    , showPackageId
    , PackageDiff(..)
    , PackageId
    , PackageIdentifier
    , parsePackageId
    ) where

import Data.Function ( on )
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.Set ( Set )

import Distribution.Text
import Distribution.Package as C
--import qualified Distribution.License as C

import Data.Interface.Change
import Data.Interface.Module

-- | Interfaces for a package's modules, keyed by module name.
type ModuleEnv = Map ModuleName ModuleInterface

-- | A 'ModuleEnv' containing exactly the given interface, keyed by
-- that interface's own module name.
singleModuleInterface :: ModuleInterface -> ModuleEnv
singleModuleInterface iface = Map.singleton (moduleName iface) iface

-- | The externally visible interface of an installed package.
data PackageInterface = PackageInterface
    { pkgId :: PackageId
    --, pkgInfo :: PackageInfo
    , pkgExposedModules :: ModuleEnv    -- ^ modules callers may import
    , pkgHiddenModules :: Set ModuleName -- ^ names of internal modules
    } deriving (Show)

{- PackageInterface notes:

   - PackageInterface provides a view of an installed package after
     conditionals and dependency ranges have been resolved.

   - Hidden modules must be included in the map because their exports can
     be visible in exposed modules.
-}

-- | Render a package id in Cabal's usual textual form.
showPackageId :: PackageId -> String
showPackageId = display

-- | A structural diff between two 'PackageInterface's, field by field.
data PackageDiff = PackageDiff
    { diffPkgId :: Change C.PackageId
    --, diffPkgInfo :: Change PackageInfo
    , diffPkgExposedModules :: MapDiff ModuleName ModuleDiff ModuleInterface
    , diffPkgHiddenModules :: SetDiff ModuleName
    }

-- Rebuilds an interface from a diff; fails (per the Change applicative
-- semantics defined in Data.Interface.Change) if any field's change
-- cannot be replayed.
instance ToChange PackageInterface PackageDiff where
    toChange d = PackageInterface
        <$> toChange (diffPkgId d)
        <*> toChange (diffPkgExposedModules d)
        <*> toChange (diffPkgHiddenModules d)

-- Field-wise diff of two interfaces.
instance Diff PackageInterface PackageDiff where
    diff a b = PackageDiff
        { diffPkgId = on diff pkgId a b
        --, diffPkgInfo = on diff pkgInfo a b
        , diffPkgExposedModules = on diff pkgExposedModules a b
        , diffPkgHiddenModules = on diff pkgHiddenModules a b
        }

-- | Parse a textual package id ("name-1.2.3"); 'Nothing' on bad input.
parsePackageId :: String -> Maybe PackageId
parsePackageId = simpleParse

{-- TODO - Should we include this information?

data PackageInfo = PackageInfo
    { pkgLicense :: C.License
    , pkgCopyright :: String
    , pkgMaintainer :: String
    -- ... etc
    } deriving (Show, Eq)
-}
cdxr/haskell-interface
src/Data/Interface/Package.hs
bsd-3-clause
2,372
0
11
574
385
220
165
46
1
{-# LANGUAGE CPP, FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances #-}
module Llvm.Hir.Target
  ( module Llvm.Hir.DataLayoutMetrics
  , module Llvm.Hir.Target
  , module Llvm.Hir.Target.Linux_Gnu
  ) where

import Llvm.Hir.DataLayoutMetrics
import Llvm.Hir.Target.Linux_Gnu

-- | A compilation target: any value carrying data-layout metrics,
-- wrapped existentially so heterogeneous targets can live in one list.
data Target = forall a. (Show a, DataLayoutMetrics a) => Target a

-- Delegates to the Show instance of the wrapped metrics value.
instance Show Target where
  show (Target metrics) = show metrics

-- | Every target this compiler knows about.
targets :: [Target]
targets =
  [ Target I386_Pc_Linux_Gnu
  , Target X86_64_Pc_Linux_Gnu
  , Target X86_64_Unknown_Linux_Gnu
  , Target I386_Unknown_FreeBsd_10_2
  ]
mlite/hLLVM
src/Llvm/Hir/Target.hs
bsd-3-clause
759
0
8
127
141
85
56
-1
-1
{-# LANGUAGE FlexibleContexts #-}
-- | Parser for a puzzle board: one line of digits @0@-@3@ per position
-- row, each line terminated by a newline.
module BoardParser (pBoard) where

import Control.Applicative ((<$>), (<*))
import Data.Array (array)
import Data.Char (digitToInt)
import Text.Parsec

import Board (Board, board)
import Position (Position, rows)
import Tile (Tile, tile)

-- | Parse a single tile: exactly one digit in @\"0123\"@.
pTile :: Stream s m Char => ParsecT s u m Tile
pTile = tile . digitToInt <$> oneOf "0123"

-- | Parse one row of tiles, pairing each parsed tile with the position
-- it was parsed for, in order.
pRow :: Stream s m Char => [Position] -> ParsecT s u m [(Position, Tile)]
pRow ps = mapM (\p -> (,) p <$> pTile) ps

-- | Parse a whole board: a newline-terminated row of tiles for every
-- row of positions, assembled into a full array over all positions.
pBoard :: Stream s m Char => ParsecT s u m Board
pBoard = board . array (minBound, maxBound) . concat
     <$> mapM (\ps -> pRow ps <* newline) rows
jameshales/voltorb-flip
src/BoardParser.hs
bsd-3-clause
648
0
13
130
286
151
135
17
1
{-# OPTIONS -XTypeSynonymInstances -XFlexibleInstances #-}
module TexTables.Types where

import TexTables.Utils
import Text.Printf

-- | The title row of a table.
type TableTitles = Line
-- | One derived-column equation per extra column to generate.
type TableEquations = [EqnEx]
-- | An equation over the whole table and a row index.
type EqnEx = (Head -> Int -> Case)
-- | An equation over the whole table only.
type Eqn = (Head -> Case)

-- |Root of a table
data Head = Table TableEquations TableTitles [Line]
    deriving Show

-- |Line of a table
data Line = Line [Case]
          | HLine
    deriving Show

-- |Row of a table
data Case = String String
          | Int Int
          | Float Float
          | Skip
          | Equation Eqn

-- Function-typed cells cannot be shown; render a placeholder instead.
instance Show Eqn where
    show _ = "<eqn>"

instance Show EqnEx where
    show _ = "<eqn>"

instance Show Case where
    show (String a) = a
    show (Float a) = printf "%.2f" (a :: Float) -- two decimal places
    show (Skip) = ""
    show (Int a) = show a
    show (Equation a) = show a

-- |Applies the equations
-- Folds each equation into the table in turn, appending one computed
-- cell to the end of every row until no equations remain.
-- NOTE(review): the comprehension below matches only the 'Line'
-- constructor, so a table containing an 'HLine' among its body lines
-- appears to hit a pattern-match failure here -- confirm.
-- NOTE(review): 'head' here shadows Prelude's 'head' (it names the
-- whole table passed to each equation).
applyEqn (Table [] titles lines) = Table [] titles lines
applyEqn (head@(Table (eq:eqns) titles lines)) =
    applyEqn (Table eqns titles
        [Line (p ++ [eq head i]) | i <- [0..length lines - 1],
            let p = case lines !! i of (Line n) -> n])

-- | Things that can be rendered to TeX and pre-evaluated.
class (Evaluable a) where
    evalTeX :: a -> String
    evaluate :: a -> a

instance Evaluable Head where
    -- Renders the full tabular environment: rule, titles, rule, body
    -- lines joined by newlines, closing rule. '+!' and 'joinMap' come
    -- from TexTables.Utils.
    evalTeX s@(Table equations titles lines) =
        let f_lines = case applyEqn s of (Table _ _ v) -> v
        in "\\begin{tabular}" +! evalTeX HLine +! evalTeX titles +! evalTeX HLine
            +! (joinMap (evalTeX) "\n" f_lines)
            +! evalTeX HLine +! "\\end{tabular}"
        where
            -- NOTE(review): 'fmt' looks unused in this instance body.
            fmt :: [String] -> String
            fmt l = (join " & " l) ++ " \\\\"
    evaluate = applyEqn

instance Evaluable Line where
    -- A row is its cells joined by column separators, ended with the
    -- TeX row terminator; HLine renders as a horizontal rule.
    evalTeX (Line l) = joinMap (evalTeX) " & " l ++ " \\\\"
    evalTeX HLine = "\\hline"
    evaluate a = a

instance Evaluable Case where
    evalTeX a = show a
    evaluate a = a
davbaumgartner/tex-tables
TexTables/Types.hs
bsd-3-clause
1,766
62
17
462
667
366
301
56
1
{-# LANGUAGE OverloadedStrings #-}

-- | Parsing and printing of BitTorrent magnet URIs
-- (@magnet:?xt=urn:btih:...@).
module Rho.Magnet where

import           Data.Bits
import qualified Data.ByteString         as B
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Char8   as BC
import qualified Data.ByteString.Lazy    as LB
import           Data.Char               (digitToInt, intToDigit)
import           Data.Maybe              (mapMaybe)
import           Data.Monoid
import           Data.Word               (Word8)
import           Network.URI             (unEscapeString)

import           Rho.InfoHash
import           Rho.Tracker
import           Rho.Utils

-- | A parsed magnet link.
data Magnet = Magnet
  { mHash        :: InfoHash       -- ^ the @xt@ (exact topic) info hash
  , mTrackers    :: [Tracker]      -- ^ every parseable @tr@ argument
  , mDisplayName :: Maybe String   -- ^ the optional @dn@ argument, unescaped
  } deriving (Show, Eq)

-- | Parse a magnet URI. Fails only when the @xt@ argument is missing;
-- unparseable trackers are silently dropped.
parseMagnet :: B.ByteString -> Either String Magnet
parseMagnet bs = do
    let args = parseArgs bs
    case lookup "xt" args of
      Nothing -> Left "Can't parse xt argument from magnet URL."
      Just xt ->
        let dn = lookup "dn" args
            tr = mapMaybe (either (const Nothing) Just . parseTrackerBS) .
                   -- TODO: redundant bytestring packing/unpacking here
                   map (BC.pack . unEscapeString . BC.unpack . snd) .
                   filter ((==) "tr" . fst) $ args
            xt' = parseInfoHash (B.drop 9 xt) -- drop "urn:btih:" prefix and parse
        in Right $ Magnet xt' tr ((unEscapeString . BC.unpack) `fmap` dn)

-- | Render a magnet back to its URI form: the hex-encoded info hash,
-- the display name if any, then one @&tr=@ per tracker.
printMagnet :: Magnet -> B.ByteString
printMagnet (Magnet (InfoHash bs) trs dn) =
    LB.toStrict . BB.toLazyByteString . mconcat $
      [ BB.byteString "magnet:?xt=urn:btih:"
      , BB.byteString $ bsEncodeBytes bs
      , maybe mempty (BB.byteString . ("&dn=" <>) . BC.pack) dn
      ] <> map (BB.byteString . ("&tr=" <>) . printTracker) trs
  where
    -- Hex-encode the raw hash bytes, two lowercase-hex chars per byte.
    bsEncodeBytes :: B.ByteString -> B.ByteString
    bsEncodeBytes = BC.pack . encodeBytes . B.unpack

    encodeBytes :: [Word8] -> String
    encodeBytes [] = ""
    encodeBytes (w : ws) =
      let ln = w .&. 0x0F         -- low nibble
          hn = w `shiftR` 4       -- high nibble
      in intToDigit (fromIntegral hn) : intToDigit (fromIntegral ln) : encodeBytes ws

    printTracker :: Tracker -> B.ByteString
    printTracker (HTTPTracker uri) = BC.pack $ show uri
    printTracker (UDPTracker host port) = "udp://" <> host <> ":" <> BC.pack (show port)

-- | Parse character representation of info hash(e.g. hex notation, two
-- chars per byte) to byte representation.
--
-- TODO: We probably need some error handling here. Make sure info_hash is
-- 20-byte long.
-- NOTE(review): an odd-length input hits the 'error' branch below, and
-- non-hex characters would make 'digitToInt' throw -- confirm inputs
-- are validated upstream.
parseInfoHash :: B.ByteString -> InfoHash
parseInfoHash = InfoHash . LB.toStrict . BB.toLazyByteString . go
  where
    -- Consume two hex chars at a time, emitting one byte per pair.
    go bs =
      case BC.uncons bs of
        Nothing -> mempty
        Just (c1, rest) ->
          case BC.uncons rest of
            Nothing -> error "error while parsing info hash"
            Just (c2, rest') ->
              BB.word8 (fromIntegral $ (digitToInt c1 `shiftL` 4) + digitToInt c2)
                <> go rest'

-- | Parse `a=b` pairs from a query string. Parsing starts from the
-- position of '?' in the string.
--
-- >>> parseArgs (BC.pack "dummy?a=b&c=d")
-- [("a","b"),("c","d")]
--
-- >>> parseArgs (BC.pack "?")
-- []
--
parseArgs :: B.ByteString -> [(B.ByteString, B.ByteString)]
parseArgs =
      -- split to (key, val) pairs
      map (BC.tail <.> BC.span (/= '='))
      -- split to key=val strings
    . BC.split '&'
      -- drop the prefix
    . BC.tail . BC.dropWhile (/= '?')
osa1/rho-torrent
src/Rho/Magnet.hs
bsd-3-clause
3,377
51
25
903
900
496
404
63
3
{-# LANGUAGE OverloadedStrings #-}
-- | Minimal client for the public HackerRank checker API.
module Hackerrank where

import Control.Lens
import Data.Map (Map)
import Data.Aeson
import Data.Aeson.Lens (key)
import Network.Wreq

-- | Shape of a decoded JSON response body.
type Resp = Response (Map String Value)

-- | Endpoint listing the languages the checker supports.
languagesUrl :: String
languagesUrl = "http://api.hackerrank.com/checker/languages.json"

-- | Fetch the supported-languages document; returns 'Nothing' when the
-- response body has no @\"languages\"@ key. ('asValue' throws an
-- exception if the body is not valid JSON.)
languages :: IO (Maybe Value)
languages = do
  r <- asValue =<< get languagesUrl
  return (r ^? responseBody . key "languages")
arknave/utpc
src/Hackerrank.hs
bsd-3-clause
410
0
10
62
113
62
51
-1
-1
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}

------------------------------------------------------------------------------
-- | This module defines our application's state type and an alias for its
-- handler monad.
module Application where

------------------------------------------------------------------------------
import Control.Lens
import Control.Monad.State (get)
import Snap
import Snap.Snaplet.Heist
import Snap.Snaplet.Auth
import Snap.Snaplet.Session
import Snap.Snaplet.SqliteSimple

------------------------------------------------------------------------------
-- | Application state: one field per snaplet this site composes.
data App = App
    { _heist :: Snaplet (Heist App)         -- ^ Heist templating
    , _sess :: Snaplet SessionManager       -- ^ client-side sessions
    , _db :: Snaplet Sqlite                 -- ^ sqlite-simple connection
    , _auth :: Snaplet (AuthManager App)    -- ^ user authentication
    }

-- Template Haskell: generates lenses 'heist', 'sess', 'db' and 'auth'
-- (underscore stripped) used by the instances below.
makeLenses ''App

instance HasHeist App where
    heistLens = subSnaplet heist

-- Lets sqlite-simple queries run directly in any 'Handler b App'.
instance HasSqlite (Handler b App) where
    getSqliteState = with db get

------------------------------------------------------------------------------
-- | Convenience alias for this site's handler monad.
type AppHandler = Handler App App
nurpax/snaplet-sqlite-simple
example/src/Application.hs
bsd-3-clause
1,116
0
11
156
175
102
73
23
0
{-# LANGUAGE CPP, ScopedTypeVariables #-} -- | -- Module : Language.Haskell.Packages -- Copyright : (c) Thiago Arrais 2009 -- License : BSD3 -- -- Maintainer : jpmoresmau@gmail.com -- Stability : beta -- Portability : portable -- -- Packages from packages databases (global, user). -- see <http://stackoverflow.com/questions/1522104/how-to-programmatically-retrieve-ghc-package-information> module Language.Haskell.Packages ( getPkgInfos ) where import Prelude hiding (Maybe) import qualified System.Info import qualified Config import Data.List import Data.Maybe import Control.Monad import Distribution.InstalledPackageInfo #if MIN_VERSION_Cabal(1,22,0) import Distribution.ModuleName #else import Control.Applicative import Distribution.Text #endif import System.Directory import System.Environment (getEnv) import System.FilePath import System.IO import qualified Control.Exception as Exc import GHC.Paths import qualified Control.Exception as Exception -- This was borrowed from the ghc-pkg source: #if MIN_VERSION_Cabal(1,22,0) type InstalledPackageInfoString = InstalledPackageInfo_ ModuleName #else type InstalledPackageInfoString = InstalledPackageInfo_ String #endif -- | Types of cabal package databases data CabalPkgDBType = PkgDirectory FilePath | PkgFile FilePath type InstalledPackagesList = [(FilePath, [InstalledPackageInfo])] -- | Fetch the installed package info from the global and user package.conf -- databases, mimicking the functionality of ghc-pkg. getPkgInfos :: Maybe FilePath -- ^ the path to the cabal sandbox if any -> IO InstalledPackagesList getPkgInfos msandbox= let -- | Test for package database's presence in a given directory -- NB: The directory is returned for later scanning by listConf, -- which parses the actual package database file(s). 
lookForPackageDBIn :: FilePath -> IO (Maybe InstalledPackagesList) lookForPackageDBIn dir = let path_dir = dir </> "package.conf.d" path_file = dir </> "package.conf" path_sd_dir= dir </> ("packages-" ++ ghcVersion ++ ".conf") -- cabal sandboxes path_ghc_dir= dir </> currentArch ++ '-' : currentOS ++ "-ghc-" ++ ghcVersion ++ "-packages.conf.d" in join . listToMaybe . filter isJust <$> mapM readIfExists [PkgDirectory path_dir,PkgFile path_file,PkgDirectory path_sd_dir,PkgDirectory path_ghc_dir] currentArch :: String currentArch = System.Info.arch currentOS :: String currentOS = System.Info.os ghcVersion :: String ghcVersion = Config.cProjectVersion in do -- Get the global package configuration database: global_conf <- do r <- lookForPackageDBIn getLibDir case r of Nothing -> ioError $ userError ("Can't find package database in " ++ getLibDir) Just pkgs -> return pkgs -- Get the user package configuration database user_conf <- case msandbox of Nothing -> do e_appdir <- Exc.try $ getAppUserDataDirectory "ghc" case e_appdir of Left (_::Exc.IOException) -> return [] Right appdir -> do let subdir = currentArch ++ '-' : currentOS ++ '-' : ghcVersion dir = appdir </> subdir r <- lookForPackageDBIn dir case r of Nothing -> return [] Just pkgs -> return pkgs Just sd->do r <- lookForPackageDBIn sd case r of Nothing -> return [] Just pkgs -> return pkgs -- Process GHC_PACKAGE_PATH, if present: e_pkg_path <- Exc.try (getEnv "GHC_PACKAGE_PATH") env_stack <- case e_pkg_path of Left (_::Exc.IOException) -> return [] Right path -> do pkgs <- mapM readContents [PkgDirectory pkg | pkg <- splitSearchPath path] return $ concat pkgs -- Send back the combined installed packages list: return (env_stack ++ user_conf ++ global_conf) readIfExists :: CabalPkgDBType -> IO (Maybe InstalledPackagesList) readIfExists p@(PkgDirectory path_dir) = do exists_dir <- doesDirectoryExist path_dir if exists_dir then Just <$> readContents p else return Nothing readIfExists p@(PkgFile path_dir) = do 
exists_dir <- doesFileExist path_dir if exists_dir then Just <$> readContents p else return Nothing -- | Read the contents of the given directory, searching for ".conf" files, and parse the -- package contents. Returns a singleton list (directory, [installed packages]) readContents :: CabalPkgDBType -- ^ The package database -> IO InstalledPackagesList -- ^ Installed packages readContents pkgdb = let -- | List package configuration files that might live in the given directory listConf :: FilePath -> IO [FilePath] listConf dbdir = do conf_dir_exists <- doesDirectoryExist dbdir if conf_dir_exists then do files <- getDirectoryContents dbdir return [ dbdir </> file | file <- files, ".conf" `isSuffixOf` file] else return [] -- | Read a file, ensuring that UTF8 coding is used for GCH >= 6.12 readUTF8File :: FilePath -> IO String readUTF8File file = do h <- openFile file ReadMode #if __GLASGOW_HASKELL__ >= 612 -- fix the encoding to UTF-8 hSetEncoding h utf8 Exc.catch (hGetContents h) (\(err :: Exc.IOException)->do print err hClose h h' <- openFile file ReadMode hSetEncoding h' localeEncoding hGetContents h' ) #else hGetContents h #endif -- | This function was lifted directly from ghc-pkg. Its sole purpose is -- parsing an input package description string and producing an -- InstalledPackageInfo structure. 
convertPackageInfoIn :: InstalledPackageInfoString -> InstalledPackageInfo convertPackageInfoIn (pkgconf@(InstalledPackageInfo { exposedModules = e, hiddenModules = h })) = pkgconf{ exposedModules = convert e, hiddenModules = convert h } #if MIN_VERSION_Cabal(1,22,0) where convert = map id #else where convert = mapMaybe simpleParse #endif -- | Utility function that just flips the arguments to Control.Exception.catch catchError :: IO a -> (String -> IO a) -> IO a catchError io handler = io `Exception.catch` handler' where handler' (Exception.ErrorCall err) = handler err -- | Slightly different approach in Cabal 1.8 series, with the package.conf.d -- directories, where individual package configuration files are association -- pairs. pkgInfoReader :: FilePath -> IO [InstalledPackageInfo] pkgInfoReader f = Exc.catch ( do pkgStr <- readUTF8File f let pkgInfo = parseInstalledPackageInfo pkgStr case pkgInfo of ParseOk _ info -> return [info] ParseFailed err -> do print err return [emptyInstalledPackageInfo] ) (\(_::Exc.IOException)->return [emptyInstalledPackageInfo]) in case pkgdb of (PkgDirectory pkgdbDir) -> do confs <- listConf pkgdbDir pkgInfoList <- mapM pkgInfoReader confs return [(pkgdbDir, join pkgInfoList)] (PkgFile dbFile) -> do pkgStr <- readUTF8File dbFile let pkgs = map convertPackageInfoIn $ readObj "InstalledPackageInfo" pkgStr pkgInfoList <- Exception.evaluate pkgs `catchError` (\e-> ioError $ userError $ "parsing " ++ dbFile ++ ": " ++ show e) return [(takeDirectory dbFile, pkgInfoList)] -- GHC.Path sets libdir for us... getLibDir :: String getLibDir = libdir -- | read an object from a String, with a given error message if it fails readObj :: Read a=> String -> String -> a readObj msg s=let parses=reads s -- :: [(a,String)] in if null parses then error (msg ++ ": " ++ s ++ ".") else fst $ head parses
JPMoresmau/ghc-pkg-lib
src/Language/Haskell/Packages.hs
bsd-3-clause
8,543
1
27
2,573
1,639
837
802
143
7
module Zero.ResetToken.Internal ( ResetSecret(..) , ResetBundle(..) ) where import Servant.API.Experimental.Auth (AuthProtect) import Data.Text (Text) import Zero.Swagger ------------------------------------------------------------------------------ data ResetSecret = ResetSecretSealed Text | ResetSecret { as_kB :: Text, as_verifier :: Text } deriving (Show, Generic) instance ToJSON ResetSecret where toJSON (ResetSecretSealed t) = object [ "sealed" .= t ] toJSON (ResetSecret kB v) = object [ "kB" .= kB, "verifier" .= v ] instance ToSchema ResetSecret where declareNamedSchema proxy = genericDeclareNamedSchema defaultSchemaOptions proxy & mapped.schema.description ?~ "The client encrypted payload delivered upon account reset." & mapped.schema.example ?~ toJSON (ResetSecret "<srp_kB>" "<verifier>") -- | Verify and decrypt the sealed reset secret. instance MimeUnrender PlainText ResetSecret -- where -- mimeUnrender resetText = resetText ------------------------------------------------------------------------------ data ResetBundle = ResetBundle { bundle :: Text } deriving (Generic, Show) instance ToJSON ResetBundle instance FromJSON ResetBundle instance ToSchema ResetBundle where declareNamedSchema proxy = genericDeclareNamedSchema defaultSchemaOptions proxy & mapped.schema.description ?~ "An encrypted tokenpair bundle containing a reset token and a keyfetch token." & mapped.schema.example ?~ toJSON (ResetBundle "<hex64-bundle>")
et4te/zero
src-shared/Zero/ResetToken/Internal.hs
bsd-3-clause
1,552
0
11
263
316
171
145
-1
-1
{-# LANGUAGE ForeignFunctionInterface #-} module Network.EXTLS where import Data.Bits import qualified Data.ByteString as B import Data.ByteString.Internal (createAndTrim) import Data.ByteString.Unsafe (unsafeUseAsCStringLen) import Data.Functor import Data.Word import Foreign.C.Types import Foreign.Ptr -- a socket as a pair of callbacks -- use, e.g., Network.Socket.sendBuf and Network.Socket.recvBuf data Endpoint = Endpoint { send :: Ptr Word8 -> Int -> IO Int, recv :: Ptr Word8 -> Int -> IO Int } -- abstract types from openssl newtype BIO = BIO (Ptr BIO) newtype SSL = SSL (Ptr SSL) newtype SSL_CTX = SSL_CTX (Ptr SSL_CTX) data TLS = TLS { tlsRawSSL :: Ptr SSL, tlsEndpoint :: Endpoint } -- note -- SSL_read will do an incomplete read if you ask for more than is left in the current TLS record foreign import ccall safe "SSL_read" raw_read :: Ptr SSL -> Ptr a -> CInt -> IO CInt -- note -- SSL_write will never do a partial write when blocking unless specifically asked to with SSL_set_mode or SSL_CTX_set_mode foreign import ccall safe "SSL_write" raw_write :: Ptr SSL -> Ptr a -> CInt -> IO CInt foreign import ccall safe "SSL_set_bio" set_bio :: Ptr SSL -> Ptr BIO -> Ptr BIO -> IO () foreign import ccall safe "SSL_set_mode" set_mode :: Ptr SSL -> CLong -> IO CLong -- note -- may return NULL on error foreign import ccall safe "SSL_new" raw_new :: Ptr SSL_CTX -> IO (Ptr SSL) mode_auto_retry :: CLong mode_auto_retry = 0x4 mode_release_buffers :: CLong mode_release_buffers = 0x10 -- XXX add error checking for nonpositive length and SSL error returns wrap_buf :: (Ptr SSL -> Ptr a -> CInt -> IO CInt) -> (TLS -> Ptr a -> Int -> IO Int) wrap_buf f (TLS { tlsRawSSL = ssl }) buf n = fromIntegral <$> f ssl buf (fromIntegral n) read_buf :: TLS -> Ptr a -> Int -> IO Int read_buf = wrap_buf raw_read write_buf :: TLS -> Ptr a -> Int -> IO Int write_buf = wrap_buf raw_write read :: TLS -> Int -> IO B.ByteString read h n = createAndTrim n (flip (read_buf h) n) write :: TLS -> B.ByteString 
-> IO Int write h s = unsafeUseAsCStringLen s (uncurry (write_buf h))
xplat/extls
src/Network/EXTLS.hs
bsd-3-clause
2,179
0
11
469
624
328
296
39
1
module Main where import Ivory.Tower.Config import Ivory.OS.FreeRTOS.Tower.STM32 import LDrive.Platforms import LDrive.Tests.Calib (app) main :: IO () main = compileTowerSTM32FreeRTOS testplatform_stm32 p $ app (stm32config_clock . testplatform_stm32) testplatform_adcs testplatform_enc testplatform_spi testplatform_pwm testplatform_uart testplatform_leds where p topts = getConfig topts testPlatformParser
sorki/odrive
test/CalibTest.hs
bsd-3-clause
496
0
8
127
95
53
42
15
1
module Sols.Prob13 ( solution ) where parseNumbers :: String -> [Integer] parseNumbers txt = map read txtLines where txtLines = lines txt solution = do nums <- readFile "./data/prob13.txt" print $ take 10 $ show $ sum $ parseNumbers nums
authentik8/haskell-euler
src/Sols/Prob13.hs
bsd-3-clause
259
0
11
61
86
43
43
8
1
{-# LANGUAGE TemplateHaskell, StandaloneDeriving, GeneralizedNewtypeDeriving #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module Micro.PkgMsgpack where import Micro.Types import qualified Data.MessagePack as MsgPack import Data.MessagePack (deriveObject) import Data.ByteString.Lazy as BS serialise :: Tree -> BS.ByteString serialise = MsgPack.pack deserialise :: BS.ByteString -> Tree deserialise = MsgPack.unpack deriveObject False ''Tree
arianvp/binary-serialise-cbor
bench/Micro/PkgMsgpack.hs
bsd-3-clause
444
0
6
52
85
51
34
12
1
{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} module Aws.DynamoDB.Commands.GetItem ( GetItem(..) , GetItemResponse(..) , getItem ) where import Aws.Core import Aws.DynamoDB.Core import Control.Applicative import Data.Aeson import qualified Data.Map as Map import qualified Data.Text as T import qualified Test.QuickCheck as QC data GetItem = GetItem { giKey :: Key -- Yes , giTableName :: TableName -- Yes , giAttributesToGet :: Maybe [T.Text] -- No , giConsistentRead :: Maybe Bool -- No , giReturnConsumedCapacity :: Maybe Bool -- No } deriving (Show, Eq) instance ToJSON GetItem where toJSON (GetItem a b c d e) = object[ "Key" .= a , "TableName" .= b , "AttributesToGet" .= c , "ConsistentRead" .= d , "ReturnConsumedCapacity" .= e ] instance FromJSON GetItem where parseJSON (Object v) = GetItem <$> v .: "Key" <*> v .: "TableName" <*> v .:? "AttributesToGet" <*> v .:? "ConsistentRead" <*> v .:? "ReturnConsumedCapacity" instance QC.Arbitrary GetItem where arbitrary = GetItem <$> QC.arbitrary <*> QC.arbitrary <*> QC.arbitrary <*> QC.arbitrary <*> QC.arbitrary data GetItemResult = GetItemResult{ consumedCapacity :: Maybe ConsumedCapacity, item :: Maybe Item } deriving(Show, Eq) instance FromJSON GetItemResult where parseJSON (Object v) = GetItemResult <$> v .:? "ConsumedCapacity" <*> v .:? "Item" data GetItemResponse = GetItemResponse { girConsumedCapacity :: Maybe ConsumedCapacity, girItem :: Maybe Item }deriving (Show,Eq) instance ToJSON GetItemResponse where toJSON (GetItemResponse a b) = object[ "ConsumedCapacity" .= a , "Item" .= b ] instance FromJSON GetItemResponse where parseJSON (Object v) = GetItemResponse <$> v .:? "ConsumedCapacity" <*> v .:? 
"Item" instance QC.Arbitrary GetItemResponse where arbitrary = GetItemResponse <$> QC.arbitrary <*> QC.arbitrary --getItem :: GetItem getItem a b c d e = GetItem a b c d e instance SignQuery GetItem where type ServiceConfiguration GetItem = DdbConfiguration signQuery a@GetItem {..} = ddbSignQuery DdbQuery { ddbqMethod = Post , ddbqRequest = "" , ddbqQuery = [] , ddbqCommand = "DynamoDB_20120810.GetItem" , ddbqBody = Just $ toJSON $ a } instance ResponseConsumer GetItem GetItemResponse where type ResponseMetadata GetItemResponse = DdbMetadata responseConsumer _ mref = ddbResponseConsumer mref $ \rsp -> cnv <$> jsonConsumer rsp where cnv (GetItemResult a b) = GetItemResponse a b instance Transaction GetItem GetItemResponse instance AsMemoryResponse GetItemResponse where type MemoryResponse GetItemResponse = GetItemResponse loadToMemory = return
ywata/dynamodb
Aws/DynamoDB/Commands/GetItem.hs
bsd-3-clause
3,509
0
15
1,255
749
411
338
87
1
{-# LANGUAGE OverloadedStrings #-} module System.Nemesis.Utils where import Prelude hiding ((-)) infixr 0 - {-# INLINE (-) #-} (-) :: (a -> b) -> a -> b f - x = f x ljust :: Int -> a -> [a] -> [a] ljust n x xs | n < length xs = xs | otherwise = take n (xs ++ replicate n x)
nfjinjing/nemesis
src/System/Nemesis/Utils.hs
bsd-3-clause
286
0
9
75
135
73
62
11
1
---------------------------------------------------------------------------- -- | -- Module : ESpecificExportListWithChildrenPlusSome -- Copyright : (c) Sergey Vinokurov 2018 -- License : BSD3-style (see LICENSE) -- Maintainer : serg.foo@gmail.com ---------------------------------------------------------------------------- {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE TemplateHaskell #-} module ESpecificExportListWithChildrenPlusSome ( FooE(FooE1, FrobEP) , BarE(BarE1, unBarE, BazEP) , quuxE , QuuxE(QuuxE2, QuuxEP) , commonFunc , derivedE ) where data FooE = FooE1 { fooE1 :: Int , fooE2 :: !Double } newtype BarE = BarE1 { unBarE :: [Double] } pattern BazEP :: Double -> Double -> BarE pattern BazEP x y = BarE1 [x, y] quuxE :: Int -> Int quuxE x = x pattern FrobEP :: Int -> FooE pattern FrobEP x = FooE1 { fooE1 = x, fooE2 = 0 } data QuuxE = QuuxE1 Int | QuuxE2 pattern QuuxEP :: Int -> QuuxE pattern QuuxEP n = QuuxE1 n commonFunc :: Double -> Double commonFunc x = x + x * x $([d| derivedE :: Int -> Int derivedE x = x |])
sergv/tags-server
test-data/0012resolve_reexport_import_cycles/ESpecificExportListWithChildrenPlusSome.hs
bsd-3-clause
1,133
0
9
236
256
154
102
41
1
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, MultiParamTypeClasses, FlexibleContexts, RankNTypes, GADTs, PatternSynonyms, TypeFamilies, DeriveGeneric, GeneralizedNewtypeDeriving #-} module QueryArrow.SQL.SQL where import QueryArrow.Syntax.Term hiding (Subst, subst) import QueryArrow.Syntax.Type import QueryArrow.Semantics.TypeChecker import QueryArrow.Syntax.Serialize import QueryArrow.Syntax.Utils import QueryArrow.DB.GenericDatabase import QueryArrow.ListUtils import QueryArrow.Utils import QueryArrow.Semantics.Domain import Prelude hiding (lookup) import Data.List (intercalate, (\\),union, nub) import Data.Either (rights, lefts, isRight) import Control.Monad.Trans.State.Strict (StateT, get, put, evalStateT, runStateT, modify) import Control.Monad.Trans.Class (lift) import Data.Map.Strict (empty, Map, insert, member, singleton, lookup, fromList, keys, toList, elems, size) import Data.Monoid ((<>)) import Data.Maybe import Control.Monad import Debug.Trace import qualified Data.Text as T import Data.Set (toAscList, Set) import qualified Data.Set as Set import Algebra.Lattice import Algebra.Lattice.Dropped import Algebra.Lattice.Ordered import System.Log.Logger import GHC.Generics import Data.Yaml import Control.Comonad.Cofree type Col = String type TableName = String data Table = OneTable {tableName::TableName, sqlVar:: SQLVar} deriving (Eq, Ord, Show, Generic) data FromTable = SimpleTable TableName SQLVar | QueryTable SQL SQLVar deriving (Eq, Ord, Show) data SQLMapping = SQLMapping { sqlMappingPredName :: String, sqlMappingTable :: TableName, sqlMappingCols :: [Col]} deriving (Eq, Ord, Show, Generic) type SQLTableList = [FromTable] -- merge two lists of sql tables -- only support onetable mergeTables :: SQLTableList -> SQLTableList -> SQLTableList mergeTables = union newtype SQLVar = SQLVar {unSQLVar :: String} deriving (Eq, Ord, Show, FromJSON, ToJSON) instance FromJSON Table instance ToJSON Table instance FromJSON SQLQualifiedCol instance ToJSON 
SQLQualifiedCol instance FromJSON SQLMapping instance ToJSON SQLMapping data SQLQualifiedCol = SQLQualifiedCol { tableVar :: SQLVar, colName :: Col} deriving (Eq, Ord, Show, Generic) type SQLOper = String data SQLExpr = SQLColExpr2 String | SQLColExpr SQLQualifiedCol | SQLIntConstExpr Integer | SQLStringConstExpr T.Text | SQLNullExpr | SQLParamExpr String | SQLExprText String | SQLListExpr [SQLExpr] | SQLCastExpr SQLExpr String | SQLArrayExpr SQLExpr SQLExpr | SQLInfixFuncExpr String SQLExpr SQLExpr | SQLFuncExpr String [SQLExpr] | SQLFuncExpr2 String SQLExpr deriving (Eq, Ord, Show) isSQLConstExpr :: SQLExpr -> Bool isSQLConstExpr (SQLIntConstExpr _ ) = True isSQLConstExpr (SQLStringConstExpr _ ) = True isSQLConstExpr (SQLParamExpr _) = True isSQLConstExpr _ = False data SQLCond = SQLCompCond SQLOper SQLExpr SQLExpr | SQLAndCond SQLCond SQLCond | SQLOrCond SQLCond SQLCond | SQLExistsCond SQL | SQLNotCond SQLCond | SQLTrueCond | SQLFalseCond deriving (Eq, Ord, Show) getSQLConjuncts :: SQLCond -> [SQLCond] getSQLConjuncts (SQLAndCond conj1 conj2) = getSQLConjuncts conj1 ++ getSQLConjuncts conj2 getSQLConjuncts cond = [cond] getSQLDisjuncts :: SQLCond -> [SQLCond] getSQLDisjuncts (SQLOrCond conj1 conj2) = getSQLConjuncts conj1 ++ getSQLConjuncts conj2 getSQLDisjuncts cond = [cond] (.&&.) :: SQLCond -> SQLCond -> SQLCond a .&&. b = SQLAndCond a b (.||.) :: SQLCond -> SQLCond -> SQLCond a .||. b = SQLOrCond a b (.=.) :: SQLExpr -> SQLExpr -> SQLCond a .=. b = SQLCompCond "=" a b (.<>.) :: SQLExpr -> SQLExpr -> SQLCond a .<>. 
b = SQLCompCond "<>" a b data SQLOrder = ASC | DESC deriving (Eq, Ord, Show) type IntLattice = Dropped (Ordered Int) pattern IntLattice a = Drop (Ordered a) data SQL = SQLQuery {sqlSelect :: [ (Var, SQLExpr) ], sqlFrom :: SQLTableList, sqlWhere :: SQLCond, sqlOrderBy :: [(SQLExpr, SQLOrder)], sqlLimit :: IntLattice, sqlDistinct :: Bool, sqlGroupBy :: [SQLExpr]} deriving (Eq, Ord, Show) data SQLStmt = SQLQueryStmt SQL | SQLInsertStmt TableName [(Col,SQLExpr)] [FromTable] SQLCond | SQLUpdateStmt (TableName, SQLVar) [(Col,SQLExpr)] SQLCond | SQLDeleteStmt (TableName, SQLVar) SQLCond deriving (Eq, Ord, Show) instance Serialize FromTable where serialize (SimpleTable tablename var) = tablename ++ " " ++ serialize var serialize (QueryTable qu var) = "(" ++ serialize qu ++ ") " ++ serialize var instance Serialize SQLCond where serialize a = show2 a [] instance Serialize SQLExpr where serialize a = show2 a [] class Show2 a where show2 :: a -> [SQLVar] -> String instance Show2 SQLCond where show2 (SQLCompCond op lhs rhs) sqlvar = show2 lhs sqlvar ++ " " ++ op ++ " " ++ show2 rhs sqlvar show2 (SQLAndCond a b) sqlvar = "(" ++ show2 a sqlvar ++ " AND " ++ show2 b sqlvar ++ ")" show2 (SQLOrCond a b) sqlvar = "(" ++ show2 a sqlvar ++ " OR " ++ show2 b sqlvar ++ ")" show2 (SQLTrueCond) _ = "True" show2 (SQLFalseCond) _ = "False" show2 (SQLExistsCond sql) sqlvar = "(EXISTS (" ++ show2 sql sqlvar ++ "))" show2 (SQLNotCond sql) sqlvar = "(NOT (" ++ show2 sql sqlvar ++ "))" instance Show2 SQLExpr where show2 (SQLColExpr2 col) sqlvar = col show2 (SQLColExpr (SQLQualifiedCol var col)) sqlvar = if var `elem` sqlvar then col else serialize var ++ "." ++ col show2 (SQLIntConstExpr i) _ = show i show2 (SQLStringConstExpr s) _ = "'" ++ sqlStringEscape (T.unpack s) ++ "'" show2 (SQLParamExpr _) _ = "?" 
show2 (SQLCastExpr arg ty) sqlvar = "cast(" ++ show2 arg sqlvar ++ " as " ++ ty ++ ")" show2 (SQLArrayExpr arr inx) sqlvar = "(" ++ show2 arr sqlvar ++ ")[" ++ show2 inx sqlvar ++ "]" show2 (SQLInfixFuncExpr fn a b) sqlvar = "(" ++ show2 a sqlvar ++ fn ++ show2 b sqlvar ++ ")" show2 (SQLListExpr args) sqlvar = "ARRAY[" ++ intercalate "," (map (\a -> show2 a sqlvar) args) ++ "]" show2 (SQLFuncExpr fn args) sqlvar = fn ++ "(" ++ intercalate "," (map (\a -> show2 a sqlvar) args) ++ ")" show2 (SQLFuncExpr2 fn arg) sqlvar = fn ++ " " ++ show2 arg sqlvar show2 (SQLExprText s) _ = s show2 SQLNullExpr _ = "NULL" instance Serialize SQLVar where serialize (SQLVar var) = var showWhereCond2 :: SQLCond -> [SQLVar] -> String showWhereCond2 cond sqlvar = case cond of SQLTrueCond -> "" _ -> " WHERE " ++ show2 cond sqlvar instance Show2 SQL where show2 (SQLQuery cols tables conds orderby limit distinct groupby) sqlvar = "SELECT " ++ (if distinct then "DISTINCT " else "") ++ (if null cols then "1" else intercalate "," (map (\(var, expr) -> show2 expr sqlvar ++ " AS \"" ++ serialize var ++ "\"") cols)) ++ (if null tables then "" else " FROM " ++ intercalate "," (map serialize tables)) ++ (showWhereCond2 conds sqlvar) ++ (if null groupby then "" else " GROUP BY " ++ intercalate "," (map serialize groupby)) ++ (if null orderby then "" else " ORDER BY " ++ intercalate "," (map (\(expr, ord) -> serialize expr ++ " " ++ case ord of ASC -> "ASC" DESC -> "DESC") orderby)) ++ (case limit of Top -> "" IntLattice n -> " LIMIT " ++ show n) instance Serialize SQL where serialize sql = show2 sql [] instance Serialize SQLStmt where serialize (SQLQueryStmt sql) = serialize sql serialize (SQLInsertStmt tname colsexprs tables cond) = let (cols, exprs) = unzip colsexprs in "INSERT INTO " ++ tname ++ " (" ++ intercalate "," cols ++ ")" ++ if null tables && all isSQLConstExpr exprs then " VALUES (" ++ intercalate "," (map (\a -> show2 a []) exprs)++ ")" else " SELECT " ++ intercalate "," (map (\a -> 
show2 a []) exprs) ++ (if null tables then "" else " FROM " ++ intercalate "," (map serialize tables)) ++ showWhereCond2 cond [] serialize (SQLDeleteStmt (tname, sqlvar) cond) = "DELETE FROM " ++ tname ++ showWhereCond2 cond [sqlvar] serialize (SQLUpdateStmt (tname, sqlvar) colsexprs cond) = "UPDATE " ++ tname ++ " SET " ++ intercalate "," (map (\(col, expr)-> col ++ " = " ++ show2 expr [sqlvar]) colsexprs) ++ showWhereCond2 cond [sqlvar] sqlStringEscape :: String -> String sqlStringEscape = concatMap f where f '\'' = "''" f a = [a] sqlPatternEscape :: String -> String sqlPatternEscape = concatMap f where f '\\' = "\\\\" f a = [a] class Subst a where subst :: Map SQLVar SQLVar -> a -> a class SQLFreeVars a where fv :: a -> [(TableName, SQLVar)] instance Subst SQLVar where subst varmap var = case lookup var varmap of Nothing -> var Just var2 -> var2 instance Subst FromTable where subst varmap (SimpleTable tablename var) = SimpleTable tablename (subst varmap var) subst varmap (QueryTable qu var) = QueryTable (subst varmap qu) (subst varmap var) instance SQLFreeVars Table where fv (OneTable tablename var) = [(tablename, var)] instance Subst SQLCond where subst varmap (SQLCompCond op a b) = SQLCompCond op (subst varmap a) (subst varmap b) subst varmap (SQLAndCond a b) = SQLAndCond (subst varmap a) (subst varmap b) subst varmap (SQLOrCond a b) = SQLOrCond (subst varmap a) (subst varmap b) subst _ (SQLTrueCond ) = SQLTrueCond subst _ (SQLFalseCond ) = SQLFalseCond subst varmap (SQLNotCond a) = SQLNotCond (subst varmap a) -- subst varmap (SQLExistsCond sql) = SQLExistsCond (subst varmap sql) subst _ _ = error "unsupported SQLCond" instance Subst SQLExpr where subst varmap (SQLColExpr qcol) = SQLColExpr (subst varmap qcol) subst _ a = a instance Subst SQLQualifiedCol where subst varmap (SQLQualifiedCol var col) = SQLQualifiedCol (subst varmap var) col instance Subst a => Subst [a] where subst varmap = map (subst varmap) instance Subst (Var, SQLExpr) where subst varmap 
(a,b) = (a, subst varmap b) instance Subst (SQLExpr, SQLOrder) where subst varmap (a,b) = (subst varmap a, b) instance Subst SQL where subst varmap (SQLQuery sel fro whe orderby limit distinct groupby) = SQLQuery (subst varmap sel) (subst varmap fro) (subst varmap whe) (subst varmap orderby) limit distinct (subst varmap groupby) type SQLQuery0 = ([Var], SQLStmt) -- return vars, sql type SQLQuery = ([Var], SQLStmt, [Var]) -- return vars, sql, param vars instance Semigroup SQL where (SQLQuery sselect1 sfrom1 swhere1 [] Top False []) <> (SQLQuery sselect2 sfrom2 swhere2 [] Top False []) = SQLQuery (sselect1 ++ sselect2) (sfrom1 `mergeTables` sfrom2) (swhere1 .&&. swhere2) [] Top False [] _ <> _ = error "sand: incompatible order by, limit, distinct, or group by" instance Monoid SQL where mempty = SQLQuery [] [] SQLTrueCond [] top False [] sor :: SQL -> SQL -> SQL sor (SQLQuery sselect1 sfrom1 swhere1 [] Top False []) (SQLQuery sselect2 sfrom2 swhere2 [] Top False []) = SQLQuery (sselect1 ++ sselect2) (sfrom1 `mergeTables` sfrom2) (swhere1 .||. swhere2) [] top False [] sor _ _ = error "sor: incompatible order by, limit, distinct, or group by" snot :: SQL -> SQL snot (SQLQuery sselect sfrom swhere _ (Drop (Ordered 0)) _ _) = mempty snot (SQLQuery sselect sfrom swhere _ _ _ _) = SQLQuery sselect sfrom (SQLNotCond swhere) [] top False [] swhere :: SQLCond -> SQL swhere swhere1 = SQLQuery [] [] swhere1 [] top False [] -- translate relational calculus to sql -- If P maps to table T col_1 ... col_n -- {xs : P(e_1,e_2,...,e_n)} -- translates to SELECT cols FROM T P WHERE ... 
-- if x_i = e_j then "col_j", if there are multiple j's, choose any one -- if e_i is a const, then "P.col_i = e_i" -- if e_i is a variable, and e_j = e_i then "P.col_j = P.col_i" -- otherwise True -- rep map maps a FO variable to a qualified column in sql so that all implicit equality constraints are -- compared with this column -- table map maps a table name and a list of primary key expressions to a sql var so that all predicates -- that share this list of primary key expressions uses the same sql var type RepMap = Map Var SQLExpr type TableMap = Map (TableName, [Expr]) SQLVar -- predicate -> table type PredTableMap = Map PredName (TableName, [Col]) -- table -> cols, primary key type Schema = Map TableName ([Col], [Col]) -- builtin predicate -> op, neg op newtype BuiltIn = BuiltIn (Map PredName ([Expr] -> TransMonad SQL)) simpleBuildIn :: String -> ([SQLExpr] -> TransMonad SQL) -> [Expr] -> TransMonad SQL simpleBuildIn n builtin args = do let err m = do a <- m case a of Left expr -> return expr Right _ -> error ("unconstrained argument to built-in predicate " ++ n) sqlExprs <- mapM (err . 
sqlExprFromArg) args builtin sqlExprs repBuildIn :: ([Either SQLExpr Var] -> [(Var, SQLExpr)]) -> [Expr] -> TransMonad SQL repBuildIn builtin args = do sqlExprs <- mapM sqlExprFromArg args let varexprs = builtin sqlExprs mapM_ (uncurry addVarRep) varexprs return mempty data TransState = TransState { builtin :: BuiltIn, predtablemap :: PredTableMap, repmap :: RepMap, tablemap :: TableMap, -- this is a list of free vars that appear in atoms to be deleted they must be linear nextid :: Maybe Pred, ptm :: PredTypeMap } type TransMonad a = StateT TransState NewEnv a freshSQLVar :: TableName -> TransMonad SQLVar freshSQLVar tablename = lift $ SQLVar <$> new (StringWrapper tablename) subState :: TransMonad a -> TransMonad a subState a = do state <- get r <- a put state return r sqlExprListFromArg :: Expr -> TransMonad [SQLExpr] sqlExprListFromArg e = do let l = exprListFromExpr e l2 <- mapM sqlExprFromArg l let l3 = filter isRight l2 if null l3 then return (lefts l2) else error ("sqlExprListFromArg: unrepresented var(s) in cast expr " ++ show (rights l3)) sqlExprFromArg :: Expr -> TransMonad (Either SQLExpr Var) sqlExprFromArg arg = do ts <- get case arg of VarExpr var2 -> return (case lookup var2 (repmap ts) of Just e -> Left e Nothing -> Right var2) IntExpr i -> return (Left (SQLIntConstExpr i)) StringExpr s -> return (Left (SQLStringConstExpr s)) ListConsExpr a b -> do l <- sqlExprListFromArg arg return (Left (SQLListExpr l)) ListNilExpr -> do l <- sqlExprListFromArg arg return (Left (SQLListExpr l)) NullExpr -> return (Left (SQLNullExpr)) CastExpr t v -> do e2 <- sqlExprFromArg v case e2 of Left e -> return (Left (SQLCastExpr e (case t of TextType -> "text" Int64Type -> "integer"))) Right var -> error ("unrepresented var in cast expr " ++ show var ++ " " ++ show (repmap ts)) addVarRep :: Var -> SQLExpr -> TransMonad () addVarRep var expr = modify (\ts-> ts {repmap = insert var expr (repmap ts)}) condFromArg :: (Expr, SQLQualifiedCol) -> TransMonad SQLCond condFromArg 
(arg, col) = do v <- sqlExprFromArg arg case v of Left expr -> return ((SQLColExpr col) .=. expr) Right var2 -> do addVarRep var2 (SQLColExpr col) return SQLTrueCond -- add a sql representing the row identified by the keys addTable :: TableName -> [Expr] -> SQLVar -> TransMonad () addTable tablename prikeyargs sqlvar2 = modify (\ts -> ts {tablemap = insert (tablename, prikeyargs) sqlvar2 (tablemap ts)}) class Params a where params :: a -> [Var] instance Params a => Params [a] where params = foldMap params instance Params SQLExpr where params (SQLParamExpr p) = [Var p] params (SQLCastExpr e _) = params e params (SQLArrayExpr a b) = params a ++ params b params (SQLInfixFuncExpr _ a b) = params a ++ params b params (SQLFuncExpr _ es) = foldMap params es params (SQLFuncExpr2 _ e) = params e params _ = [] instance Params SQLCond where params (SQLCompCond _ e1 e2) = params e1 ++ params e2 params (SQLOrCond c1 c2 ) = params c1 ++ params c2 params (SQLAndCond c1 c2) = params c1 ++ params c2 params (SQLNotCond c) = params c params (SQLFalseCond) = [] params (SQLTrueCond) = [] params (SQLExistsCond sql) = params sql instance Params SQLStmt where params (SQLQueryStmt sql) = params sql params (SQLInsertStmt _ vs _ cond) = params (map snd vs) ++ params cond params (SQLUpdateStmt _ vs cond) = params (map snd vs) ++ params cond params (SQLDeleteStmt _ cond) = params cond instance Params FromTable where params (SimpleTable _ _) = [] params (QueryTable sql _) = params sql instance Params SQL where params (SQLQuery sel from cond _ _ _ _) = params sel ++ params from ++ params cond instance Params (Var, SQLExpr) where params (_, expr) = params expr instance Params SQLQuery where params (_, _, params) = params instance Serialize SQLQuery where serialize (_, stmt, _) = show stmt translateQueryToSQL :: [Var] -> Formula -> TransMonad SQLQuery translateQueryToSQL vars formula = do ts <- get let nextid1 = nextid ts let repmap1 = repmap ts case formula of FAtomic (Atom p [VarExpr v]) | 
(predName <$> nextid1) == Just p -> if v `member` repmap1 then error (show "translateQueryToSQL: nextid " ++ show v ++ " is already bound") else return ([v], SQLQueryStmt (SQLQuery {sqlSelect = [(v, SQLFuncExpr "nextval" [SQLStringConstExpr (T.pack "R_ObjectId")])], sqlFrom = [], sqlWhere = SQLTrueCond, sqlDistinct = False, sqlOrderBy = [], sqlLimit = top, sqlGroupBy = []}), []) _ -> do (vars, sql) <- if pureF formula then do (SQLQuery sels tablelist cond1 orderby limit distinct groupby) <- translateFormulaToSQL formula ts <- get let map2 = fromList sels <> repmap ts let extractCol var = case lookup var map2 of Just col -> col _ -> error ("translateQueryToSQL: " ++ show var ++ " doesn't correspond to a column while translating query " ++ show formula ++ " to SQL, available " ++ show (repmap ts)) let cols = map extractCol vars sql = SQLQueryStmt (SQLQuery (zip vars cols) tablelist cond1 orderby limit distinct groupby) return (vars, sql) else translateInsertToSQL formula let sql2 = simplifySQLCond sql return (vars, sql2, params sql2) class SimplifySQLCond a where simplifySQLCond :: a -> a instance SimplifySQLCond SQLStmt where simplifySQLCond (SQLQueryStmt sql) = SQLQueryStmt (simplifySQLCond sql) simplifySQLCond (SQLInsertStmt t s f cond) = SQLInsertStmt t s f (simplifySQLCond cond) simplifySQLCond (SQLUpdateStmt t cs cond) = SQLUpdateStmt t cs (simplifySQLCond cond) simplifySQLCond (SQLDeleteStmt t cond) = SQLDeleteStmt t (simplifySQLCond cond) instance SimplifySQLCond SQL where simplifySQLCond (SQLQuery s f cond orderby limit distinct groupby) = SQLQuery s (simplifySQLCond f) (simplifySQLCond cond) orderby limit distinct groupby instance SimplifySQLCond a => SimplifySQLCond [a] where simplifySQLCond = map simplifySQLCond instance SimplifySQLCond FromTable where simplifySQLCond table@(SimpleTable _ _) = table simplifySQLCond (QueryTable sql v) = QueryTable (simplifySQLCond sql) v instance SimplifySQLCond SQLCond where simplifySQLCond c@(SQLCompCond "=" a b) | a == 
b = SQLTrueCond simplifySQLCond c@(SQLCompCond _ _ _) = c simplifySQLCond (SQLTrueCond) = SQLTrueCond simplifySQLCond (SQLFalseCond) = SQLFalseCond simplifySQLCond c@(SQLAndCond _ _) = let conj = getSQLConjuncts c conj2 = concatMap (getSQLConjuncts . simplifySQLCond) conj conj3 = filter (\a -> case a of SQLTrueCond -> False _ -> True) conj2 conj4 = if all (\a -> case a of SQLFalseCond -> False _ -> True) conj3 then conj3 else [SQLFalseCond] conj5 = nub conj4 in if null conj5 then SQLTrueCond else foldl1 (.&&.) conj5 simplifySQLCond (SQLOrCond a b) = case simplifySQLCond a of SQLFalseCond -> simplifySQLCond b SQLTrueCond -> SQLTrueCond a' -> case simplifySQLCond b of SQLFalseCond -> a' SQLTrueCond -> SQLTrueCond b' -> SQLOrCond a' b' simplifySQLCond (SQLNotCond a) = case simplifySQLCond a of SQLTrueCond -> SQLFalseCond SQLFalseCond -> SQLTrueCond a' -> SQLNotCond a' simplifySQLCond (SQLExistsCond sql) = SQLExistsCond (simplifySQLCond sql) sqlexists :: SQL -> SQL sqlexists (SQLQuery _ _ _ _ (Drop (Ordered 0)) _ _) = sqlfalse sqlexists (SQLQuery cols tablelist cond _ _ _ _) = SQLQuery [] [] (SQLExistsCond (SQLQuery cols tablelist cond [] Top False [])) [] top False [] sqlfalse :: SQL sqlfalse = SQLQuery [] [] (SQLFalseCond) [] top False [] sqlsummarize :: [(Var, SQLExpr)] -> [SQLExpr] -> SQL -> SQL sqlsummarize funcs groupby (SQLQuery _ from whe [] Top False []) = SQLQuery funcs from whe [] top False groupby sqlsummarize funcs groupby (SQLQuery _ from whe (_ : _) _ _ _) = error "cannot summarize orderby selection" sqlsummarize funcs groupby (SQLQuery _ from whe _ (Drop _) _ _) = error "cannot summarize limit selection" sqlsummarize funcs groupby (SQLQuery _ from whe _ _ True _) = error "cannot summarize distinct selection" sqlsummarize funcs groupby (SQLQuery _ from whe _ _ _ (_ : _)) = error "cannot summarize groupby selection" sqlsummarize2 :: [(Var, SQLExpr)] -> [SQLExpr] -> SQL -> SQLVar -> SQL sqlsummarize2 funcs groupby sql v = SQLQuery funcs [QueryTable sql v] 
SQLTrueCond [] top False [] sqlorderby :: SQLOrder -> SQLExpr -> SQL -> SQL sqlorderby ord expr (SQLQuery sel from whe orderby limit distinct groupby) = SQLQuery sel from whe ((expr, ord) : orderby) limit distinct groupby sqllimit :: IntLattice -> SQL -> SQL sqllimit n (SQLQuery sel from whe orderby limit distinct groupby) = SQLQuery sel from whe orderby (n /\ limit) distinct groupby findRep :: Var -> TransMonad SQLExpr findRep v = do ts <- get case lookup v (repmap ts) of Nothing -> error ("cannot find representative for variable " ++ show v) Just expr -> return expr translateFormulaToSQL :: Formula -> TransMonad SQL translateFormulaToSQL (FAtomic a) = translateAtomToSQL a translateFormulaToSQL (FSequencing form1 form2) = mappend <$> translateFormulaToSQL form1 <*> translateFormulaToSQL form2 translateFormulaToSQL (FOne) = return mempty translateFormulaToSQL (FChoice form1 form2) = sor <$> translateFormulaToSQL form1 <*> translateFormulaToSQL form2 translateFormulaToSQL (FPar form1 form2) = sor <$> translateFormulaToSQL form1 <*> translateFormulaToSQL form2 translateFormulaToSQL (FZero) = return sqlfalse translateFormulaToSQL (Aggregate Exists conj) = do sql <- subState (translateFormulaToSQL conj) return (sqlexists sql) translateFormulaToSQL (Aggregate Not form) = snot <$> subState (translateFormulaToSQL form) translateFormulaToSQL (Aggregate (Summarize funcs groupby) conj) = do sql@(SQLQuery sel fro whe ord lim dis gro) <- translateFormulaToSQL conj funcs' <- mapM (\(Bind v@(Var vn) s) -> do r <- case s of Max v2 -> do r <- findRep v2 return (v, SQLFuncExpr "coalesce" [SQLFuncExpr "max" [r], SQLIntConstExpr 0]) Min v2 -> do r <- findRep v2 return (v, SQLFuncExpr "coalesce" [SQLFuncExpr "min" [r], SQLIntConstExpr 0]) Sum v2 -> do r <- findRep v2 return (v, SQLFuncExpr "coalesce" [SQLFuncExpr "sum" [r], SQLIntConstExpr 0]) Average v2 -> do r <- findRep v2 return (v, SQLFuncExpr "coalesce" [SQLFuncExpr "average" [r], SQLIntConstExpr 0]) Count -> return (v, 
SQLFuncExpr "count" [SQLExprText "*"]) CountDistinct v2 -> do r <- findRep v2 return (v, SQLFuncExpr "count" [SQLFuncExpr2 "distinct" r]) Random v2 -> do r <- findRep v2 return (v, SQLArrayExpr (SQLFuncExpr "array_agg" [r]) (SQLIntConstExpr 1)) addVarRep v (SQLColExpr2 vn) return r) funcs groupbyreps <- mapM findRep groupby if null sel then return (sqlsummarize funcs' groupbyreps sql) else do qv <- freshSQLVar "qu" return (sqlsummarize2 funcs' groupbyreps sql qv) translateFormulaToSQL (Aggregate (Limit n) form) = sqllimit (IntLattice n) <$> translateFormulaToSQL form translateFormulaToSQL (Aggregate (OrderByAsc v) form) = do sql <- translateFormulaToSQL form rep <- findRep v return (sqlorderby ASC rep sql) translateFormulaToSQL (Aggregate (OrderByDesc v) form) = do sql <- translateFormulaToSQL form rep <- findRep v return (sqlorderby DESC rep sql) translateFormulaToSQL (Aggregate Distinct form) = do sql <- translateFormulaToSQL form return sql {sqlDistinct = True} translateFormulaToSQL form = error "unsupported" lookupTableVar :: String -> [Expr] -> TransMonad (Bool, SQLVar) lookupTableVar tablename prikeyargs = do ts <- get case lookup (tablename, prikeyargs) (tablemap ts) of -- check if there already is a table with same primary key Just v -> return (False, v) Nothing -> do sqlvar2 <- freshSQLVar tablename addTable tablename prikeyargs sqlvar2 return (True, sqlvar2) translateAtomToSQL :: Atom -> TransMonad SQL translateAtomToSQL (Atom name args) = do ts <- get let (BuiltIn builtints) = builtin ts --try builtin first case lookup name builtints of Just builtinpred -> builtinpred args Nothing -> case lookup name (predtablemap ts) of Just (tablename, cols) -> do (tables, varmap, cols2, args2) <- case lookup name (ptm ts) of Nothing -> error ("translateAtomToSQL: cannot find predicate " ++ show name) Just pt -> do let prikeyargs = keyComponents pt args let prikeyargcols = keyComponents pt cols -- if primary key columns correspond to args (new, v) <- lookupTableVar 
tablename prikeyargs if new then return ([SimpleTable tablename v], v, cols, args) else do let cols2 = cols \\ prikeyargcols let args2 = args \\ prikeyargs return ([], v, cols2 , args2) let cols3 = map (SQLQualifiedCol varmap) cols2 condsFromArgs <- mapM condFromArg (zip args2 cols3) let cond3 = foldl (.&&.) SQLTrueCond condsFromArgs return (SQLQuery [] tables cond3 [] top False [] ) Nothing -> error (show name ++ " is not defined") -- formula must be pure translateInsertToSQL :: Formula -> TransMonad SQLQuery0 translateInsertToSQL form = do let conjs = getFsequencings form let f p [] = (p,[]) f p forms0@(form : forms) | pureF form = f (p ++ [form]) forms | otherwise = (p, forms0) let (p, e) = f [] conjs let lits = map (\(FInsert lit) -> lit) e let form' = fsequencing p translateInsertToSQL' lits form' translateInsertToSQL' :: [Lit] -> Formula -> TransMonad SQLQuery0 translateInsertToSQL' lits conj = do (SQLQuery _ tablelist cond orderby limit distinct groupby) <- translateFormulaToSQL conj if distinct then error "cannot insert from distinct selection" else do ts <- get let keymap = sortByKey (ptm ts) lits if size keymap > 1 then error ("translateInsertToSQL: more than one key " ++ show keymap) else do insertparts <- sortParts <$> (concat <$> mapM combineLitsSQL (elems keymap)) case insertparts of [insertpart] -> do ts <- get let sql = toInsert tablelist cond insertpart return ([], sql) _ -> error ("translateInsertToSQL: more than one actions " ++ show insertparts ++ show lits) -- each SQLStmt must be an Insert statement sortParts :: [SQLStmt] -> [SQLStmt] sortParts [] = [] sortParts (p : ps) = b++[a] where (a, b) = foldl (\(active, done) part -> case (active, part) of (SQLInsertStmt tname colexprs tablelist cond, SQLInsertStmt tname2 colexprs2 tablelist2 cond2) | tname == tname2 && compatible colexprs colexprs2 && cond == cond2 -> (SQLInsertStmt tname ( colexprs `union` colexprs2) (tablelist ++ tablelist2) cond, done) (SQLUpdateStmt tnamevar colexprs cond, 
SQLUpdateStmt tnamevar2 colexprs2 cond2) | tnamevar == tnamevar2 && compatible colexprs colexprs2 && cond == cond2 -> (SQLUpdateStmt tnamevar ( colexprs `union` colexprs2) cond, done) (SQLDeleteStmt tnamevar cond, SQLDeleteStmt tnamevar2 cond2) | tnamevar == tnamevar2 && cond == cond2 -> (SQLDeleteStmt tnamevar cond, done) _ -> (part, active : done)) (p,[]) ps where compatible colexpr = all (\(col, expr) -> all (\(col2, expr2) ->col2 /= col || expr2 == expr) colexpr) toInsert :: [FromTable] -> SQLCond -> SQLStmt -> SQLStmt toInsert tablelist cond (SQLInsertStmt tname colexprs tablelist2 cond2) = SQLInsertStmt tname colexprs (tablelist ++ tablelist2) (cond .&&. cond2) toInsert tablelist cond (SQLDeleteStmt tname cond2) = SQLDeleteStmt tname (cond .&&. cond2) toInsert tablelist cond (SQLUpdateStmt tname colexprs cond2) = SQLUpdateStmt tname colexprs (cond .&&. cond2) combineLitsSQL :: [Lit] -> TransMonad [SQLStmt] combineLitsSQL lits = do ts <- get combineLits (ptm ts) lits generateUpdateSQL generateInsertSQL generateDeleteSQL preproc0 tname cols pred1 args = do let key = keyComponents pred1 args (_, sqlvar2) <- lookupTableVar tname key let qcol_args = zip (map (SQLQualifiedCol sqlvar2) cols) args return (qcol_args, sqlvar2) preproc tname cols pred1 args = do (qcol_args , sqlvar2) <- preproc0 tname cols pred1 args let keyqcol_args = keyComponents pred1 qcol_args let propqcol_args = propComponents pred1 qcol_args return (keyqcol_args, propqcol_args, sqlvar2) generateDeleteSQL :: Atom -> TransMonad [SQLStmt] generateDeleteSQL atom = do sql <- translateDeleteAtomToSQL atom return [sql] generateInsertSQL :: [Atom] -> TransMonad [SQLStmt] generateInsertSQL atoms = do let map1 = sortAtomByPred atoms mapM generateInsertSQLForPred (toList map1) generateUpdateSQL :: [Atom] -> [Atom] -> TransMonad [SQLStmt] generateUpdateSQL pospropatoms negpropatoms = do let posprednamemap = sortAtomByPred pospropatoms let negprednamemap = sortAtomByPred negpropatoms let allkeys = keys 
posprednamemap `union` keys negprednamemap let poslist = [l | key <- allkeys, let l = case lookup key posprednamemap of Nothing -> []; Just l' -> l'] let neglist = [l | key <- allkeys, let l = case lookup key negprednamemap of Nothing -> []; Just l' -> l'] mapM generateUpdateSQLForPred (zip3 allkeys poslist neglist) generateInsertSQLForPred :: (PredName, [Atom]) -> TransMonad SQLStmt generateInsertSQLForPred (pred1, [posatom]) = translatePosInsertAtomToSQL posatom -- set property generateInsertSQLForPred (pred1, _) = error "unsupported number of pos and neg literals" -- set property generateUpdateSQLForPred :: (PredName, [Atom], [Atom]) -> TransMonad SQLStmt generateUpdateSQLForPred (pred1, [posatom], _) = translatePosUpdateAtomToSQL posatom -- set property generateUpdateSQLForPred (pred1, [], [negatom]) = translateNegUpdateAtomToSQL negatom -- set property generateUpdateSQLForPred (pred1, _, _) = error "unsupported number of pos and neg literals" -- set property translateDeleteAtomToSQL :: Atom -> TransMonad SQLStmt translateDeleteAtomToSQL (Atom pred1 args) = do ts <- get case lookup pred1 (predtablemap ts) of Just (tname, cols) -> case lookup pred1 (ptm ts) of Nothing -> error ("translateDeleteAtomToSQL: cannot find predicate " ++ show pred1) Just pt -> do (qcol_args, sqlvar2) <- preproc0 tname cols pt args cond <- foldl (.&&.) 
SQLTrueCond <$> mapM qcolArgToDelete qcol_args return (SQLDeleteStmt (tname, sqlvar2) cond) Nothing -> error "not an updatable predicate" translatePosInsertAtomToSQL :: Atom -> TransMonad SQLStmt translatePosInsertAtomToSQL (Atom pred1 args) = do ts <- get case lookup pred1 (predtablemap ts) of Just (tname, cols) -> do let col_args = zip cols args colexprs <- mapM colArgToValue col_args return (SQLInsertStmt tname colexprs [] SQLTrueCond) Nothing -> error "not an updatable predicate" translatePosUpdateAtomToSQL :: Atom -> TransMonad SQLStmt translatePosUpdateAtomToSQL (Atom pred1 args) = do ts <- get case lookup pred1 (predtablemap ts) of Just (tname, cols) -> case lookup pred1 (ptm ts) of Nothing -> error ("translatePosUpdateAtomToSQL: cannot find predicate " ++ show pred1) Just pt -> do (keyqcol_args, propqcol_args, sqlvar2) <- preproc tname cols pt args cond <- foldl (.&&.) SQLTrueCond <$> mapM qcolArgToUpdateCond keyqcol_args set <- mapM qcolArgToSet propqcol_args return (SQLUpdateStmt (tname, sqlvar2) set cond) Nothing -> error "not an updatable predicate" translateNegUpdateAtomToSQL :: Atom -> TransMonad SQLStmt translateNegUpdateAtomToSQL (Atom pred1 args) = do ts <- get case lookup pred1 (predtablemap ts) of Just (tname, cols) -> case lookup pred1 (ptm ts) of Nothing -> error ("translatePosUpdateAtomToSQL: cannot find predicate " ++ show pred1) Just pt -> do (keyqcol_args, propqcol_args, sqlvar2) <- preproc tname cols pt args cond <- foldl (.&&.) SQLTrueCond <$> mapM qcolArgToUpdateCond keyqcol_args (set, conds) <- unzip <$> mapM qcolArgToSetNull propqcol_args return (SQLUpdateStmt (tname, sqlvar2) set (foldl (.&&.) cond conds)) Nothing -> error "not an updatable predicate" qcolArgToDelete :: (SQLQualifiedCol, Expr) -> TransMonad SQLCond qcolArgToDelete (qcol, arg) = do sqlexpr <- sqlExprFromArg arg case sqlexpr of Left sqlexpr -> return (SQLColExpr qcol .=. 
sqlexpr) Right var -> error ("unbounded var " ++ show var) colArgToValue :: (Col, Expr) -> TransMonad (Col, SQLExpr) colArgToValue (col, arg) = do sqlexpr <- sqlExprFromArg arg case sqlexpr of Left sqlexpr -> return (col, sqlexpr) Right _ -> do ts <- get error ("qcolArgToValue: set value to unbounded var" ++ show col ++ " " ++ serialize arg ++ " " ++ show (repmap ts)) qcolArgToUpdateCond :: (SQLQualifiedCol, Expr) -> TransMonad SQLCond qcolArgToUpdateCond (qcol, arg) = do sqlexpr <- sqlExprFromArg arg case sqlexpr of Left sqlexpr -> return (SQLColExpr qcol .=. sqlexpr) Right var -> do addVarRep var (SQLColExpr qcol) return SQLTrueCond -- unbounded var qcolArgToCond :: (SQLQualifiedCol, Expr) -> TransMonad SQLCond qcolArgToCond (qcol, arg) = do sqlexpr <- sqlExprFromArg arg case sqlexpr of Left sqlexpr -> return (SQLColExpr qcol .=. sqlexpr) Right var -> do addVarRep var (SQLColExpr qcol) return SQLTrueCond -- unbounded var qcolArgToSet :: (SQLQualifiedCol, Expr) -> TransMonad (Col, SQLExpr) qcolArgToSet (SQLQualifiedCol var col, arg) = do sqlexpr <- sqlExprFromArg arg case sqlexpr of Left sqlexpr -> return (col, sqlexpr) Right _ -> do ts <- get error ("qcolArgToSet: set value to unbounded var" ++ show (var, col) ++ " " ++ serialize arg ++ " " ++ show (repmap ts)) qcolArgToSetNull :: (SQLQualifiedCol, Expr) -> TransMonad ((Col, SQLExpr), SQLCond) qcolArgToSetNull (qcol@(SQLQualifiedCol var col), arg) = do sqlexpr <- sqlExprFromArg arg case sqlexpr of Left sqlexpr -> return ((col, SQLNullExpr), SQLColExpr qcol .=. 
sqlexpr) Right var -> do addVarRep var (SQLColExpr qcol) return ((col, SQLNullExpr), SQLTrueCond) data SQLTrans = SQLTrans BuiltIn PredTableMap (Maybe Pred) PredTypeMap data SQLState = SQLState { queryKeys:: [(String, [Expr])], updateKey:: Maybe (String, [Expr]), ksDeleteProp :: [PredName], ksDeleteObj :: Bool, ksInsertProp :: [PredName], ksInsertObj :: Bool, ksQuery :: Bool, env :: Set Var, deleteConditional :: Bool } pureOrExecF :: SQLTrans -> Set Var -> FormulaT -> StateT SQLState Maybe () pureOrExecF (SQLTrans (BuiltIn builtin) predtablemap nextid ptm) dvars (FAtomicA _ (Atom n@(PredName _ pn) args)) = do ks <- get if Just n == (predName <$> nextid) then lift Nothing else if isJust (updateKey ks) then lift Nothing else if n `member` builtin then return () else case lookup n predtablemap of Nothing -> -- trace ("pureOrExecF: cannot find table for predicate " ++ show n ++ " ignored, the predicate nextid is " ++ show nextid) $ return () Just (tablename, _) -> case lookup n ptm of Nothing -> error ("pureOrExecF: cannot find predicate " ++ show n ++ " available predicates: " ++ show ptm) Just pt -> do let key = keyComponents pt args put ks{queryKeys = queryKeys ks `union` [(tablename, key)]} pureOrExecF trans@(SQLTrans _ _ _ ptm) dvars form@(FSequencingA _ form1 form2) = do pureOrExecF trans dvars form1 let dvars2 = determinedVars (toDSP ptm) dvars form1 pureOrExecF trans dvars2 form2 pureOrExecF trans dvars form@(FParA _ form1 form2) = -- only works if all vars are determined if freeVars form1 `Set.isSubsetOf` dvars && freeVars form2 `Set.isSubsetOf` dvars then do ks <- get if isJust (updateKey ks) then lift Nothing else do put ks {ksQuery = True} pureOrExecF trans dvars form1 pureOrExecF trans dvars form2 else lift Nothing pureOrExecF trans dvars form@(FChoiceA _ form1 form2) = -- only works if all vars are determined if freeVars form1 `Set.isSubsetOf` dvars && freeVars form2 `Set.isSubsetOf` dvars then do ks <- get if isJust (updateKey ks) then lift Nothing else 
do put ks {ksQuery = True} pureOrExecF trans dvars form1 pureOrExecF trans dvars form2 else lift Nothing pureOrExecF (SQLTrans builtin predtablemap _ ptm) _ form@(FInsertA _ (Lit sign0 (Atom pred0 args))) = do ks <- get if ksQuery ks || deleteConditional ks then lift Nothing else case lookup pred0 ptm of Nothing -> error ("pureOrExecF: cannot find predicate " ++ show pred0 ++ " available predicates: " ++ show ptm) Just pt -> do let key = keyComponents pt args tablename = case lookup pred0 predtablemap of Just (tn, _) -> tn Nothing -> error ("pureOrExecF: cannot find table for predicate " ++ show pred0) let isObject = isObjectPred ptm pred0 let isDelete = case sign0 of Pos -> False Neg -> True ks' <- case updateKey ks of Nothing -> if isObject then if isDelete then if not (superset [(tablename, key)] (queryKeys ks)) then lift Nothing else return ks{updateKey = Just (tablename, key), ksDeleteObj = True} else if not (superset [(tablename, key)] (queryKeys ks)) then lift Nothing else return ks{updateKey = Just (tablename, key), ksInsertObj = True} else if isDelete then if not (superset [(tablename, key)] (queryKeys ks)) then lift Nothing else return ks{updateKey = Just (tablename, key), ksDeleteProp = [pred0]} else if not (superset [(tablename, key)] (queryKeys ks)) then lift Nothing else return ks{updateKey = Just (tablename, key), ksInsertProp = [pred0]} Just key' -> if isObject then if isDelete then if not (null (ksInsertProp ks)) || ksInsertObj ks || ksDeleteObj ks || (tablename, key) /= key' || not (superset [(tablename, key)] (queryKeys ks)) then lift Nothing else return ks{ksDeleteObj = True} else lift Nothing else if isDelete then if not (null (ksInsertProp ks)) || ksInsertObj ks || ksDeleteObj ks || pred0 `elem` (ksDeleteProp ks) || (tablename, key) /= key' || not (superset [(tablename, key)] (queryKeys ks)) then lift Nothing else return ks{ksDeleteProp = ksDeleteProp ks ++ [pred0]} else if not (null (ksDeleteProp ks)) || ksDeleteObj ks || pred0 `elem` 
(ksInsertProp ks) || (tablename, key) /= key' then lift Nothing else return ks{ksInsertProp = ksInsertProp ks ++ [pred0]} let isDeleteConditional = isDelete && not (all isVar (propComponents pt args)) -- || (not isDelete && not (all isVar (keyComponents pt args))) put ks'{deleteConditional = isDeleteConditional} pureOrExecF _ _ (FOneA _) = return () pureOrExecF _ _ (FZeroA _) = return () pureOrExecF trans dvars for@(AggregateA _ Not form) = do ks <- get if isJust (updateKey ks) then lift Nothing else do put ks {ksQuery = True} pureOrExecF trans dvars form pureOrExecF trans dvars for@(AggregateA _ Exists form) = do ks <- get if isJust (updateKey ks) then lift Nothing else do put ks {ksQuery = True} pureOrExecF trans dvars form pureOrExecF _ _ (AggregateA _ _ _) = lift Nothing sequenceF :: SQLTrans -> FormulaT -> StateT SQLState Maybe () sequenceF (SQLTrans _ _ (Just nextid1) _) (FAtomicA _ (Atom p [_])) | predName nextid1 == p = return () sequenceF _ _ = lift Nothing limitF :: SQLTrans -> Set Var -> FormulaT -> StateT SQLState Maybe () limitF trans dvars (AggregateA _ (Limit _) form) = do ks <- get put ks {ksQuery = True} limitF trans dvars form limitF trans dvars form = orderByF trans dvars form orderByF :: SQLTrans -> Set Var -> FormulaT -> StateT SQLState Maybe () orderByF trans dvars (AggregateA _ (OrderByAsc _) form) = do ks <- get put ks {ksQuery = True} orderByF trans dvars form orderByF trans dvars (AggregateA _ (OrderByDesc _) form) = do ks <- get put ks {ksQuery = True} orderByF trans dvars form orderByF trans dvars form = distinctF trans dvars form distinctF :: SQLTrans -> Set Var -> FormulaT -> StateT SQLState Maybe () distinctF trans dvars (AggregateA _ Distinct form) = do ks <- get put ks {ksQuery = True} distinctF trans dvars form distinctF trans dvars form = summarizeF trans dvars form summarizeF :: SQLTrans -> Set Var -> FormulaT -> StateT SQLState Maybe () summarizeF trans dvars (AggregateA _ (Summarize _ _) form) = do ks <- get put ks {ksQuery = 
True} summarizeF trans dvars form summarizeF trans dvars form = pureOrExecF trans dvars form instance IGenericDatabase01 SQLTrans where type GDBQueryType SQLTrans = (Bool, [Var], [CastType], String, [Var]) type GDBFormulaType SQLTrans = FormulaT gTranslateQuery trans ret query@(vtm :< _) env = do let (SQLTrans builtin predtablemap nextid ptm) = trans env2 = foldl (\map2 key@(Var w) -> insert key (SQLParamExpr w) map2) empty env (sql@(retvars, sqlquery, _), ts') = runNew (runStateT (translateQueryToSQL (toAscList ret) (stripAnnotations query)) (TransState {builtin = builtin, predtablemap = predtablemap, repmap = env2, tablemap = empty, nextid = nextid, ptm = ptm})) retvartypes = map (\var0 -> case lookup var0 vtm of Nothing -> error ("var type not found: " ++ show var0 ++ " from " ++ show query) Just (ParamType _ _ _ _ p) -> p) retvars debugM "SQL" ("gTranslateQuery of SQLTrans: " ++ show env ++ "\n-----------------\n" ++ serialize query ++ "\n---------------->\n" ++ serialize sqlquery ++ "\n----------------") return (case sqlquery of SQLQueryStmt _ -> True _ -> False, retvars, retvartypes, serialize sqlquery, params sql) gSupported trans ret form env = let initstate = SQLState [] Nothing [] False [] False (not (null ret)) env False in layeredF form && (isJust (evalStateT (limitF trans env form) initstate ) || isJust (evalStateT (sequenceF trans form) initstate))
xu-hao/QueryArrow
QueryArrow-db-sql-common/src/QueryArrow/SQL/SQL.hs
bsd-3-clause
49,536
353
23
15,666
10,747
6,279
4,468
913
30
module Types where

import Data.Char
import System.FilePath
import Text.Printf

-- | Command-line options for the UrEmbed tool.
data Args = A
  { bver :: Bool                -- ^ print version and exit
  , out_c :: FilePath           -- ^ output C source file
  , out_h :: FilePath           -- ^ output C header file
  , out_urs :: FilePath         -- ^ output Ur/Web signature file
  , out_wrapper :: FilePath     -- ^ output Ur/Web wrapper file
  , out_ffi_js :: FilePath      -- ^ output JavaScript FFI file
  , mangle_css_url :: Bool      -- ^ whether to mangle URLs found in CSS
  , inp :: FilePath             -- ^ input file to embed
  }

-- out_ffi_js a = wrap (out_ffi_js_lib a) where
--   wrap [] = []
--   wrap p = (dropExtension p) ++ "_js.urs"

-- | Guess the Ur/Web module name for an arbitrary file path.
guessModName :: FilePath -> String
guessModName = uwModName . (++ ".urs")

-- | Derive an Ur/Web module name from a @.urs@/@.ur@ file path:
-- take the base name, replace non-identifier characters with underscores,
-- reject names starting with a digit, and capitalise the first letter.
uwModName :: FilePath -> String
uwModName = upper1 . notnum . map under . takeFileName . dropExtension . checkurs
  where
    -- only .urs / .ur inputs are meaningful here
    checkurs x
      | (takeExtension x) == ".urs" = x
      | (takeExtension x) == ".ur" = x
      | otherwise = error $ "uwModName: FILE.urs expected (got " ++ x ++ ")"
    under c
      | c `elem` "_-. /" = '_'
      | otherwise = c
    upper1 [] = []
    upper1 (x:xs) = (toUpper x) : xs
    notnum [] = error $ "uwModName: Empty name"
    notnum n@(x:xs)
      | isDigit x = error $ "uwModName: Names starting from digit is not allowed (got " ++ n ++ ")"
      | otherwise = n

-- | Ur-side names of the generated accessors.
urblobfun :: String
urblobfun = "blob"

urtextfun :: String
urtextfun = "text"

-- C-side names of the generated accessors.
-- NOTE: intentionally left without signatures so printf's return type stays
-- polymorphic (PrintfType) for any external callers.
cblobfun a = printf "uw_%s_%s" (uwModName (out_urs a)) urblobfun
ctextfun a = printf "uw_%s_%s" (uwModName (out_urs a)) urtextfun

type Url = String

css_mangle_flag :: String
css_mangle_flag = "css-mangle-urls"
grwlf/cake3
app/UrEmbed/Types.hs
bsd-3-clause
1,301
0
12
314
407
217
190
33
3
module Renkon.Cli
  ( start
  ) where

import ClassyPrelude

import Options.Declarative as Options

import Renkon.Command.Exec as ExecCommand
import Renkon.Command.Info as InfoCommand
import Renkon.Command.List as ListCommand
import Renkon.Command.Path as PathCommand
import Renkon.Config

-- | Entry point: parse the command line and dispatch to one of the
-- generator-manager subcommands.
start :: IO ()
start = run_ $
  Group "Generator manager"
  [ subCmd "list" list'
  , subCmd "info" info'
  , subCmd "exec" exec'
  , subCmd "path" path'
  ]

-- | List available generators; @-d@/@--detail@ shows extra information.
-- (fixed typo in the help text: "infomation" -> "information")
list' :: Flag "d" '["detail"] "" "show detail information" Bool
      -> Cmd "List available generators" ()
list' detail = liftIO $ do
  let detail' = get detail
  config <- boot
  ListCommand.run config detail'

-- | Display detail information of a single generator.
info' :: Arg "<GENERATOR>" String
      -> Cmd "Display detail information of the generator" ()
info' generator = liftIO $ do
  let generator' = pack $ get generator
  config <- boot
  InfoCommand.run config generator'

-- | Launch a generator, forwarding any extra arguments to it.
exec' :: Arg "<GENERATOR>" String
      -> Arg "[ARGS...]" [String]
      -> Cmd "Launch the generator" ()
exec' generator args = liftIO $ do
  let generator' = pack $ get generator
      args' = pack <$> get args
  config <- boot
  ExecCommand.run config generator' args'

-- | Display renkon path information.
path' :: Cmd "Display path information" ()
path' = liftIO $ do
  config <- boot
  PathCommand.run config
kayhide/renkon
src/Renkon/Cli.hs
bsd-3-clause
1,264
0
12
263
383
194
189
-1
-1