code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Pipes.Network.IRC.Run where
import Control.Monad (forever)
import qualified Data.Foldable as F
import Data.Maybe (fromJust, isJust)
import Data.Monoid
import qualified Network.Socket.ByteString as S
import Pipes
import Pipes.Network.TCP
import qualified Pipes.Prelude as P
import Pipes.Network.IRC.Core
import Pipes.Network.IRC.Types hiding (HostName)
-- | Connect to the configured IRC server, register the connection, join
-- all configured channels, then run the message pipeline forever.
--
-- NOTE(review): registration and JOINs use bare 'S.send', which may write
-- fewer bytes than requested — presumably fine for these short commands,
-- but worth confirming with 'sendAll'.
runIrc :: IRCSettings -> IO ()
runIrc (settings@IRCSettings{..}) =
  connect host port $ \(sock,_) -> do
    -- Register: NICK first, then USER, as the IRC protocol expects.
    S.send sock $ mconcat ["NICK ", nick, "\n",
                           "USER ", nick, " bot bot: ", nick, "\n\n"]
    -- Join every configured channel.
    F.forM_ channels $ \c -> S.send sock ("JOIN " <> c <> "\n")
    -- For every parsed message: first answer server PINGs (keeps the
    -- connection alive), then feed it through the user filter and hook.
    runEffect $ forever $
      for (parsedMessages sock) $ \msg -> do
        yield msg >-> handlePing >-> writeMsg sock
        yield msg >-> filterMsg settings
              >-> hook settings
              >-> writeMsg sock
-- | Stream of successfully parsed IRC messages read from the socket.
--
-- Failed parses ('Nothing' values produced by 'parseMsg') are dropped:
-- 'P.concat' flattens each 'Maybe' into zero or one elements.  This
-- replaces the previous @P.filter isJust >-> P.map fromJust@ pair, which
-- the author had flagged as ugly and which relied on the partial
-- function 'fromJust'.
parsedMessages sock =
  fromSocket sock 512
  >-> parseMsg
  >-> P.concat
{-
/---->p1-->p2-->
Source ---->/ \----> Sink
\ /
\---->p3------->
Source :: Producer A m r
p1 :: Pipe A B m r
p2 :: Pipe B C m r
p3 :: Pipe A C m r
Sink :: Consumer C m r
-}
| co-dan/pipes-irc | src/Pipes/Network/IRC/Run.hs | bsd-3-clause | 1,424 | 0 | 16 | 473 | 330 | 182 | 148 | 30 | 1 |
module Gidl.Types.AST where
-- | Name of a struct field or enum member.
type Identifier = String
-- | Name under which a type is registered in a 'TypeEnv'.
type TypeName = String
-- | Association list from type names to their definitions.
data TypeEnv
  = TypeEnv [(TypeName, Type)]
  deriving (Eq, Show)
-- | A 'TypeEnv' containing no bindings.
emptyTypeEnv :: TypeEnv
emptyTypeEnv = TypeEnv []
-- | A Gidl type: either a named struct with typed fields, or a
-- primitive type.
data Type
  = StructType String [(Identifier, Type)]
  | PrimType PrimType
  deriving (Eq, Show)
-- | Primitive types: a named wrapper, a named enum over a fixed bit
-- width, or a bare atom.
data PrimType
  = Newtype String PrimType
  | EnumType String Bits [(Identifier, Integer)]
  | AtomType Atom
  deriving (Eq, Show)
-- | Atomic machine types: signed/unsigned integers of a given width,
-- or IEEE floating point.
data Atom
  = AtomInt Bits
  | AtomWord Bits
  | AtomFloat
  | AtomDouble
  deriving (Eq, Show)
-- | Supported integer bit widths.
data Bits
  = Bits8
  | Bits16
  | Bits32
  | Bits64
  deriving (Eq, Show)
| GaloisInc/gidl | src/Gidl/Types/AST.hs | bsd-3-clause | 600 | 0 | 8 | 133 | 202 | 119 | 83 | 29 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.DrawElementsBaseVertex
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/ARB/draw_elements_base_vertex.txt ARB_draw_elements_base_vertex> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.DrawElementsBaseVertex (
-- * Functions
glDrawElementsBaseVertex,
glDrawElementsInstancedBaseVertex,
glDrawRangeElementsBaseVertex,
glMultiDrawElementsBaseVertex
) where
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/DrawElementsBaseVertex.hs | bsd-3-clause | 807 | 0 | 4 | 87 | 46 | 37 | 9 | 6 | 0 |
-- Quantities
-- Copyright (C) 2015-2016 Moritz Schulte <mtesseract@silverratio.net>
-- API is not necessarily stable.
{-# LANGUAGE OverloadedStrings #-}
module Quantities.Printer
( printQuantity
, printMixed
, printFloat
, printRational
, printNumber
) where
import Control.Lens
import Data.Ratio (numerator, denominator)
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as T
import Formatting
import Quantities.Types
import Quantities.Units
import Quantities.Util
---------------------
-- Pretty Printers --
---------------------
-- | Render a rational as a plain @numerator/denominator@ fraction.
printRational :: Rational -> Text
printRational r = format (int % "/" % int) (numerator r) (denominator r)

-- | Render a rational in floating point notation.
printFloat :: Rational -> Text
printFloat = format float
-- | Convert a rational number into its string representation as a
-- mixed number; e.g. 1 % 2 => "1/2".
-- | Convert a rational number into its string representation as a
-- mixed number: @3 % 2@ becomes @\"1 1\/2\"@, @1 % 2@ becomes
-- @\"1\/2\"@, zero becomes @\"0\"@, and negative values get a leading
-- minus sign.
printMixed :: Rational -> Text
printMixed r =
  format (text % text % text % text) sign wholePart sep fracPart
  where
    absR          = abs r
    num           = numerator absR
    denom         = denominator absR
    (whole, rest) = num `divMod` denom
    -- Leading minus for negative input.
    sign
      | r < 0     = "-"
      | otherwise = ""
    -- Integer part; "0" only when the whole value is zero.
    wholePart
      | whole /= 0 = showText whole
      | rest == 0  = "0"
      | otherwise  = ""
    -- Proper-fraction part, if any.
    fracPart
      | rest == 0 = ""
      | otherwise = format (int % "/" % int) rest denom
    -- A blank only when both parts are present.
    sep
      | T.null wholePart || T.null fracPart = ""
      | otherwise                           = " "
-- | This is the list of denomitors we prefer, when possible. e.g.,
-- during printing Rationals or when trying to clever approximate
-- quantities.
-- Small denominators that read comfortably as fractions.
goodDenominators :: [Integer]
goodDenominators = [2, 3, 4]
-- | Convert a rational number into a convenient string
-- representation: If the denominator is contained in a list of "good"
-- denominators then display the number as a mixed number, otherwise
-- display it as a real number.
-- | Render a rational conveniently: as a mixed number when its
-- denominator is one of the preferred 'goodDenominators', otherwise in
-- floating point.
printNumber :: Rational -> Text
printNumber x
  | abs (denominator x) `elem` goodDenominators = printMixed x
  | otherwise                                   = format float x
-- | Pretty print a Quantity.
-- | Pretty print a 'Quantity': the number (rendered with the supplied
-- printer) followed by its unit, separated by a blank.
printQuantity :: (Rational -> Text) -> Quantity -> Text
printQuantity render q =
  format (text % " " % text) renderedNumber renderedUnit
  where
    renderedNumber = render (view number q)
    renderedUnit   = printUnit (view unit q)
| mtesseract/quantities | src/Quantities/Printer.hs | bsd-3-clause | 3,280 | 0 | 13 | 1,162 | 555 | 320 | 235 | 54 | 6 |
module HaskellTools.Hackage ( producePackages
, producePackagesWithDeps
, PackageWithDeps
) where
import qualified Distribution.Hackage.DB as DB
import Distribution.Package
import Distribution.PackageDescription
import Language.Haskell.Extension
import Control.Monad (unless, liftM)
import Pipes
import Data.Maybe
type PackageWithDeps = (PackageDescription, [Extension], [Dependency])
-- | Stream Hackage package descriptions one page at a time, stopping
-- as soon as a page comes back empty.
producePackages :: Int -> Producer [PackageDescription] IO ()
producePackages page = do
  ps <- lift (packages page)
  unless (null ps) $ do
    yield ps
    producePackages (page + 1)
-- | Stream packages together with their extensions and dependencies,
-- one page at a time, stopping on the first empty page.
producePackagesWithDeps :: Int -> Producer [PackageWithDeps] IO ()
producePackagesWithDeps page = do
  ps <- lift (packagesWithDeps page)
  unless (null ps) $ do
    yield ps
    producePackagesWithDeps (page + 1)
-- | Like 'packagesWithDeps', but keeping only the package descriptions.
packages :: Int -> IO [PackageDescription]
packages page = fmap (map (\(pd, _, _) -> pd)) (packagesWithDeps page)
-- | Read the local Hackage index and return one page (of 'pageSize'
-- entries) of package descriptions with their extensions and
-- dependencies, taking the latest version of each package.
packagesWithDeps :: Int -> IO [PackageWithDeps]
packagesWithDeps page = liftM (pageSlice . pkgsWithDeps . DB.toAscList) DB.readHackage
  where
    -- NOTE(review): 'head' assumes every package has at least one
    -- version in the index — true for Hackage data, but partial.
    latestVersion = head . DB.toDescList . snd
    -- Bundle description, extensions and dependencies for one package.
    extractBuildInfo gp = (packageDescription gp, extractExts gp, extractDeps gp)
    extractExts gp = lib2exts (condLibrary gp) ++ exe2exts (condExecutables gp)
    extractDeps gp = lib2deps (condLibrary gp) ++ exe2deps (condExecutables gp)
    exe2deps = concatMap (condTreeConstraints . snd)
    lib2deps = concatMap condTreeConstraints . maybeToList
    lib2exts = buildInfo2exts . lib2buildInfo
    exe2exts = buildInfo2exts . exe2buildInfo
    -- All extensions mentioned by a build-info: default, other and old.
    buildInfo2exts = concatMap (\b -> defaultExtensions b
                                   ++ otherExtensions b
                                   ++ oldExtensions b)
    exe2buildInfo = map (buildInfo . condTreeData . snd)
    lib2buildInfo = map (libBuildInfo . condTreeData) . maybeToList
    pkgsWithDeps = map (extractBuildInfo . snd . latestVersion)
    -- Inclusive slice [from .. to] of a list.
    slice from to xs = take (to - from + 1) (drop from xs)
    -- BUG FIX: the upper bound was @page * pageSize + pageSize@, which
    -- with the inclusive 'slice' yielded pageSize + 1 elements, making
    -- consecutive pages overlap by one package.  The bound is now
    -- @... + pageSize - 1@, so each page holds exactly 'pageSize' items.
    pageSlice = slice (page * pageSize) (page * pageSize + pageSize - 1)
    pageSize = 1000
| diogob/haskell-tools | src/HaskellTools/Hackage.hs | bsd-3-clause | 2,184 | 0 | 12 | 543 | 642 | 333 | 309 | 44 | 1 |
import Test.QuickCheck
import Test.QuickCheck.Test (isSuccess)
import Control.Monad
import System.Exit (exitFailure)
import PredicateSpec
import FOLSpec
main :: IO ()
main = do
  -- Run every property and collect the results.
  results <- sequence
    [ quickCheckResult prop_predicate_eq_itself
    , quickCheckResult prop_predicate_cmp_itself
    , quickCheckResult prop_predicate_ord
    -- , quickCheckResult prop_parsing_back -- Will be reinstated once I figure out how to handle top/bot printing.
    , quickCheckResult prop_fol_ord
    , quickCheckResult prop_fol_self_eq
    ]
  -- Signal failure to the caller if any property failed.
  unless (all isSuccess results) exitFailure
| PhDP/Sphinx-AI | tests/Properties.hs | mit | 628 | 0 | 12 | 121 | 132 | 66 | 66 | 17 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE UndecidableInstances #-}
{- |
Module : Yi.CompletionTree
License : GPL-2
Maintainer : yi-devel@googlegroups.com
Stability : experimental
Portability : portable
Little helper for completion interfaces.
Intended to be imported qualified:
>import qualified Yi.CompletionTree as CT
-}
module Yi.CompletionTree (
-- * CompletionTree type
CompletionTree (CompletionTree),
-- * Lists
fromList, toList,
-- * Modification
complete, update,
-- * Debugging
pretty,
-- ** Lens
unCompletionTree
) where
import Control.Arrow (first)
import Data.Function (on)
import Data.List (partition, maximumBy, intercalate)
import qualified Data.Map.Strict as M
import Data.Map.Strict (Map)
import Data.Maybe (isJust, fromJust, listToMaybe, catMaybes)
import qualified Data.ListLike as LL
import Data.ListLike (ListLike)
import Lens.Micro.Platform (over, Lens', _2, (.~), (&))
import Data.Binary (Binary)
-- | A CompletionTree is a map of partial completions.
--
-- Example:
--
-- fromList ["put","putStr","putStrLn","print","abc"]
--
-- Gives the following tree:
--
-- / \
-- "p" "abc"
-- / \
-- "ut" "rint"
-- / \
-- "Str" ""
-- / \
-- "Ln" ""
--
-- (The empty strings are needed to denote the end of a word)
-- (A CompletionTree is not limited to a binary tree)
newtype CompletionTree a = CompletionTree {_unCompletionTree :: (Map a (CompletionTree a))}
deriving (Monoid, Eq, Binary)
-- | Lens onto the underlying 'Map' of a 'CompletionTree'.
unCompletionTree :: Lens' (CompletionTree a) (Map a (CompletionTree a))
unCompletionTree f ct =
  fmap (\m -> ct {_unCompletionTree = m}) (f (_unCompletionTree ct))
instance (Ord a, Show a, ListLike a i) => Show (CompletionTree a) where
show ct = "fromList " ++ show (toList ct)
-- | This function converts a list of completable elements to a CompletionTree
-- It finds elements that share a common prefix and groups them.
--
-- prop> fromList . toList = id
fromList :: (Ord a, ListLike a i, Eq i) => [a] -> CompletionTree a
fromList [] = mempty
fromList (x:xs)
  -- An empty element marks the end of a word: record it and recurse.
  | x == mempty = over unCompletionTree (M.insert mempty mempty) (fromList xs)
  -- Otherwise pick the non-empty prefix of x shared by the most
  -- remaining items (Nothing when there is a tie).
  | otherwise = case maximumBy' (compare `on` childrenIn xs) (tail $ LL.inits x) of
      -- No unique best prefix: store x as its own leaf.
      Nothing -> over unCompletionTree (M.insert x mempty) (fromList xs)
      Just parent -> case first (x:) $ partition (parent `LL.isPrefixOf`) xs of
        -- Only x itself starts with the prefix: store it whole.
        ([_],rest) -> over unCompletionTree (M.insert parent mempty) $ fromList rest
        -- Several items share the prefix: group them under it with the
        -- prefix stripped (stripPrefix cannot fail here, since every
        -- grouped item was just checked to start with the prefix).
        (hasParent, rest) -> over unCompletionTree (M.insert parent (fromList $
          map (fromJust . LL.stripPrefix parent) hasParent)) $ fromList rest
  -- A parent is the prefix and the children are the items with the parent as prefix
  where childrenIn list parent = length $ filter (parent `LL.isPrefixOf`) list
-- | The largest element of a non-empty structure with respect to the
-- given comparison function, Nothing if there are multiple 'largest' elements.
maximumBy' :: Eq a => (a -> a -> Ordering) -> [a] -> Maybe a
-- NOTE(review): 'maximumBy' is partial on [] — callers must supply a
-- non-empty list (as 'fromList' does via @tail $ LL.inits x@ on a
-- non-empty x).
maximumBy' cmp l | atleast 2 (== max') l = Nothing
                 | otherwise = Just max'
  where max' = maximumBy cmp l
-- | True when at least @n@ elements of the list satisfy the predicate.
-- This short-circuits if the condition is met n times before the end of the list.
atleast :: Int -> (a -> Bool) -> [a] -> Bool
atleast 0 _ _ = True
atleast _ _ [] = False
atleast n cmp' (x:xs) | cmp' x = atleast (n - 1) cmp' xs
                      | otherwise = atleast n cmp' xs
-- | Complete as much as possible without guessing.
--
-- Examples:
--
-- >>> complete $ fromList ["put","putStrLn","putStr"]
-- ("put", fromList ["","Str","StrLn"])
--
-- >>> complete $ fromList ["put","putStr","putStrLn","abc"]
-- ("", fromList ["put","putStr","putStrLn","abc"])
complete :: (Eq i, Ord a, ListLike a i) => CompletionTree a -> (a, CompletionTree a)
complete (CompletionTree ct)
  -- Exactly one branch: its key is the unambiguous continuation.
  | M.size ct == 1 = if snd (M.elemAt 0 ct) == mempty
      -- The branch ends a word: keep an explicit end-of-word marker as
      -- the remaining tree.
      then M.elemAt 0 ct & _2 .~ fromList [mempty]
      else M.elemAt 0 ct
  -- Zero or several branches: nothing can be completed unambiguously.
  | otherwise = (mempty,CompletionTree ct)
-- | Update the CompletionTree with new information.
-- An empty list means that there is no completion left.
-- A [mempty] means that the end of a word is reached.
--
-- Examples:
--
-- >>> update (fromList ["put","putStr"]) "p"
-- fromList ["ut","utStr"]
--
-- >>> update (fromList ["put","putStr"]) "put"
-- fromList ["","Str"]
--
-- >>> update (fromList ["put","putStr"]) "putS"
-- fromList ["tr"]
--
-- >>> update (fromList ["put"]) "find"
-- fromList []
--
-- >>> update (fromList ["put"]) "put"
-- fromList [""]
update :: (Ord a, ListLike a i, Eq i) => CompletionTree a -> a -> CompletionTree a
update (CompletionTree ct) p
  -- p is empty, this case just doesn't make sense:
  | mempty == p = error "Can't update a CompletionTree with a mempty"
  -- p is a key in the map ct that doesn't have children:
  -- (This means the end of a word is reached)
  | isJust one && mempty == fromJust one = CompletionTree $ M.singleton mempty mempty
  -- p is a key in the map ct with children:
  | isJust one = fromJust one
  -- a substring of p is a key in ct:
  | isJust remaining = uncurry update $ fromJust remaining
  -- p is a substring of a key in ct:
  | otherwise = CompletionTree $ M.mapKeys fromJust
              $ M.filterWithKey (const . isJust)
              $ M.mapKeys (LL.stripPrefix p) ct
  where
    -- Direct lookup of p as a key.
    one = M.lookup p ct
    -- The first proper prefix of p that is a key, paired with the
    -- still-unmatched remainder of p.
    remaining = listToMaybe . catMaybes $
      map (\p' -> (,fromJust $ LL.stripPrefix p' p) <$> M.lookup p' ct) (tail $ LL.inits p)
-- | Converts a CompletionTree to a list of completions.
--
-- prop> toList . fromList = sort . nub
--
-- Examples:
--
-- >>> toList mempty
-- []
--
-- >>> toList (fromList ["a"])
-- ["a"]
--
-- >>> toList (fromList ["a","a","a"])
-- ["a"]
--
-- >>> toList (fromList ["z","x","y"])
-- ["x","y","z"]
-- | Flatten a 'CompletionTree' back into the sorted list of complete
-- words it represents.
toList :: (Ord a, ListLike a i) => CompletionTree a -> [a]
toList t
  | t == mempty = []
  | otherwise   = go t
  where
    -- An empty subtree marks the end of one word.
    go (CompletionTree m)
      | M.null m  = [mempty]
      | otherwise =
          concat [ map (k `LL.append`) (go v) | (k, v) <- M.toAscList m ]
-- TODO: make this function display a tree and rename to showTree
-- | For debugging purposes.
--
-- Example:
--
-- >>> putStrLn $ pretty $ fromList ["put", "putStr", "putStrLn"]
-- ["put"[""|"Str"[""|"Ln"]]]
-- | For debugging purposes: render the raw branch structure, siblings
-- separated by @|@ and children wrapped in brackets.
pretty :: Show a => CompletionTree a -> String
pretty (CompletionTree m)
  | M.null m  = ""
  | otherwise = "[" ++ intercalate "|" entries ++ "]"
  where
    entries = [ shows k (pretty v) | (k, v) <- M.toAscList m ]
| noughtmare/yi | yi-core/src/Yi/CompletionTree.hs | gpl-2.0 | 6,841 | 0 | 22 | 1,640 | 1,626 | 881 | 745 | 79 | 3 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
<title>SMOS Toolbox Help</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view mergetype="javax.help.UniteAppendMerge">
<name>TOC</name>
<label>Contents</label>
<type>javax.help.TOCView</type>
<data>toc.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">JavaHelpSearch</data>
</view>
</helpset>
| bcdev/smos-box | smos-visat/src/main/resources/doc/help/smosbox.hs | gpl-3.0 | 773 | 77 | 18 | 165 | 291 | 143 | 148 | -1 | -1 |
#!/usr/bin/env stack
{- stack runghc --verbosity info --package pandoc-types -}
import Text.Pandoc.Builder
import Text.Pandoc.JSON
main :: IO ()
main = toJSONFilter dropWebBlocks
-- | Replace any @Div@ carrying exactly the class \"web\" (and no id or
-- key\/value attributes) with an empty 'Plain' block; pass everything
-- else through unchanged.
dropWebBlocks :: Block -> Block
dropWebBlocks blk =
  case blk of
    Div ("", ["web"], []) _ -> Plain []
    other                   -> other
| ony/hledger | tools/pandoc-drop-web-blocks.hs | gpl-3.0 | 283 | 1 | 11 | 41 | 98 | 49 | 49 | 7 | 1 |
-- Manually write instances.
{-# LANGUAGE CPP #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Twitter.Manual
(
Metadata(..)
, Geo(..)
, Story(..)
, Result(..)
) where
import Prelude ()
import Prelude.Compat
import Control.Applicative
import Data.Monoid ((<>))
import Prelude hiding (id)
import Twitter
#ifndef HAS_BOTH_AESON_AND_BENCHMARKS
import Data.Aeson hiding (Result)
#else
import "aeson" Data.Aeson hiding (Result)
import qualified "aeson-benchmarks" Data.Aeson as B
#endif
-- | Hand-written aeson encoder for 'Metadata': both the 'Value' and the
-- direct 'Encoding' paths emit the single @result_type@ field.
instance ToJSON Metadata where
  toJSON (Metadata rt) = object ["result_type" .= rt]
  toEncoding (Metadata rt) = pairs ("result_type" .= rt)
-- | Decoder for 'Metadata': requires a JSON object carrying
-- @result_type@; any other JSON value fails to parse.
instance FromJSON Metadata where
  parseJSON (Object v) = Metadata <$> v .: "result_type"
  parseJSON _          = empty
instance ToJSON Geo where
toJSON Geo{..} = object [
"type_" .= type_
, "coordinates" .= coordinates
]
toEncoding Geo{..} = pairs $
"type_" .= type_
<> "coordinates" .= coordinates
instance FromJSON Geo where
parseJSON (Object v) = Geo <$>
v .: "type_"
<*> v .: "coordinates"
parseJSON _ = empty
instance ToJSON Story where
toJSON Story{..} = object [
"from_user_id_str" .= from_user_id_str
, "profile_image_url" .= profile_image_url
, "created_at" .= created_at
, "from_user" .= from_user
, "id_str" .= id_str
, "metadata" .= metadata
, "to_user_id" .= to_user_id
, "text" .= text
, "id" .= id_
, "from_user_id" .= from_user_id
, "geo" .= geo
, "iso_language_code" .= iso_language_code
, "to_user_id_str" .= to_user_id_str
, "source" .= source
]
toEncoding Story{..} = pairs $
"from_user_id_str" .= from_user_id_str
<> "profile_image_url" .= profile_image_url
<> "created_at" .= created_at
<> "from_user" .= from_user
<> "id_str" .= id_str
<> "metadata" .= metadata
<> "to_user_id" .= to_user_id
<> "text" .= text
<> "id" .= id_
<> "from_user_id" .= from_user_id
<> "geo" .= geo
<> "iso_language_code" .= iso_language_code
<> "to_user_id_str" .= to_user_id_str
<> "source" .= source
instance FromJSON Story where
parseJSON (Object v) = Story <$>
v .: "from_user_id_str"
<*> v .: "profile_image_url"
<*> v .: "created_at"
<*> v .: "from_user"
<*> v .: "id_str"
<*> v .: "metadata"
<*> v .: "to_user_id"
<*> v .: "text"
<*> v .: "id"
<*> v .: "from_user_id"
<*> v .: "geo"
<*> v .: "iso_language_code"
<*> v .: "to_user_id_str"
<*> v .: "source"
parseJSON _ = empty
instance ToJSON Result where
toJSON Result{..} = object [
"results" .= results
, "max_id" .= max_id
, "since_id" .= since_id
, "refresh_url" .= refresh_url
, "next_page" .= next_page
, "results_per_page" .= results_per_page
, "page" .= page
, "completed_in" .= completed_in
, "since_id_str" .= since_id_str
, "max_id_str" .= max_id_str
, "query" .= query
]
toEncoding Result{..} = pairs $
"results" .= results
<> "max_id" .= max_id
<> "since_id" .= since_id
<> "refresh_url" .= refresh_url
<> "next_page" .= next_page
<> "results_per_page" .= results_per_page
<> "page" .= page
<> "completed_in" .= completed_in
<> "since_id_str" .= since_id_str
<> "max_id_str" .= max_id_str
<> "query" .= query
instance FromJSON Result where
parseJSON (Object v) = Result <$>
v .: "results"
<*> v .: "max_id"
<*> v .: "since_id"
<*> v .: "refresh_url"
<*> v .: "next_page"
<*> v .: "results_per_page"
<*> v .: "page"
<*> v .: "completed_in"
<*> v .: "since_id_str"
<*> v .: "max_id_str"
<*> v .: "query"
parseJSON _ = empty
#ifdef HAS_BOTH_AESON_AND_BENCHMARKS
instance B.ToJSON Metadata where
toJSON Metadata{..} = B.object [
"result_type" B..= result_type
]
toEncoding Metadata{..} = B.pairs $
"result_type" B..= result_type
instance B.FromJSON Metadata where
parseJSON (B.Object v) = Metadata <$> v B..: "result_type"
parseJSON _ = empty
instance B.ToJSON Geo where
toJSON Geo{..} = B.object [
"type_" B..= type_
, "coordinates" B..= coordinates
]
toEncoding Geo{..} = B.pairs $
"type_" B..= type_
<> "coordinates" B..= coordinates
instance B.FromJSON Geo where
parseJSON (B.Object v) = Geo <$>
v B..: "type_"
<*> v B..: "coordinates"
parseJSON _ = empty
instance B.ToJSON Story where
toJSON Story{..} = B.object [
"from_user_id_str" B..= from_user_id_str
, "profile_image_url" B..= profile_image_url
, "created_at" B..= created_at
, "from_user" B..= from_user
, "id_str" B..= id_str
, "metadata" B..= metadata
, "to_user_id" B..= to_user_id
, "text" B..= text
, "id" B..= id_
, "from_user_id" B..= from_user_id
, "geo" B..= geo
, "iso_language_code" B..= iso_language_code
, "to_user_id_str" B..= to_user_id_str
, "source" B..= source
]
toEncoding Story{..} = B.pairs $
"from_user_id_str" B..= from_user_id_str
<> "profile_image_url" B..= profile_image_url
<> "created_at" B..= created_at
<> "from_user" B..= from_user
<> "id_str" B..= id_str
<> "metadata" B..= metadata
<> "to_user_id" B..= to_user_id
<> "text" B..= text
<> "id" B..= id_
<> "from_user_id" B..= from_user_id
<> "geo" B..= geo
<> "iso_language_code" B..= iso_language_code
<> "to_user_id_str" B..= to_user_id_str
<> "source" B..= source
instance B.FromJSON Story where
parseJSON (B.Object v) = Story <$>
v B..: "from_user_id_str"
<*> v B..: "profile_image_url"
<*> v B..: "created_at"
<*> v B..: "from_user"
<*> v B..: "id_str"
<*> v B..: "metadata"
<*> v B..: "to_user_id"
<*> v B..: "text"
<*> v B..: "id"
<*> v B..: "from_user_id"
<*> v B..: "geo"
<*> v B..: "iso_language_code"
<*> v B..: "to_user_id_str"
<*> v B..: "source"
parseJSON _ = empty
instance B.ToJSON Result where
toJSON Result{..} = B.object [
"results" B..= results
, "max_id" B..= max_id
, "since_id" B..= since_id
, "refresh_url" B..= refresh_url
, "next_page" B..= next_page
, "results_per_page" B..= results_per_page
, "page" B..= page
, "completed_in" B..= completed_in
, "since_id_str" B..= since_id_str
, "max_id_str" B..= max_id_str
, "query" B..= query
]
toEncoding Result{..} = B.pairs $
"results" B..= results
<> "max_id" B..= max_id
<> "since_id" B..= since_id
<> "refresh_url" B..= refresh_url
<> "next_page" B..= next_page
<> "results_per_page" B..= results_per_page
<> "page" B..= page
<> "completed_in" B..= completed_in
<> "since_id_str" B..= since_id_str
<> "max_id_str" B..= max_id_str
<> "query" B..= query
instance B.FromJSON Result where
parseJSON (B.Object v) = Result <$>
v B..: "results"
<*> v B..: "max_id"
<*> v B..: "since_id"
<*> v B..: "refresh_url"
<*> v B..: "next_page"
<*> v B..: "results_per_page"
<*> v B..: "page"
<*> v B..: "completed_in"
<*> v B..: "since_id_str"
<*> v B..: "max_id_str"
<*> v B..: "query"
parseJSON _ = empty
#endif
| tolysz/prepare-ghcjs | spec-lts8/aeson/examples/Twitter/Manual.hs | bsd-3-clause | 8,162 | 0 | 34 | 2,608 | 2,146 | 1,117 | 1,029 | 125 | 0 |
{-# OPTIONS_GHC -Wall #-}
module Type.Type where
import Control.Monad.State (StateT, liftIO)
import qualified Control.Monad.State as State
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Traversable as Traverse (traverse)
import qualified Data.UnionFind.IO as UF
import qualified AST.Type as T
import qualified AST.Variable as Var
import qualified Reporting.Annotation as A
import qualified Reporting.Error.Type as Error
import qualified Reporting.Region as R
-- CONCRETE TYPES
type Type =
TermN Variable
type Variable =
UF.Point Descriptor
type TypeConstraint =
Constraint Type Variable
type TypeScheme =
Scheme Type Variable
-- TYPE PRIMITIVES
data Term1 a
= App1 a a
| Fun1 a a
| EmptyRecord1
| Record1 (Map.Map String a) a
data TermN a
= PlaceHolder String
| AliasN Var.Canonical [(String, TermN a)] (TermN a)
| VarN a
| TermN (Term1 (TermN a))
record :: Map.Map String (TermN a) -> TermN a -> TermN a
record fs rec =
TermN (Record1 fs rec)
-- DESCRIPTORS
data Descriptor = Descriptor
{ _content :: Content
, _rank :: Int
, _mark :: Int
, _copy :: Maybe Variable
}
data Content
= Structure (Term1 Variable)
| Atom Var.Canonical
| Var Flex (Maybe Super) (Maybe String)
| Alias Var.Canonical [(String,Variable)] Variable
| Error
data Flex
= Rigid
| Flex
data Super
= Number
| Comparable
| Appendable
| CompAppend
deriving (Eq)
noRank :: Int
noRank = -1
outermostRank :: Int
outermostRank = 0
noMark :: Int
noMark = 0
initialMark :: Int
initialMark = 1
occursMark :: Int
occursMark =
-1
getVarNamesMark :: Int
getVarNamesMark =
-2
-- CONSTRAINTS
data Constraint a b
= CTrue
| CSaveEnv
| CEqual Error.Hint R.Region a a
| CAnd [Constraint a b]
| CLet [Scheme a b] (Constraint a b)
| CInstance R.Region SchemeName a
type SchemeName = String
data Scheme a b = Scheme
{ _rigidQuantifiers :: [b]
, _flexibleQuantifiers :: [b]
, _constraint :: Constraint a b
, _header :: Map.Map String (A.Located a)
}
-- TYPE HELPERS
infixr 9 ==>
(==>) :: Type -> Type -> Type
(==>) a b =
TermN (Fun1 a b)
(<|) :: TermN a -> TermN a -> TermN a
(<|) f a =
TermN (App1 f a)
-- VARIABLE CREATION
mkDescriptor :: Content -> Descriptor
mkDescriptor content =
Descriptor
{ _content = content
, _rank = noRank
, _mark = noMark
, _copy = Nothing
}
mkAtom :: Var.Canonical -> IO Variable
mkAtom name =
UF.fresh $ mkDescriptor (Atom name)
mkVar :: Maybe Super -> IO Variable
mkVar maybeSuper =
UF.fresh $ mkDescriptor (Var Flex maybeSuper Nothing)
mkNamedVar :: String -> IO Variable
mkNamedVar name =
UF.fresh $ mkDescriptor (Var Flex (toSuper name) Nothing)
mkRigid :: String -> IO Variable
mkRigid name =
UF.fresh $ mkDescriptor (Var Rigid (toSuper name) (Just name))
-- | Infer a type variable's super kind from its name prefix; names with
-- no recognized prefix carry no super kind.
toSuper :: String -> Maybe Super
toSuper name
  | "number"     `List.isPrefixOf` name = Just Number
  | "comparable" `List.isPrefixOf` name = Just Comparable
  | "appendable" `List.isPrefixOf` name = Just Appendable
  | otherwise                           = Nothing
-- CONSTRAINT HELPERS
monoscheme :: Map.Map String (A.Located a) -> Scheme a b
monoscheme headers =
Scheme [] [] CTrue headers
infixl 8 /\
-- | Conjoin two constraints, dropping trivially true operands so we do
-- not accumulate useless 'CTrue's inside 'CAnd'.
(/\) :: Constraint a b -> Constraint a b -> Constraint a b
(/\) left right
  | isTrivial left  = right
  | isTrivial right = left
  | otherwise       = CAnd [left, right]
  where
    isTrivial CTrue = True
    isTrivial _     = False
-- ex qs constraint == exists qs. constraint
ex :: [Variable] -> TypeConstraint -> TypeConstraint
ex fqs constraint =
CLet [Scheme [] fqs constraint Map.empty] CTrue
-- fl qs constraint == forall qs. constraint
fl :: [Variable] -> TypeConstraint -> TypeConstraint
fl rqs constraint =
CLet [Scheme rqs [] constraint Map.empty] CTrue
exists :: (Type -> IO TypeConstraint) -> IO TypeConstraint
exists f =
do v <- mkVar Nothing
ex [v] <$> f (VarN v)
existsNumber :: (Type -> IO TypeConstraint) -> IO TypeConstraint
existsNumber f =
do v <- mkVar (Just Number)
ex [v] <$> f (VarN v)
-- CONVERT TO SOURCE TYPES
-- TODO: Attach resulting type to the descriptor so that you
-- never have to do extra work, particularly nice for aliased types
toSrcType :: Variable -> IO T.Canonical
toSrcType variable =
do usedNames <- getVarNames variable
State.evalStateT (variableToSrcType variable) (makeNameState usedNames)
variableToSrcType :: Variable -> StateT NameState IO T.Canonical
variableToSrcType variable =
do descriptor <- liftIO $ UF.descriptor variable
let mark = _mark descriptor
if mark == occursMark
then
return (T.Var "∞")
else
do liftIO $ UF.modifyDescriptor variable (\desc -> desc { _mark = occursMark })
srcType <- contentToSrcType variable (_content descriptor)
liftIO $ UF.modifyDescriptor variable (\desc -> desc { _mark = mark })
return srcType
contentToSrcType :: Variable -> Content -> StateT NameState IO T.Canonical
contentToSrcType variable content =
case content of
Structure term ->
termToSrcType term
Atom name ->
return (T.Type name)
Var _ _ (Just name) ->
return (T.Var name)
Var flex maybeSuper Nothing ->
do freshName <- getFreshName maybeSuper
liftIO $ UF.modifyDescriptor variable $ \desc ->
desc { _content = Var flex maybeSuper (Just freshName) }
return (T.Var freshName)
Alias name args realVariable ->
do srcArgs <- mapM (\(arg,tvar) -> (,) arg <$> variableToSrcType tvar) args
srcType <- variableToSrcType realVariable
return (T.Aliased name srcArgs (T.Filled srcType))
Error ->
return (T.Var "?")
termToSrcType :: Term1 Variable -> StateT NameState IO T.Canonical
termToSrcType term =
case term of
App1 func arg ->
do srcFunc <- variableToSrcType func
srcArg <- variableToSrcType arg
case srcFunc of
T.App f args ->
return (T.App f (args ++ [srcArg]))
_ ->
return (T.App srcFunc [srcArg])
Fun1 a b ->
T.Lambda
<$> variableToSrcType a
<*> variableToSrcType b
EmptyRecord1 ->
return $ T.Record [] Nothing
Record1 fields extension ->
do srcFields <- Map.toList <$> Traverse.traverse variableToSrcType fields
srcExt <- T.iteratedDealias <$> variableToSrcType extension
return $
case srcExt of
T.Record subFields subExt ->
T.Record (subFields ++ srcFields) subExt
T.Var _ ->
T.Record srcFields (Just srcExt)
_ ->
error "Used toSrcType on a type that is not well-formed"
-- MANAGE FRESH VARIABLE NAMES
data NameState = NameState
{ _freeNames :: [String]
, _numberPrimes :: Int
, _comparablePrimes :: Int
, _appendablePrimes :: Int
, _compAppendPrimes :: Int
}
-- | Build the initial 'NameState': an infinite supply of fresh names
-- (a..z, a1..z1, a2..z2, ...) minus the names already in use, and all
-- prime counters at zero.
makeNameState :: Set.Set String -> NameState
makeNameState usedNames =
  NameState available 0 0 0 0
  where
    letters suffix = [ c : suffix | c <- ['a'..'z'] ]
    candidates     = concatMap letters ("" : map show [ (1 :: Int) .. ])
    available      = [ n | n <- candidates, n `Set.notMember` usedNames ]
-- | Draw a fresh type-variable name.  Plain variables consume the next
-- free name; super-kinded variables get their kind's base name with one
-- prime mark appended per previous use.
getFreshName :: (Monad m) => Maybe Super -> StateT NameState m String
getFreshName maybeSuper =
  case maybeSuper of
    Nothing ->
      do names <- State.gets _freeNames
         State.modify (\st -> st { _freeNames = tail names })
         return (head names)
    Just Number ->
      primed "number" _numberPrimes (\n st -> st { _numberPrimes = n })
    Just Comparable ->
      primed "comparable" _comparablePrimes (\n st -> st { _comparablePrimes = n })
    Just Appendable ->
      primed "appendable" _appendablePrimes (\n st -> st { _appendablePrimes = n })
    Just CompAppend ->
      primed "compappend" _compAppendPrimes (\n st -> st { _compAppendPrimes = n })
  where
    -- Shared prime-counter pattern: read the counter, bump it, and
    -- append that many primes to the base name.
    primed base getCount setCount =
      do primes <- State.gets getCount
         State.modify (setCount (primes + 1))
         return (base ++ replicate primes '\'')
-- GET ALL VARIABLE NAMES
-- | Collect the names of all named type variables reachable from this
-- variable, marking visited nodes with 'getVarNamesMark' so shared or
-- cyclic structure is traversed only once.
getVarNames :: Variable -> IO (Set.Set String)
getVarNames var =
  do desc <- UF.descriptor var
     if _mark desc == getVarNamesMark
       then
         -- Already visited: contribute nothing.
         return Set.empty
       else
         do UF.setDescriptor var (desc { _mark = getVarNamesMark })
            getVarNamesHelp (_content desc)
getVarNamesHelp :: Content -> IO (Set.Set String)
getVarNamesHelp content =
case content of
Var _ _ (Just name) ->
return (Set.singleton name)
Var _ _ Nothing ->
return Set.empty
Structure term ->
getVarNamesTerm term
Alias _name args realVar ->
do let argSet = Set.fromList (map fst args)
realSet <- getVarNames realVar
sets <- mapM (getVarNames . snd) args
return (Set.unions (realSet : argSet : sets))
Atom _ ->
return Set.empty
Error ->
return Set.empty
getVarNamesTerm :: Term1 Variable -> IO (Set.Set String)
getVarNamesTerm term =
let go = getVarNames in
case term of
App1 a b ->
Set.union <$> go a <*> go b
Fun1 a b ->
Set.union <$> go a <*> go b
EmptyRecord1 ->
return Set.empty
Record1 fields extension ->
do fieldVars <- Set.unions <$> mapM go (Map.elems fields)
Set.union fieldVars <$> go extension
| laszlopandy/elm-compiler | src/Type/Type.hs | bsd-3-clause | 9,968 | 0 | 18 | 2,838 | 3,203 | 1,637 | 1,566 | 280 | 7 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Register
-- Copyright : Isaac Jones 2003-2004
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This module deals with registering and unregistering packages. There are a
-- couple ways it can do this, one is to do it directly. Another is to generate
-- a script that can be run later to do it. The idea here being that the user
-- is shielded from the details of what command to use for package registration
-- for a particular compiler. In practice this aspect was not especially
-- popular so we also provide a way to simply generate the package registration
-- file which then must be manually passed to @ghc-pkg@. It is possible to
-- generate registration information for where the package is to be installed,
-- or alternatively to register the package in place in the build tree. The
-- latter is occasionally handy, and will become more important when we try to
-- build multi-package systems.
--
-- This module does not delegate anything to the per-compiler modules but just
-- mixes it all in in this module, which is rather unsatisfactory. The script
-- generation and the unregister feature are not well used or tested.
module Distribution.Simple.Register (
register,
unregister,
internalPackageDBPath,
initPackageDB,
doesPackageDBExist,
createPackageDB,
deletePackageDB,
abiHash,
invokeHcPkg,
registerPackage,
HcPkg.RegisterOptions(..),
HcPkg.defaultRegisterOptions,
generateRegistrationInfo,
inplaceInstalledPackageInfo,
absoluteInstalledPackageInfo,
generalInstalledPackageInfo,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Types.TargetInfo
import Distribution.Types.LocalBuildInfo
import Distribution.Types.ComponentLocalBuildInfo
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.BuildPaths
import Distribution.Simple.BuildTarget
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.GHCJS as GHCJS
import qualified Distribution.Simple.LHC as LHC
import qualified Distribution.Simple.UHC as UHC
import qualified Distribution.Simple.HaskellSuite as HaskellSuite
import qualified Distribution.Simple.PackageIndex as Index
import Distribution.Backpack.DescribeUnitId
import Distribution.Simple.Compiler
import Distribution.Simple.Program
import Distribution.Simple.Program.Script
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import Distribution.Simple.Setup
import Distribution.PackageDescription
import Distribution.Package
import qualified Distribution.InstalledPackageInfo as IPI
import Distribution.InstalledPackageInfo (InstalledPackageInfo)
import Distribution.Simple.Utils
import Distribution.Utils.MapAccum
import Distribution.System
import Distribution.Text
import Distribution.Types.ComponentName
import Distribution.Verbosity as Verbosity
import Distribution.Version
import Distribution.Compat.Graph (IsNode(nodeKey))
import System.FilePath ((</>), (<.>), isAbsolute)
import System.Directory
import Data.List (partition)
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
-- -----------------------------------------------------------------------------
-- Registration
register :: PackageDescription -> LocalBuildInfo
         -> RegisterFlags -- ^Install in the user's database?; verbose
         -> IO ()
-- Register every library component of the package, in build order.
register pkg_descr lbi0 flags =
   -- Duncan originally asked for us to not register/install files
   -- when there was no public library.  But with per-component
   -- configure, we legitimately need to install internal libraries
   -- so that we can get them.  So just unconditionally install.
   doRegister
 where
  doRegister = do
    targets <- readTargetInfos verbosity pkg_descr lbi0 (regArgs flags)
    -- It's important to register in build order, because ghc-pkg
    -- will complain if a dependency is not registered.
    let componentsToRegister
          = neededTargetsInBuildOrder' pkg_descr lbi0 (map nodeKey targets)
    -- Accumulate the package index across components so each library is
    -- registered against an index that already contains its prerequisites.
    (_, ipi_mbs) <-
      mapAccumM `flip` installedPkgs lbi0 `flip` componentsToRegister $ \index tgt ->
        case targetComponent tgt of
          CLib lib -> do
            let clbi = targetCLBI tgt
                lbi = lbi0 { installedPkgs = index }
            ipi <- generateOne pkg_descr lib lbi clbi flags
            return (Index.insert ipi index, Just ipi)
          _ -> return (index, Nothing)
    registerAll pkg_descr lbi0 flags (catMaybes ipi_mbs)
   where
    verbosity = fromFlag (regVerbosity flags)
generateOne :: PackageDescription -> Library -> LocalBuildInfo -> ComponentLocalBuildInfo
            -> RegisterFlags
            -> IO InstalledPackageInfo
-- Build (and log at 'info' verbosity) the InstalledPackageInfo for one
-- library component; where to register is taken from the flags.
generateOne pkg lib lbi clbi regFlags
  = do
    absPackageDBs <- absolutePackageDBPaths packageDbs
    installedPkgInfo <- generateRegistrationInfo
                           verbosity pkg lib lbi clbi inplace reloc distPref
                           (registrationPackageDB absPackageDBs)
    info verbosity (IPI.showInstalledPackageInfo installedPkgInfo)
    return installedPkgInfo
  where
    inplace = fromFlag (regInPlace regFlags)
    reloc = relocatable lbi
    -- FIXME: there's really no guarantee this will work.
    -- registering into a totally different db stack can
    -- fail if dependencies cannot be satisfied.
    packageDbs = nub $ withPackageDB lbi
                    ++ maybeToList (flagToMaybe (regPackageDB regFlags))
    distPref = fromFlag (regDistPref regFlags)
    verbosity = fromFlag (regVerbosity regFlags)
registerAll :: PackageDescription -> LocalBuildInfo -> RegisterFlags
            -> [InstalledPackageInfo]
            -> IO ()
-- Act on the already-generated registration infos: either print ids,
-- write a registration file/directory, write a shell script, or actually
-- register with the compiler's package tool.
registerAll pkg lbi regFlags ipis
  = do
    when (fromFlag (regPrintId regFlags)) $ do
      for_ ipis $ \installedPkgInfo ->
        -- Only print the public library's IPI
        when (packageId installedPkgInfo == packageId pkg
              && IPI.sourceLibName installedPkgInfo == Nothing) $
          putStrLn (display (IPI.installedUnitId installedPkgInfo))
     -- Three different modes:
    case () of
     _ | modeGenerateRegFile   -> writeRegistrationFileOrDirectory
       | modeGenerateRegScript -> writeRegisterScript
       | otherwise             -> do
           for_ ipis $ \ipi -> do
             setupMessage' verbosity "Registering" (packageId pkg)
               (libraryComponentName (IPI.sourceLibName ipi))
               (Just (IPI.instantiatedWith ipi))
             registerPackage verbosity (compiler lbi) (withPrograms lbi)
                             packageDbs ipi HcPkg.defaultRegisterOptions
  where
    modeGenerateRegFile = isJust (flagToMaybe (regGenPkgConf regFlags))
    -- regFile is only meaningful in the reg-file mode; defaults to
    -- "<pkgid>.conf" when --gen-pkg-config was given without an argument.
    regFile = fromMaybe (display (packageId pkg) <.> "conf")
                        (fromFlag (regGenPkgConf regFlags))
    modeGenerateRegScript = fromFlag (regGenScript regFlags)
    -- FIXME: there's really no guarantee this will work.
    -- registering into a totally different db stack can
    -- fail if dependencies cannot be satisfied.
    packageDbs = nub $ withPackageDB lbi
                    ++ maybeToList (flagToMaybe  (regPackageDB regFlags))
    verbosity = fromFlag (regVerbosity regFlags)
    writeRegistrationFileOrDirectory = do
      -- Handles overwriting both directory and file
      deletePackageDB regFile
      case ipis of
        -- Single component: a plain file; multiple: a directory of
        -- zero-padded, numbered files so lexical order is build order.
        [installedPkgInfo] -> do
          info verbosity ("Creating package registration file: " ++ regFile)
          writeUTF8File regFile (IPI.showInstalledPackageInfo installedPkgInfo)
        _ -> do
          info verbosity ("Creating package registration directory: " ++ regFile)
          createDirectory regFile
          let num_ipis = length ipis
              lpad m xs = replicate (m - length ys) '0' ++ ys
                  where ys = take m xs
              number i = lpad (length (show num_ipis)) (show i)
          for_ (zip ([1..] :: [Int]) ipis) $ \(i, installedPkgInfo) ->
            writeUTF8File (regFile </> (number i ++ "-" ++ display (IPI.installedUnitId installedPkgInfo)))
                          (IPI.showInstalledPackageInfo installedPkgInfo)
    writeRegisterScript =
      case compilerFlavor (compiler lbi) of
        JHC -> notice verbosity "Registration scripts not needed for jhc"
        UHC -> notice verbosity "Registration scripts not needed for uhc"
        _   -> withHcPkg verbosity
               "Registration scripts are not implemented for this compiler"
               (compiler lbi) (withPrograms lbi)
               (writeHcPkgRegisterScript verbosity ipis packageDbs)
-- | Compute the 'InstalledPackageInfo' for a library, for one of three
-- destinations: inplace (build tree), relocatable, or absolute install.
generateRegistrationInfo :: Verbosity
                         -> PackageDescription
                         -> Library
                         -> LocalBuildInfo
                         -> ComponentLocalBuildInfo
                         -> Bool
                         -> Bool
                         -> FilePath
                         -> PackageDB
                         -> IO InstalledPackageInfo
generateRegistrationInfo verbosity pkg lib lbi clbi inplace reloc distPref packageDb = do
  --TODO: eliminate pwd!
  pwd <- getCurrentDirectory
  if inplace
    -- NB: With an inplace installation, the user may run './Setup
    -- build' to update the library files, without reregistering.
    -- In this case, it is critical that the ABI hash not flip.
    then return (inplaceInstalledPackageInfo pwd distPref
                   pkg (mkAbiHash "inplace") lib lbi clbi)
    else do
      abi_hash <- abiHash verbosity pkg distPref lbi lib clbi
      if reloc
        then relocRegistrationInfo verbosity
               pkg lib lbi clbi abi_hash packageDb
        else return (absoluteInstalledPackageInfo
                       pkg abi_hash lib lbi clbi)
-- | Compute the 'AbiHash' of a library that we built inplace.
abiHash :: Verbosity
        -> PackageDescription
        -> FilePath
        -> LocalBuildInfo
        -> Library
        -> ComponentLocalBuildInfo
        -> IO AbiHash
abiHash verbosity pkg distPref lbi lib clbi =
  case compilerFlavor comp of
    GHC | compilerVersion comp >= mkVersion [6,11] ->
      mkAbiHash <$> GHC.libAbiHash verbosity pkg lbi' lib clbi
    GHCJS ->
      mkAbiHash <$> GHCJS.libAbiHash verbosity pkg lbi' lib clbi
    _ -> return (mkAbiHash "")
  where
    comp = compiler lbi
    -- Make the package's own inplace DB visible while hashing.
    lbi' = lbi {
             withPackageDB = withPackageDB lbi
                 ++ [SpecificPackageDB (internalPackageDBPath lbi distPref)]
           }
-- | Registration info for a relocatable install; only GHC supports this.
relocRegistrationInfo :: Verbosity
                      -> PackageDescription
                      -> Library
                      -> LocalBuildInfo
                      -> ComponentLocalBuildInfo
                      -> AbiHash
                      -> PackageDB
                      -> IO InstalledPackageInfo
relocRegistrationInfo verbosity pkg lib lbi clbi abi_hash packageDb =
  case compilerFlavor (compiler lbi) of
    GHC -> do
      pkgRootDir <- GHC.pkgRoot verbosity lbi packageDb
      return (relocatableInstalledPackageInfo
                pkg abi_hash lib lbi clbi pkgRootDir)
    _ -> die' verbosity
           "Distribution.Simple.Register.relocRegistrationInfo: not implemented for this compiler"
-- | Create an empty package DB at the given path (non-compat mode).
initPackageDB :: Verbosity -> Compiler -> ProgramDb -> FilePath -> IO ()
initPackageDB verbosity comp progdb = createPackageDB verbosity comp progdb False
-- | Create an empty package DB at the specified location.
-- @preferCompat@ is honoured only by GHC's hc-pkg.
createPackageDB :: Verbosity -> Compiler -> ProgramDb -> Bool
                -> FilePath -> IO ()
createPackageDB verbosity comp progdb preferCompat dbPath =
  case compilerFlavor comp of
    GHC            -> HcPkg.init (GHC.hcPkgInfo progdb) verbosity preferCompat dbPath
    GHCJS          -> HcPkg.init (GHCJS.hcPkgInfo progdb) verbosity False dbPath
    LHC            -> HcPkg.init (LHC.hcPkgInfo progdb) verbosity False dbPath
    UHC            -> return ()
    HaskellSuite _ -> HaskellSuite.initPackageDB verbosity progdb dbPath
    _              -> die' verbosity
      "Distribution.Simple.Register.createPackageDB: not implemented for this compiler"
-- | Does a package DB exist at this path, either as a directory or a file?
doesPackageDBExist :: FilePath -> NoCallStackIO Bool
doesPackageDBExist dbPath =
  -- currently one impl for all compiler flavours, but could change if needed
  doesDirectoryExist dbPath >>= \isDir ->
    if isDir
      then return True
      else doesFileExist dbPath
-- | Remove a package DB, whichever form (directory or file) it takes;
-- a no-op when nothing exists at the path.
deletePackageDB :: FilePath -> NoCallStackIO ()
deletePackageDB dbPath = do
  -- currently one impl for all compiler flavours, but could change if needed
  isDir <- doesDirectoryExist dbPath
  if isDir
    then removeDirectoryRecursive dbPath
    else do
      isFile <- doesFileExist dbPath
      when isFile (removeFile dbPath)
-- | Run @hc-pkg@ using a given package DB stack, directly forwarding the
-- provided command-line arguments to it.
invokeHcPkg :: Verbosity -> Compiler -> ProgramDb -> PackageDBStack
            -> [String] -> IO ()
invokeHcPkg verbosity comp progdb dbStack extraArgs =
  withHcPkg verbosity "invokeHcPkg" comp progdb $ \hpi ->
    HcPkg.invoke hpi verbosity dbStack extraArgs
-- | Run an action against the compiler's hc-pkg tool, or die with the
-- given context string for compilers that have none.
withHcPkg :: Verbosity -> String -> Compiler -> ProgramDb
          -> (HcPkg.HcPkgInfo -> IO a) -> IO a
withHcPkg verbosity name comp progdb f =
  case compilerFlavor comp of
    GHC   -> f (GHC.hcPkgInfo progdb)
    GHCJS -> f (GHCJS.hcPkgInfo progdb)
    LHC   -> f (LHC.hcPkgInfo progdb)
    -- NB: the old string gap here produced the malformed message
    -- "...<name>:not implemented..." (no space after the colon); fixed to
    -- match the phrasing of the sibling error messages in this module.
    _     -> die' verbosity ("Distribution.Simple.Register." ++ name
                             ++ ": not implemented for this compiler")
registerPackage :: Verbosity
                -> Compiler
                -> ProgramDb
                -> PackageDBStack
                -> InstalledPackageInfo
                -> HcPkg.RegisterOptions
                -> IO ()
-- Dispatch registration to the compiler-specific backend.  NB: the guard
-- alternative deliberately sits before the LHC/UHC/JHC/HaskellSuite cases,
-- so any flavour other than GHC/GHCJS dies when multi-instance
-- registration was requested, before its normal alternative can match.
registerPackage verbosity comp progdb packageDbs installedPkgInfo registerOptions =
  case compilerFlavor comp of
    GHC -> GHC.registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions
    GHCJS -> GHCJS.registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions
    _ | HcPkg.registerMultiInstance registerOptions
      -> die' verbosity "Registering multiple package instances is not yet supported for this compiler"
    LHC -> LHC.registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions
    UHC -> UHC.registerPackage verbosity comp progdb packageDbs installedPkgInfo
    JHC -> notice verbosity "Registering for jhc (nothing to do)"
    HaskellSuite {} ->
      HaskellSuite.registerPackage verbosity progdb packageDbs installedPkgInfo
    _ -> die' verbosity "Registering is not implemented for this compiler"
writeHcPkgRegisterScript :: Verbosity
                         -> [InstalledPackageInfo]
                         -> PackageDBStack
                         -> HcPkg.HcPkgInfo
                         -> IO ()
-- Write an executable shell/batch script that replays the hc-pkg register
-- invocations (one per component) instead of running them now.
writeHcPkgRegisterScript verbosity ipis packageDbs hpi = do
  let genScript installedPkgInfo =
        let invocation  = HcPkg.registerInvocation hpi Verbosity.normal
                            packageDbs installedPkgInfo
                            HcPkg.defaultRegisterOptions
        in invocationAsSystemScript buildOS invocation
      scripts = map genScript ipis
      -- TODO: Do something more robust here
      regScript = unlines scripts
  info verbosity ("Creating package registration script: " ++ regScriptFileName)
  writeUTF8File regScriptFileName regScript
  setFileExecutable regScriptFileName
-- | OS-appropriate name for the generated registration script.
regScriptFileName :: FilePath
regScriptFileName
  | buildOS == Windows = "register.bat"
  | otherwise          = "register.sh"
-- -----------------------------------------------------------------------------
-- Making the InstalledPackageInfo
-- | Construct 'InstalledPackageInfo' for a library in a package, given a set
-- of installation directories.
--
generalInstalledPackageInfo
  :: ([FilePath] -> [FilePath]) -- ^ Translate relative include dir paths to
                                -- absolute paths.
  -> PackageDescription
  -> AbiHash
  -> Library
  -> LocalBuildInfo
  -> ComponentLocalBuildInfo
  -> InstallDirs FilePath
  -> InstalledPackageInfo
generalInstalledPackageInfo adjustRelIncDirs pkg abi_hash lib lbi clbi installDirs =
  IPI.InstalledPackageInfo {
    IPI.sourcePackageId    = packageId   pkg,
    IPI.installedUnitId    = componentUnitId clbi,
    IPI.installedComponentId_ = componentComponentId clbi,
    IPI.instantiatedWith   = componentInstantiatedWith clbi,
    IPI.sourceLibName      = libName lib,
    IPI.compatPackageKey   = componentCompatPackageKey clbi,
    IPI.license            = license     pkg,
    IPI.copyright          = copyright   pkg,
    IPI.maintainer         = maintainer  pkg,
    IPI.author             = author      pkg,
    IPI.stability          = stability   pkg,
    IPI.homepage           = homepage    pkg,
    IPI.pkgUrl             = pkgUrl      pkg,
    IPI.synopsis           = synopsis    pkg,
    IPI.description        = description pkg,
    IPI.category           = category    pkg,
    IPI.abiHash            = abi_hash,
    IPI.indefinite         = componentIsIndefinite clbi,
    IPI.exposed            = libExposed  lib,
    IPI.exposedModules     = componentExposedModules clbi,
    IPI.hiddenModules      = otherModules bi,
    IPI.trusted            = IPI.trusted IPI.emptyInstalledPackageInfo,
    IPI.importDirs         = [ libdir installDirs | hasModules ],
    IPI.libraryDirs        = libdirs,
    IPI.libraryDynDirs     = dynlibdirs,
    IPI.dataDir            = datadir installDirs,
    IPI.hsLibraries        = if hasLibrary
                               then [getHSLibraryName (componentUnitId clbi)]
                               else [],
    IPI.extraLibraries     = extraLibs bi,
    IPI.extraGHCiLibraries = extraGHCiLibs bi,
    IPI.includeDirs        = absinc ++ adjustRelIncDirs relinc,
    IPI.includes           = includes bi,
    IPI.depends            = depends,
    IPI.abiDepends         = abi_depends,
    IPI.ccOptions          = [], -- Note. NOT ccOptions bi!
                                 -- We don't want cc-options to be propagated
                                 -- to C compilations in other packages.
    IPI.ldOptions          = ldOptions bi,
    IPI.frameworks         = frameworks bi,
    IPI.frameworkDirs      = extraFrameworkDirs bi,
    IPI.haddockInterfaces  = [haddockdir installDirs </> haddockName pkg],
    IPI.haddockHTMLs       = [htmldir installDirs],
    IPI.pkgRoot            = Nothing
  }
  where
    bi = libBuildInfo lib
    --TODO: unclear what the root cause of the
    -- duplication is, but we nub it here for now:
    depends = ordNub $ map fst (componentPackageDeps clbi)
    -- Pair each dependency with the ABI hash recorded in the package index;
    -- a missing entry is a programmer error, hence the hard 'error'.
    abi_depends = map add_abi depends
    add_abi uid = IPI.AbiDependency uid abi
      where
        abi = case Index.lookupUnitId (installedPkgs lbi) uid of
                Nothing -> error $
                  "generalInstalledPackageInfo: missing IPI for " ++ display uid
                Just ipi -> IPI.abiHash ipi
    (absinc, relinc) = partition isAbsolute (includeDirs bi)
    hasModules = not $ null (allLibModules lib clbi)
    comp = compiler lbi
    -- A real object library exists iff there is something to compile
    -- (modules, C sources, or JS sources under GHCJS) and the component
    -- is not an indefinite (uninstantiated Backpack) unit.
    hasLibrary = (hasModules || not (null (cSources bi))
                            || (not (null (jsSources bi)) &&
                                compilerFlavor comp == GHCJS))
              && not (componentIsIndefinite clbi)
    (libdirs, dynlibdirs)
      | not hasLibrary
      = (extraLibDirs bi, [])
      -- the dynamic-library-dirs defaults to the library-dirs if not specified,
      -- so this works whether the dynamic-library-dirs field is supported or not
      | libraryDynDirSupported comp
      = (libdir    installDirs : extraLibDirs bi,
         dynlibdir installDirs : extraLibDirs bi)
      | otherwise
      = (libdir installDirs : dynlibdir installDirs : extraLibDirs bi, [])
      -- the compiler doesn't understand the dynamic-library-dirs field so we
      -- add the dyn directory to the "normal" list in the library-dirs field
-- | Construct 'InstalledPackageInfo' for a library that is in place in the
-- build tree.
--
-- This function knows about the layout of in place packages.
--
inplaceInstalledPackageInfo :: FilePath -- ^ top of the build tree
                            -> FilePath -- ^ location of the dist tree
                            -> PackageDescription
                            -> AbiHash
                            -> Library
                            -> LocalBuildInfo
                            -> ComponentLocalBuildInfo
                            -> InstalledPackageInfo
inplaceInstalledPackageInfo inplaceDir distPref pkg abi_hash lib lbi clbi =
    generalInstalledPackageInfo adjustRelativeIncludeDirs
                                pkg abi_hash lib lbi clbi installDirs
  where
    -- Relative include dirs are resolved against the top of the build tree.
    adjustRelativeIncludeDirs = map (inplaceDir </>)
    libTargetDir = componentBuildDir lbi clbi
    -- Start from the absolute install dirs, then redirect the paths that
    -- differ for an in-place (not yet installed) package.
    installDirs =
      (absoluteComponentInstallDirs pkg lbi (componentUnitId clbi) NoCopyDest) {
        libdir     = inplaceDir </> libTargetDir,
        dynlibdir  = inplaceDir </> libTargetDir,
        datadir    = inplaceDir </> dataDir pkg,
        docdir     = inplaceDocdir,
        htmldir    = inplaceHtmldir,
        haddockdir = inplaceHtmldir
      }
    inplaceDocdir  = inplaceDir </> distPref </> "doc"
    inplaceHtmldir = inplaceDocdir </> "html" </> display (packageName pkg)
-- | Construct 'InstalledPackageInfo' for the final install location of a
-- library package.
--
-- This function knows about the layout of installed packages.
--
absoluteInstalledPackageInfo :: PackageDescription
                             -> AbiHash
                             -> Library
                             -> LocalBuildInfo
                             -> ComponentLocalBuildInfo
                             -> InstalledPackageInfo
absoluteInstalledPackageInfo pkg abi_hash lib lbi clbi =
    generalInstalledPackageInfo adjustRelativeIncludeDirs
                                pkg abi_hash lib lbi clbi installDirs
  where
    -- For installed packages we install all include files into one dir,
    -- whereas in the build tree they may live in multiple local dirs.
    adjustRelativeIncludeDirs _
      | null (installIncludes bi) = []
      | otherwise                 = [includedir installDirs]
    bi          = libBuildInfo lib
    installDirs = absoluteComponentInstallDirs pkg lbi (componentUnitId clbi) NoCopyDest
-- | Like the absolute variant, but every install dir is rewritten to be
-- relative to the literal @${pkgroot}@ marker, so the registration stays
-- valid when the installed tree is moved.
relocatableInstalledPackageInfo :: PackageDescription
                                -> AbiHash
                                -> Library
                                -> LocalBuildInfo
                                -> ComponentLocalBuildInfo
                                -> FilePath
                                -> InstalledPackageInfo
relocatableInstalledPackageInfo pkg abi_hash lib lbi clbi pkgroot =
    generalInstalledPackageInfo adjustRelativeIncludeDirs
                                pkg abi_hash lib lbi clbi installDirs
  where
    -- For installed packages we install all include files into one dir,
    -- whereas in the build tree they may live in multiple local dirs.
    adjustRelativeIncludeDirs _
      | null (installIncludes bi) = []
      | otherwise                 = [includedir installDirs]
    bi = libBuildInfo lib
    installDirs = fmap (("${pkgroot}" </>) . shortRelativePath pkgroot)
                $ absoluteComponentInstallDirs pkg lbi (componentUnitId clbi) NoCopyDest
-- -----------------------------------------------------------------------------
-- Unregistration
unregister :: PackageDescription -> LocalBuildInfo -> RegisterFlags -> IO ()
-- Unregister the package from the selected DB, either by running hc-pkg
-- now or (with --gen-script) by writing a script that does it later.
unregister pkg lbi regFlags = do
  let pkgid     = packageId pkg
      genScript = fromFlag (regGenScript regFlags)
      verbosity = fromFlag (regVerbosity regFlags)
      packageDb = fromFlagOrDefault (registrationPackageDB (withPackageDB lbi))
                                    (regPackageDB regFlags)
      unreg hpi =
        let invocation = HcPkg.unregisterInvocation
                           hpi Verbosity.normal packageDb pkgid
        in if genScript
             then writeFileAtomic unregScriptFileName
                    (BS.Char8.pack $ invocationAsSystemScript buildOS invocation)
             else runProgramInvocation verbosity invocation
  setupMessage verbosity "Unregistering" pkgid
  withHcPkg verbosity "unregistering is only implemented for GHC and GHCJS"
    (compiler lbi) (withPrograms lbi) unreg
-- | OS-appropriate name for the generated unregistration script.
unregScriptFileName :: FilePath
unregScriptFileName
  | buildOS == Windows = "unregister.bat"
  | otherwise          = "unregister.sh"
-- | Path of the package DB used for the package's own inplace components.
internalPackageDBPath :: LocalBuildInfo -> FilePath -> FilePath
internalPackageDBPath lbi distPref
  | compilerFlavor (compiler lbi) == UHC = UHC.inplacePackageDbPath lbi
  | otherwise                            = distPref </> "package.conf.inplace"
| mydaum/cabal | Cabal/Distribution/Simple/Register.hs | bsd-3-clause | 25,393 | 0 | 24 | 7,195 | 4,667 | 2,415 | 2,252 | 435 | 8 |
module Main where
import Lib
-- | Placeholder entry point; does nothing yet.
main :: IO ()
main = pure ()
| danstiner/cryptopals | set1/app/Main.hs | mit | 62 | 0 | 6 | 15 | 27 | 15 | 12 | 4 | 1 |
module TupleIn1 where
f :: (a, ([Int],c)) -> ([Int],c)
-- NOTE(review): the second clause is unreachable -- the first pattern
-- already matches every pair.  Judging by the file name (_TokOut), this
-- is presumably the expected output of a refactoring-tool test, so the
-- overlap looks intentional; confirm before "fixing" it.
f (x, y@(b_1, b_2)) = y
f (x, y@([], m)) = y
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleContexts #-}
module HasBaseStruct where
import BaseSyntaxStruct
import SpecialNames
-- | Inject a base-structure value into an extensible representation @r@.
class HasBaseStruct r base | r->base where
  base      :: base -> r
-- | Partial inverse of 'HasBaseStruct': recover the base structure, if
-- the representation value is in fact a base-structure node.
class GetBaseStruct r base | r->base where
  basestruct :: r -> Maybe base
--instance HasBaseStruct rec base => HasBaseStruct [rec] [base] where
-- base = map base
-- Decl building
-- Smart constructors: each builds the corresponding declaration node and
-- injects it into the extensible representation via 'base'.
hsTypeDecl sloc tp hstype       = base $ HsTypeDecl sloc tp hstype
hsNewTypeDecl sloc c tp consdecl hsnames2
                                = base $ HsNewTypeDecl sloc c tp consdecl hsnames2
hsDataDecl sloc c tp condecls names2
                                = base $ HsDataDecl sloc c tp condecls names2
hsClassDecl sloc c typ fdeps decls = base $ HsClassDecl sloc c typ fdeps decls
hsInstDecl sloc optn c typ decls = base $ HsInstDecl sloc optn c typ decls
hsDefaultDecl sloc typ          = base $ HsDefaultDecl sloc typ
hsTypeSig sloc hsnames c t      = base $ HsTypeSig sloc hsnames c t
hsFunBind sloc hsmatches        = base $ HsFunBind sloc hsmatches
hsPatBind sloc pat rhs decls    = base $ HsPatBind sloc pat rhs decls
hsInfixDecl sloc fixity hsnames = base $ HsInfixDecl sloc fixity hsnames
hsPrimitiveTypeDecl sloc c name = base $ HsPrimitiveTypeDecl sloc c name
hsPrimitiveBind sloc name t     = base $ HsPrimitiveBind sloc name t
-- Exp building
-- Smart constructors for expression nodes; 'hsEVar'/'hsECon' are
-- shorthands for identifier nodes wrapping a variable or constructor.
hsId n                    = base $ HsId n
hsEVar name               = base $ HsId $ HsVar name
hsECon name               = base $ HsId $ HsCon name
hsLit sloc lit            = base $ HsLit sloc lit
hsInfixApp e1 op e2       = base $ HsInfixApp e1 op e2
hsApp e1 e2               = base $ HsApp e1 e2
hsNegApp s e              = base $ HsNegApp s e
hsLambda pats e           = base $ HsLambda pats e
hsLet decls e             = base $ HsLet decls e
hsIf e1 e2 e3             = base $ HsIf e1 e2 e3
hsCase e alts             = base $ HsCase e alts
hsDo stmts                = base $ HsDo stmts
hsTuple exps              = base $ HsTuple exps
hsList exps               = base $ HsList exps
hsParen e                 = base $ HsParen e
hsLeftSection e op        = base $ HsLeftSection e op
hsRightSection op e       = base $ HsRightSection op e
hsRecConstr sloc name fupds = base $ HsRecConstr sloc name fupds
hsRecUpdate sloc e fupds  = base $ HsRecUpdate sloc e fupds
hsEnumFrom e              = base $ HsEnumFrom e
hsEnumFromTo e1 e2        = base $ HsEnumFromTo e1 e2
hsEnumFromThen e1 e2      = base $ HsEnumFromThen e1 e2
hsEnumFromThenTo e1 e2 e3 = base $ HsEnumFromThenTo e1 e2 e3
hsListComp stms           = base $ HsListComp stms
hsExpTypeSig sloc e c t   = base $ HsExpTypeSig sloc e c t
hsAsPat hname e           = base $ HsAsPat hname e
hsWildCard                = base HsWildCard
hsIrrPat e                = base $ HsIrrPat e
-- Pat building
-- Smart constructors for pattern nodes, mirroring the expression builders.
hsPId n              = base $ HsPId n
hsPVar n             = base $ HsPId $ HsVar n
hsPCon n             = base $ HsPId $ HsCon n
hsPLit sloc lit      = base $ HsPLit sloc lit
hsPNeg sloc lit      = base $ HsPNeg sloc lit
hsPSucc sloc n lit   = base $ HsPSucc sloc n lit
hsPInfixApp p1 op p2 = base $ HsPInfixApp p1 op p2
hsPApp hname pats    = base $ HsPApp hname pats
hsPTuple sloc pats   = base $ HsPTuple sloc pats
hsPList sloc pats    = base $ HsPList sloc pats
hsPParen p           = base $ HsPParen p
hsPRec hname patfields = base $ HsPRec hname patfields
hsPAsPat hname p     = base $ HsPAsPat hname p
hsPWildCard          = base HsPWildCard
hsPIrrPat p          = base $ HsPIrrPat p
-- Kind building
-- Smart constructors for kinds: star, predicate, arrow and prop kinds.
kstar      = base Kstar
kpred      = base Kpred
karrow x y = base (Kfun x y)
kprop      = base Kprop -- P-Logic
-- Type building
-- Smart constructors for types.  Note that 'hsTyTuple' does not build a
-- dedicated tuple node: it desugars to the tuple type constructor applied
-- to the component types (the direct HsTyTuple builder is left commented
-- out below).
hsTyFun t1 t2 = base $ HsTyFun t1 t2
--hsTyTuple ts = base $ HsTyTuple ts
hsTyTuple ts  = foldl hsTyApp (hsTyCon (tuple_tycon_name (length ts-1))) ts
hsTyApp f x   = base $ HsTyApp f x
hsTyVar name  = base $ HsTyVar name
hsTyCon name  = base $ HsTyCon name
hsTyForall xs ps t = base $ HsTyForall xs ps t
hsTyId (HsCon c) = hsTyCon c
hsTyId (HsVar v) = hsTyVar v
-- Added because of the stupid monomorphism restriction:
-- (these builders have no arguments, so without explicit signatures the
-- monomorphism restriction would force them to a single concrete type)
hsWildCard :: HasBaseStruct exp (EI i e p ds t c) => exp
hsPWildCard :: HasBaseStruct pat (PI i p) => pat
kstar, kpred, kprop :: HasBaseStruct kind (K k) => kind
| kmate/HaRe | old/tools/base/AST/HasBaseStruct.hs | bsd-3-clause | 4,371 | 0 | 12 | 1,350 | 1,455 | 708 | 747 | 82 | 1 |
-- Dummy Parsec module
module Parsec where
import Monad
-- Types mirroring the real Parsec API, so code can typecheck against
-- this dummy module; none of them carry a working implementation.
type Parser a = GenParser Char () a
newtype GenParser tok st a = Parser (State tok st -> Consumed (Reply tok st a))
data Consumed a = Consumed a --input is consumed
                | Empty !a --no input is consumed
data Reply tok st a = Ok a (State tok st) ParseError --parsing succeeded with "a"
                    | Error ParseError --parsing failed
data State tok st = State { stateInput :: [tok]
                          , statePos :: SourcePos
                          , stateUser :: !st
                          }
data ParseError = ParseError !SourcePos [Message]
-- Errors render as a placeholder "*" in this dummy module.
instance Show ParseError where
    show _ = "*"
type SourceName = String
type Line = Int
type Column = Int
data SourcePos = SourcePos SourceName !Line !Column
     deriving (Eq,Ord)
data Message = SysUnExpect !String --library generated unexpect
             | UnExpect !String --unexpected something
             | Expect !String --expecting something
             | Message !String --raw message
-- Dummy instances: present only to satisfy the type checker; every
-- method is 'undefined' and will crash if actually used.
instance Monad (GenParser tok st) where
  return = undefined
  a >>= b = undefined
instance MonadPlus (GenParser tok st) where
  mzero = undefined
-- Stub combinators with the real Parsec signatures; all 'undefined'.
(<|>) :: GenParser tok st a -> GenParser tok st a -> GenParser tok st a
p1 <|> p2 = undefined
parseFromFile :: Parser a -> SourceName -> IO (Either ParseError a)
parseFromFile = undefined
many :: GenParser tok st a -> GenParser tok st [a]
many = undefined
try :: GenParser tok st a -> GenParser tok st a
try = undefined
runParser :: GenParser tok st a -> st -> SourceName -> [tok] -> Either ParseError a
runParser = undefined
option :: a -> GenParser tok st a -> GenParser tok st a
option = undefined
| forste/haReFork | tools/base/tests/GhcLibraries/Parsec.hs | bsd-3-clause | 1,941 | 2 | 10 | 701 | 515 | 279 | 236 | 58 | 1 |
{-
%
% (c) The University of Glasgow 2006
% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
%
\section[FastStringEnv]{@FastStringEnv@: FastString environments}
-}
module FastStringEnv (
-- * FastString environments (maps)
FastStringEnv,
-- ** Manipulating these environments
mkFsEnv,
emptyFsEnv, unitFsEnv,
extendFsEnv_C, extendFsEnv_Acc, extendFsEnv,
extendFsEnvList, extendFsEnvList_C,
filterFsEnv,
plusFsEnv, plusFsEnv_C, alterFsEnv,
lookupFsEnv, lookupFsEnv_NF, delFromFsEnv, delListFromFsEnv,
elemFsEnv, mapFsEnv,
-- * Deterministic FastString environments (maps)
DFastStringEnv,
-- ** Manipulating these environments
mkDFsEnv, emptyDFsEnv, dFsEnvElts,
) where
import UniqFM
import UniqDFM
import Maybes
import FastString
-- | A non-deterministic set of FastStrings.
-- See Note [Deterministic UniqFM] in UniqDFM for explanation why it's not
-- deterministic and why it matters. Use DFastStringEnv if the set eventually
-- gets converted into a list or folded over in a way where the order
-- changes the generated code.
type FastStringEnv a = UniqFM a  -- Domain is FastString
emptyFsEnv         :: FastStringEnv a
mkFsEnv            :: [(FastString,a)] -> FastStringEnv a
alterFsEnv         :: (Maybe a-> Maybe a) -> FastStringEnv a -> FastString -> FastStringEnv a
extendFsEnv_C      :: (a->a->a) -> FastStringEnv a -> FastString -> a -> FastStringEnv a
extendFsEnv_Acc    :: (a->b->b) -> (a->b) -> FastStringEnv b -> FastString -> a -> FastStringEnv b
extendFsEnv        :: FastStringEnv a -> FastString -> a -> FastStringEnv a
plusFsEnv          :: FastStringEnv a -> FastStringEnv a -> FastStringEnv a
plusFsEnv_C        :: (a->a->a) -> FastStringEnv a -> FastStringEnv a -> FastStringEnv a
extendFsEnvList    :: FastStringEnv a -> [(FastString,a)] -> FastStringEnv a
extendFsEnvList_C  :: (a->a->a) -> FastStringEnv a -> [(FastString,a)] -> FastStringEnv a
delFromFsEnv       :: FastStringEnv a -> FastString -> FastStringEnv a
delListFromFsEnv   :: FastStringEnv a -> [FastString] -> FastStringEnv a
elemFsEnv          :: FastString -> FastStringEnv a -> Bool
unitFsEnv          :: FastString -> a -> FastStringEnv a
lookupFsEnv        :: FastStringEnv a -> FastString -> Maybe a
lookupFsEnv_NF     :: FastStringEnv a -> FastString -> a
filterFsEnv        :: (elt -> Bool) -> FastStringEnv elt -> FastStringEnv elt
mapFsEnv           :: (elt1 -> elt2) -> FastStringEnv elt1 -> FastStringEnv elt2
-- Every operation is a thin wrapper over the corresponding UniqFM one;
-- the signatures above pin the domain to FastString.
emptyFsEnv        = emptyUFM
unitFsEnv         = unitUFM
extendFsEnv       = addToUFM
extendFsEnvList   = addListToUFM
lookupFsEnv       = lookupUFM
alterFsEnv        = alterUFM
mkFsEnv           = listToUFM
elemFsEnv         = elemUFM
plusFsEnv         = plusUFM
plusFsEnv_C       = plusUFM_C
extendFsEnv_C     = addToUFM_C
mapFsEnv          = mapUFM
extendFsEnv_Acc   = addToUFM_Acc
extendFsEnvList_C = addListToUFM_C
delFromFsEnv      = delFromUFM
delListFromFsEnv  = delListFromUFM
filterFsEnv       = filterUFM
-- Partial: crashes (via 'expectJust') when the key is absent.
lookupFsEnv_NF fsEnv key = expectJust "lookupFsEnv_NF" (lookupFsEnv fsEnv key)
-- Deterministic FastStringEnv
-- See Note [Deterministic UniqFM] in UniqDFM for explanation why we need
-- DFastStringEnv.
type DFastStringEnv a = UniqDFM a  -- Domain is FastString
emptyDFsEnv :: DFastStringEnv a
emptyDFsEnv = emptyUDFM
-- | Elements in deterministic order (safe to fold over for codegen).
dFsEnvElts :: DFastStringEnv a -> [a]
dFsEnvElts = eltsUDFM
mkDFsEnv :: [(FastString,a)] -> DFastStringEnv a
mkDFsEnv l = listToUDFM l
| olsner/ghc | compiler/utils/FastStringEnv.hs | bsd-3-clause | 3,729 | 0 | 10 | 920 | 939 | 490 | 449 | 60 | 1 |
-- !!! lazy name conflict reporting for field labels/selectors.
module M where
import Mod163_A
-- The field selector 'f' presumably clashes with a name from Mod163_A;
-- per the header comment, that lazily-reported conflict is the point of
-- this test file -- do not rename.
data T = MkT { f :: Int }
| urbanslug/ghc | testsuite/tests/module/mod163.hs | bsd-3-clause | 123 | 0 | 8 | 25 | 23 | 15 | 8 | 3 | 0 |
import Control.Exception
import Control.Concurrent
import GHC.Conc
import Control.Monad
-- | nfib n counts 1 for every call: nfib 0 = nfib 1 = 1, then the sum of
-- the two predecessors (i.e. the (n+1)-th Fibonacci number).  Used here
-- purely as CPU-burning work.
nfib n
  | n < 2     = 1
  | otherwise = nfib (n - 2) + nfib (n - 1)
-- Regression test: a thread forked with async exceptions masked must
-- still be killable at its explicit 'allowInterrupt' points.
main = do
  -- Compute-bound loop under mask_; 'allowInterrupt' after each nfib is
  -- the only place the kill below can be delivered.
  t <- mask_ $ forkIO $ forM_ [1..] $ \n -> nfib n `seq` allowInterrupt
  killThread t
  -- Spin (yielding) until the killed thread is actually gone; the test
  -- hangs here if allowInterrupt failed to let the kill through.
  let loop = do r <- threadStatus t
                when (r /= ThreadFinished) $ do yield; loop
  loop
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/concurrent/should_run/allowinterrupt001.hs | bsd-3-clause | 349 | 4 | 15 | 91 | 168 | 80 | 88 | 11 | 2 |
module PropTests.PropImports
( module PropTests.PropImports ) where
import BasicPrelude as PropTests.PropImports
import Test.Hspec as PropTests.PropImports (Spec, hspec, describe)
import Test.Hspec.QuickCheck as PropTests.PropImports (modifyMaxSuccess, prop)
import Test.QuickCheck as PropTests.PropImports (forAll,elements,choose)
import FizzBuzz as PropTests.PropImports
import FizzFib as PropTests.PropImports
| mlitchard/swiftfizz | src-test/PropTests/PropImports.hs | isc | 421 | 0 | 5 | 45 | 93 | 62 | 31 | 8 | 0 |
import Data.Int (Int64)
import Data.List (sort)
-- | Alphabetical value of an uppercase letter: 'A' -> 1 .. 'Z' -> 26.
charScore :: Char -> Int64
charScore letter = fromIntegral (fromEnum letter) - 64

-- | Score of a name at 1-based list position i: i times the sum of its
-- letter values (Project Euler 22).
nameScore :: Int64 -> String -> Int64
nameScore position name = position * sum (map charScore name)

-- | Score every name against its 1-based position.
nameScores :: [String] -> [Int64]
nameScores = zipWith nameScore [1..]
-- | Read a comma-separated list of quoted names (the puzzle's input
-- format) by wrapping it in brackets and reusing the Haskell 'read'
-- syntax for @[String]@.
readNames :: FilePath -> IO [String]
readNames path = do
  contents <- readFile path
  return (read ("[" ++ contents ++ "]"))
-- | Project Euler 22: total of all name scores for the sorted name list.
solve :: IO Int64
solve = do
  names <- readNames "names.txt"
  return (sum (nameScores (sort names)))
main = solve >>= print
| pshendry/project-euler-solutions | 0022/solution.hs | mit | 541 | 5 | 10 | 116 | 281 | 130 | 151 | 17 | 1 |
import Data.Maybe
-- | A binary tree; 'Pos' is an (x, y) grid position used by 'layout'
-- (x = column, y = depth, both 1-based).
data Tree a = Empty | Branch a (Tree a) (Tree a)
              deriving (Show, Eq)
type Pos = (Int, Int)
-- copied from https://wiki.haskell.org/99_questions/Solutions/66
-- | Problem 66: compact layout.  NB: this is a lazy knot -- the root
-- column x' is computed from the left profile l of the very call that
-- receives x'.  It works because the offsets 'layout'' produces do not
-- depend on its x argument.  (r, the right profile, is unused here.)
layout :: Tree a -> Tree (a, Pos)
layout t = t'
  where (l, t', r) = layout' x' 1 t
        x' = maximum l + 1
-- layout' x y t places t's root at column x, row y and returns
-- (left profile, annotated tree, right profile): per level below the
-- root, the horizontal offset (relative to the root) of the outermost
-- node on that side.
layout' :: Int -> Int -> Tree a -> ([Int], Tree (a, Pos), [Int])
layout' x y Empty = ([], Empty, [])
layout' x y (Branch a l r) = (ll', Branch (a, (x, y)) l' r', rr')
  where (ll, l', lr) = layout' (x - sep) (y + 1) l
        (rl, r', rr) = layout' (x + sep) (y + 1) r
        -- smallest half-separation that keeps the subtrees' facing
        -- profiles from touching
        sep = maximum (0:zipWith (+) lr rl) `div` 2 + 1
        ll' = 0 : overlay (map (+sep) ll) (map (subtract sep) rl)
        rr' = 0 : overlay (map (+sep) rr) (map (subtract sep) lr)
-- | Merge two lists positionally, preferring elements of the first list
-- and keeping the tail of whichever list is longer.
overlay :: [a] -> [a] -> [a]
overlay xs ys =
  case (xs, ys) of
    ([], _)        -> ys
    (_, [])        -> xs
    (x:xs', _:ys') -> x : overlay xs' ys'
-- Sample tree shared by problems 64-66 of the 99-questions set.
tree65 = Branch 'n'
                (Branch 'k'
                        (Branch 'c'
                                (Branch 'a' Empty Empty)
                                (Branch 'e'
                                        (Branch 'd' Empty Empty)
                                        (Branch 'g' Empty Empty)
                                )
                        )
                        (Branch 'm' Empty Empty)
                )
                (Branch 'u'
                        (Branch 'p'
                                Empty
                                (Branch 'q' Empty Empty)
                        )
                        Empty
                )
-- Print the laid-out sample tree.
main = print (layout tree65)
-- QuickCheck
-- ref: https://wiki.haskell.org/QuickCheck_as_a_test_set_generator
-- ref: https://wiki.haskell.org/QuickCheck_/_GADT | Airtnp/Freshman_Simple_Haskell_Lib | Incomplete/QuickCheck.hs | mit | 132 | 0 | 2 | 7 | 5 | 4 | 1 | 1 | 0 |
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Hpack.ConfigSpec (
spec
, package
, executable
, library
) where
import Helper
import Data.Aeson.QQ
import Data.Aeson.Types
import Data.String.Interpolate.IsString
import Control.Arrow
import System.Directory (createDirectory)
import Data.Yaml
import Data.Either.Compat
import Hpack.Util
import Hpack.Config hiding (package)
import qualified Hpack.Config as Config
-- | Minimal package fixture used throughout this spec:
-- name "foo", version "0.0.0", everything else defaulted by 'Config.package'.
package :: Package
package = Config.package "foo" "0.0.0"
-- | Executable fixture with the given name and main module and an empty
-- other-modules list.
executable :: String -> String -> Executable
executable name main_ = Executable name main_ []
-- | Empty library fixture: no exposed flag, no exposed/other/reexported modules.
library :: Library
library = Library Nothing [] [] []
-- | Run an expectation against a parsed package config.  Writes @content@ to
-- @package.yaml@ inside a fresh temp directory named "foo", runs
-- @beforeAction@ with that directory as CWD (for creating extra files), then
-- parses the config and hands (warnings, package) to the expectation.
-- A parse failure fails the test via 'expectationFailure'.
withPackage :: String -> IO () -> (([String], Package) -> Expectation) -> Expectation
withPackage content beforeAction expectation = withTempDirectory $ \dir_ -> do
  let dir = dir_ </> "foo"
  createDirectory dir
  writeFile (dir </> "package.yaml") content
  withCurrentDirectory dir beforeAction
  r <- readPackageConfig (dir </> "package.yaml")
  either expectationFailure expectation r
-- | Like 'withPackage' but the expectation sees only the parsed 'Package'.
withPackageConfig :: String -> IO () -> (Package -> Expectation) -> Expectation
withPackageConfig content beforeAction expectation = withPackage content beforeAction (expectation . snd)
-- | 'withPackageConfig' with no setup action.
withPackageConfig_ :: String -> (Package -> Expectation) -> Expectation
withPackageConfig_ content = withPackageConfig content (return ())
-- | Like 'withPackage' but the expectation sees only the warning messages.
withPackageWarnings :: String -> IO () -> ([String] -> Expectation) -> Expectation
withPackageWarnings content beforeAction expectation = withPackage content beforeAction (expectation . fst)
-- | 'withPackageWarnings' with no setup action.
withPackageWarnings_ :: String -> ([String] -> Expectation) -> Expectation
withPackageWarnings_ content = withPackageWarnings content (return ())
spec :: Spec
spec = do
describe "renamePackage" $ do
it "renames a package" $ do
renamePackage "bar" package `shouldBe` package {packageName = "bar"}
it "renames dependencies on self" $ do
let packageWithExecutable dependencies = package {packageExecutables = [(section $ executable "main" "Main.hs") {sectionDependencies = dependencies}]}
renamePackage "bar" (packageWithExecutable ["foo"]) `shouldBe` (packageWithExecutable ["bar"]) {packageName = "bar"}
describe "renameDependencies" $ do
let sectionWithDeps dependencies = (section ()) {sectionDependencies = dependencies}
it "renames dependencies" $ do
renameDependencies "bar" "baz" (sectionWithDeps ["foo", "bar"]) `shouldBe` sectionWithDeps ["foo", "baz"]
it "renames dependency in conditionals" $ do
let sectionWithConditional dependencies = (section ()) {
sectionConditionals = [
Conditional {
conditionalCondition = "some condition"
, conditionalThen = sectionWithDeps dependencies
, conditionalElse = Just (sectionWithDeps dependencies)
}
]
}
renameDependencies "bar" "baz" (sectionWithConditional ["foo", "bar"]) `shouldBe` sectionWithConditional ["foo", "baz"]
describe "parseJSON" $ do
context "when parsing (CaptureUnknownFields Section a)" $ do
it "accepts dependencies" $ do
let input = [i|
dependencies: hpack
|]
captureUnknownFieldsValue <$> decodeEither input
`shouldBe` Right (section Empty){sectionDependencies = ["hpack"]}
it "accepts includes-dirs" $ do
let input = [i|
include-dirs:
- foo
- bar
|]
captureUnknownFieldsValue <$> decodeEither input
`shouldBe` Right (section Empty){sectionIncludeDirs = ["foo", "bar"]}
it "accepts install-includes" $ do
let input = [i|
install-includes:
- foo.h
- bar.h
|]
captureUnknownFieldsValue <$> decodeEither input
`shouldBe` Right (section Empty){sectionInstallIncludes = ["foo.h", "bar.h"]}
it "accepts c-sources" $ do
let input = [i|
c-sources:
- foo.c
- bar.c
|]
captureUnknownFieldsValue <$> decodeEither input
`shouldBe` Right (section Empty){sectionCSources = ["foo.c", "bar.c"]}
it "accepts extra-lib-dirs" $ do
let input = [i|
extra-lib-dirs:
- foo
- bar
|]
captureUnknownFieldsValue <$> decodeEither input
`shouldBe` Right (section Empty){sectionExtraLibDirs = ["foo", "bar"]}
it "accepts extra-libraries" $ do
let input = [i|
extra-libraries:
- foo
- bar
|]
captureUnknownFieldsValue <$> decodeEither input
`shouldBe` Right (section Empty){sectionExtraLibraries = ["foo", "bar"]}
context "when parsing conditionals" $ do
it "accepts conditionals" $ do
let input = [i|
when:
condition: os(windows)
dependencies: Win32
|]
conditionals = [
Conditional "os(windows)"
(section ()){sectionDependencies = ["Win32"]}
Nothing
]
captureUnknownFieldsValue <$> decodeEither input
`shouldBe` Right (section Empty){sectionConditionals = conditionals}
it "warns on unknown fields" $ do
let input = [i|
foo: 23
when:
- condition: os(windows)
bar: 23
when:
condition: os(windows)
bar2: 23
- condition: os(windows)
baz: 23
|]
captureUnknownFieldsFields <$> (decodeEither input :: Either String (CaptureUnknownFields (Section Empty)))
`shouldBe` Right ["foo", "bar", "bar2", "baz"]
context "when parsing conditionals with else-branch" $ do
it "accepts conditionals with else-branch" $ do
let input = [i|
when:
condition: os(windows)
then:
dependencies: Win32
else:
dependencies: unix
|]
conditionals = [
Conditional "os(windows)"
(section ()){sectionDependencies = ["Win32"]}
(Just (section ()){sectionDependencies = ["unix"]})
]
r :: Either String (Section Empty)
r = captureUnknownFieldsValue <$> decodeEither input
sectionConditionals <$> r `shouldBe` Right conditionals
it "rejects invalid conditionals" $ do
let input = [i|
when:
condition: os(windows)
then:
dependencies: Win32
else: null
|]
r :: Either String (Section Empty)
r = captureUnknownFieldsValue <$> decodeEither input
sectionConditionals <$> r `shouldSatisfy` isLeft
it "warns on unknown fields" $ do
let input = [i|
when:
condition: os(windows)
foo: null
then:
bar: null
else:
baz: null
|]
captureUnknownFieldsFields <$> (decodeEither input :: Either String (CaptureUnknownFields (Section Empty)))
`shouldBe` Right ["foo", "bar", "baz"]
context "when parsing a Dependency" $ do
it "accepts simple dependencies" $ do
parseEither parseJSON "hpack" `shouldBe` Right (Dependency "hpack" Nothing)
it "accepts git dependencies" $ do
let value = [aesonQQ|{
name: "hpack",
git: "https://github.com/sol/hpack",
ref: "master"
}|]
source = GitRef "https://github.com/sol/hpack" "master" Nothing
parseEither parseJSON value `shouldBe` Right (Dependency "hpack" (Just source))
it "accepts github dependencies" $ do
let value = [aesonQQ|{
name: "hpack",
github: "sol/hpack",
ref: "master"
}|]
source = GitRef "https://github.com/sol/hpack" "master" Nothing
parseEither parseJSON value `shouldBe` Right (Dependency "hpack" (Just source))
it "accepts an optional subdirectory for git dependencies" $ do
let value = [aesonQQ|{
name: "warp",
github: "yesodweb/wai",
ref: "master",
subdir: "warp"
}|]
source = GitRef "https://github.com/yesodweb/wai" "master" (Just "warp")
parseEither parseJSON value `shouldBe` Right (Dependency "warp" (Just source))
it "accepts local dependencies" $ do
let value = [aesonQQ|{
name: "hpack",
path: "../hpack"
}|]
source = Local "../hpack"
parseEither parseJSON value `shouldBe` Right (Dependency "hpack" (Just source))
context "when parsing fails" $ do
it "returns an error message" $ do
let value = Number 23
parseEither parseJSON value `shouldBe` (Left "Error in $: expected String or an Object, encountered Number" :: Either String Dependency)
context "when ref is missing" $ do
it "produces accurate error messages" $ do
let value = [aesonQQ|{
name: "hpack",
git: "sol/hpack",
ef: "master"
}|]
parseEither parseJSON value `shouldBe` (Left "Error in $: key \"ref\" not present" :: Either String Dependency)
context "when both git and github are missing" $ do
it "produces accurate error messages" $ do
let value = [aesonQQ|{
name: "hpack",
gi: "sol/hpack",
ref: "master"
}|]
parseEither parseJSON value `shouldBe` (Left "Error in $: neither key \"git\" nor key \"github\" present" :: Either String Dependency)
describe "getModules" $ around withTempDirectory $ do
it "returns Haskell modules in specified source directory" $ \dir -> do
touch (dir </> "src/Foo.hs")
touch (dir </> "src/Bar/Baz.hs")
touch (dir </> "src/Setup.hs")
getModules dir "src" >>= (`shouldMatchList` ["Foo", "Bar.Baz", "Setup"])
context "when source directory is '.'" $ do
it "ignores Setup" $ \dir -> do
touch (dir </> "Foo.hs")
touch (dir </> "Setup.hs")
getModules dir "." `shouldReturn` ["Foo"]
context "when source directory is './.'" $ do
it "ignores Setup" $ \dir -> do
touch (dir </> "Foo.hs")
touch (dir </> "Setup.hs")
getModules dir "./." `shouldReturn` ["Foo"]
describe "determineModules" $ do
it "adds the Paths_* module to the other-modules" $ do
determineModules "foo" [] (Just $ List ["Foo"]) Nothing `shouldBe` (["Foo"], ["Paths_foo"])
it "replaces dashes with underscores in Paths_*" $ do
determineModules "foo-bar" [] (Just $ List ["Foo"]) Nothing `shouldBe` (["Foo"], ["Paths_foo_bar"])
context "when the Paths_* module is part of the exposed-modules" $ do
it "does not add the Paths_* module to the other-modules" $ do
determineModules "foo" [] (Just $ List ["Foo", "Paths_foo"]) Nothing `shouldBe` (["Foo", "Paths_foo"], [])
describe "readPackageConfig" $ do
it "warns on unknown fields" $ do
withPackageWarnings_ [i|
bar: 23
baz: 42
|]
(`shouldBe` [
"Ignoring unknown field \"bar\" in package description"
, "Ignoring unknown field \"baz\" in package description"
]
)
it "warns on unknown fields in when block, list" $ do
withPackageWarnings_ [i|
when:
- condition: impl(ghc)
bar: 23
baz: 42
|]
(`shouldBe` [
"Ignoring unknown field \"bar\" in package description"
, "Ignoring unknown field \"baz\" in package description"
]
)
it "warns on unknown fields in when block, single" $ do
withPackageWarnings_ [i|
when:
condition: impl(ghc)
github: foo/bar
dependencies: ghc-prim
baz: 42
|]
(`shouldBe` [
"Ignoring unknown field \"baz\" in package description"
, "Ignoring unknown field \"github\" in package description"
]
)
it "accepts name" $ do
withPackageConfig_ [i|
name: bar
|]
(packageName >>> (`shouldBe` "bar"))
it "accepts version" $ do
withPackageConfig_ [i|
version: 0.1.0
|]
(packageVersion >>> (`shouldBe` "0.1.0"))
it "accepts synopsis" $ do
withPackageConfig_ [i|
synopsis: some synopsis
|]
(packageSynopsis >>> (`shouldBe` Just "some synopsis"))
it "accepts description" $ do
withPackageConfig_ [i|
description: some description
|]
(packageDescription >>> (`shouldBe` Just "some description"))
it "accepts category" $ do
withPackageConfig_ [i|
category: Data
|]
(`shouldBe` package {packageCategory = Just "Data"})
it "accepts author" $ do
withPackageConfig_ [i|
author: John Doe
|]
(`shouldBe` package {packageAuthor = ["John Doe"]})
it "accepts maintainer" $ do
withPackageConfig_ [i|
maintainer: John Doe <john.doe@example.com>
|]
(`shouldBe` package {packageMaintainer = ["John Doe <john.doe@example.com>"]})
it "accepts copyright" $ do
withPackageConfig_ [i|
copyright: (c) 2015 John Doe
|]
(`shouldBe` package {packageCopyright = ["(c) 2015 John Doe"]})
it "accepts stability" $ do
withPackageConfig_ [i|
stability: experimental
|]
(packageStability >>> (`shouldBe` Just "experimental"))
it "accepts homepage URL" $ do
withPackageConfig_ [i|
github: hspec/hspec
homepage: https://example.com/
|]
(packageHomepage >>> (`shouldBe` Just "https://example.com/"))
it "infers homepage URL from github" $ do
withPackageConfig_ [i|
github: hspec/hspec
|]
(packageHomepage >>> (`shouldBe` Just "https://github.com/hspec/hspec#readme"))
it "omits homepage URL if it is null" $ do
withPackageConfig_ [i|
github: hspec/hspec
homepage: null
|]
(packageHomepage >>> (`shouldBe` Nothing))
it "accepts bug-reports URL" $ do
withPackageConfig_ [i|
github: hspec/hspec
bug-reports: https://example.com/issues
|]
(packageBugReports >>> (`shouldBe` Just "https://example.com/issues"))
it "infers bug-reports URL from github" $ do
withPackageConfig_ [i|
github: hspec/hspec
|]
(packageBugReports >>> (`shouldBe` Just "https://github.com/hspec/hspec/issues"))
it "omits bug-reports URL if it is null" $ do
withPackageConfig_ [i|
github: hspec/hspec
bug-reports: null
|]
(packageBugReports >>> (`shouldBe` Nothing))
it "accepts license" $ do
withPackageConfig_ [i|
license: MIT
|]
(`shouldBe` package {packageLicense = Just "MIT"})
it "infers license file" $ do
withPackageConfig [i|
name: foo
|]
(do
touch "LICENSE"
)
(packageLicenseFile >>> (`shouldBe` Just "LICENSE"))
it "accepts license file" $ do
withPackageConfig_ [i|
license-file: FOO
|]
(packageLicenseFile >>> (`shouldBe` Just "FOO"))
it "accepts flags" $ do
withPackageConfig_ [i|
flags:
integration-tests:
description: Run the integration test suite
manual: yes
default: no
|]
(packageFlags >>> (`shouldBe` [Flag "integration-tests" (Just "Run the integration test suite") True False]))
it "warns on unknown fields in flag sections" $ do
withPackageWarnings_ [i|
flags:
integration-tests:
description: Run the integration test suite
manual: yes
default: no
foo: 23
|]
(`shouldBe` [
"Ignoring unknown field \"foo\" for flag \"integration-tests\""
]
)
it "accepts extra-source-files" $ do
withPackageConfig [i|
extra-source-files:
- CHANGES.markdown
- README.markdown
|]
(do
touch "CHANGES.markdown"
touch "README.markdown"
)
(packageExtraSourceFiles >>> (`shouldBe` ["CHANGES.markdown", "README.markdown"]))
it "accepts data-files" $ do
withPackageConfig [i|
data-files:
- data/**/*.html
|]
(do
touch "data/foo/index.html"
touch "data/bar/index.html"
)
(packageDataFiles >>> (`shouldMatchList` ["data/foo/index.html", "data/bar/index.html"]))
it "accepts github" $ do
withPackageConfig_ [i|
github: hspec/hspec
|]
(packageSourceRepository >>> (`shouldBe` Just (SourceRepository "https://github.com/hspec/hspec" Nothing)))
it "accepts third part of github URL as subdir" $ do
withPackageConfig_ [i|
github: hspec/hspec/hspec-core
|]
(packageSourceRepository >>> (`shouldBe` Just (SourceRepository "https://github.com/hspec/hspec" (Just "hspec-core"))))
it "accepts arbitrary git URLs as source repository" $ do
withPackageConfig_ [i|
git: https://gitlab.com/gitlab-org/gitlab-ce.git
|]
(packageSourceRepository >>> (`shouldBe` Just (SourceRepository "https://gitlab.com/gitlab-org/gitlab-ce.git" Nothing)))
it "accepts CPP options" $ do
withPackageConfig_ [i|
cpp-options: -DFOO
library:
cpp-options: -DLIB
executables:
foo:
main: Main.hs
cpp-options: -DFOO
tests:
spec:
main: Spec.hs
cpp-options: -DTEST
|]
(`shouldBe` package {
packageLibrary = Just (section library) {sectionCppOptions = ["-DFOO", "-DLIB"]}
, packageExecutables = [(section $ executable "foo" "Main.hs") {sectionCppOptions = ["-DFOO", "-DFOO"]}]
, packageTests = [(section $ executable "spec" "Spec.hs") {sectionCppOptions = ["-DFOO", "-DTEST"]}]
}
)
it "accepts cc-options" $ do
withPackageConfig_ [i|
cc-options: -Wall
library:
cc-options: -fLIB
executables:
foo:
main: Main.hs
cc-options: -O2
tests:
spec:
main: Spec.hs
cc-options: -O0
|]
(`shouldBe` package {
packageLibrary = Just (section library) {sectionCCOptions = ["-Wall", "-fLIB"]}
, packageExecutables = [(section $ executable "foo" "Main.hs") {sectionCCOptions = ["-Wall", "-O2"]}]
, packageTests = [(section $ executable "spec" "Spec.hs") {sectionCCOptions = ["-Wall", "-O0"]}]
}
)
it "accepts ld-options" $ do
withPackageConfig_ [i|
library:
ld-options: -static
|]
(`shouldBe` package {
packageLibrary = Just (section library) {sectionLdOptions = ["-static"]}
}
)
it "accepts buildable" $ do
withPackageConfig_ [i|
buildable: no
library:
buildable: yes
executables:
foo:
main: Main.hs
|]
(`shouldBe` package {
packageLibrary = Just (section library) {sectionBuildable = Just True}
, packageExecutables = [(section $ executable "foo" "Main.hs") {sectionBuildable = Just False}]
}
)
context "when reading library section" $ do
it "warns on unknown fields" $ do
withPackageWarnings_ [i|
library:
bar: 23
baz: 42
|]
(`shouldBe` [
"Ignoring unknown field \"bar\" in library section"
, "Ignoring unknown field \"baz\" in library section"
]
)
it "accepts source-dirs" $ do
withPackageConfig_ [i|
library:
source-dirs:
- foo
- bar
|]
(packageLibrary >>> (`shouldBe` Just (section library) {sectionSourceDirs = ["foo", "bar"]}))
it "accepts build-tools" $ do
withPackageConfig_ [i|
library:
build-tools:
- alex
- happy
|]
(packageLibrary >>> (`shouldBe` Just (section library) {sectionBuildTools = ["alex", "happy"]}))
it "accepts default-extensions" $ do
withPackageConfig_ [i|
library:
default-extensions:
- Foo
- Bar
|]
(packageLibrary >>> (`shouldBe` Just (section library) {sectionDefaultExtensions = ["Foo", "Bar"]}))
it "accepts global default-extensions" $ do
withPackageConfig_ [i|
default-extensions:
- Foo
- Bar
library: {}
|]
(packageLibrary >>> (`shouldBe` Just (section library) {sectionDefaultExtensions = ["Foo", "Bar"]}))
it "accepts global source-dirs" $ do
withPackageConfig_ [i|
source-dirs:
- foo
- bar
library: {}
|]
(packageLibrary >>> (`shouldBe` Just (section library) {sectionSourceDirs = ["foo", "bar"]}))
it "accepts global build-tools" $ do
withPackageConfig_ [i|
build-tools:
- alex
- happy
library: {}
|]
(packageLibrary >>> (`shouldBe` Just (section library) {sectionBuildTools = ["alex", "happy"]}))
it "allows to specify exposed" $ do
withPackageConfig_ [i|
library:
exposed: no
|]
(packageLibrary >>> (`shouldBe` Just (section library{libraryExposed = Just False})))
it "allows to specify exposed-modules" $ do
withPackageConfig [i|
library:
source-dirs: src
exposed-modules: Foo
|]
(do
touch "src/Foo.hs"
touch "src/Bar.hs"
)
(packageLibrary >>> (`shouldBe` Just (section library{libraryExposedModules = ["Foo"], libraryOtherModules = ["Bar", "Paths_foo"]}) {sectionSourceDirs = ["src"]}))
it "allows to specify other-modules" $ do
withPackageConfig [i|
library:
source-dirs: src
other-modules: Bar
|]
(do
touch "src/Foo.hs"
touch "src/Bar.hs"
)
(packageLibrary >>> (`shouldBe` Just (section library{libraryExposedModules = ["Foo"], libraryOtherModules = ["Bar"]}) {sectionSourceDirs = ["src"]}))
it "allows to specify reexported-modules" $ do
withPackageConfig_ [i|
library:
reexported-modules: Baz
|]
(packageLibrary >>> (`shouldBe` Just (section library{libraryReexportedModules = ["Baz"]})))
it "allows to specify both exposed-modules and other-modules" $ do
withPackageConfig [i|
library:
source-dirs: src
exposed-modules: Foo
other-modules: Bar
|]
(do
touch "src/Baz.hs"
)
(packageLibrary >>> (`shouldBe` Just (section library{libraryExposedModules = ["Foo"], libraryOtherModules = ["Bar"]}) {sectionSourceDirs = ["src"]}))
context "when neither exposed-modules nor other-modules are specified" $ do
it "exposes all modules" $ do
withPackageConfig [i|
library:
source-dirs: src
|]
(do
touch "src/Foo.hs"
touch "src/Bar.hs"
)
(packageLibrary >>> (`shouldBe` Just (section library{libraryExposedModules = ["Bar", "Foo"]}) {sectionSourceDirs = ["src"]}))
context "when reading executable section" $ do
it "warns on unknown fields" $ do
withPackageWarnings_ [i|
executables:
foo:
main: Main.hs
bar: 42
baz: 23
|]
(`shouldBe` [
"Ignoring unknown field \"bar\" in executable section \"foo\""
, "Ignoring unknown field \"baz\" in executable section \"foo\""
]
)
it "reads executable section" $ do
withPackageConfig_ [i|
executables:
foo:
main: driver/Main.hs
|]
(packageExecutables >>> (`shouldBe` [section $ executable "foo" "driver/Main.hs"]))
it "accepts arbitrary entry points as main" $ do
withPackageConfig_ [i|
executables:
foo:
main: Foo
|]
(packageExecutables >>> (`shouldBe` [
(section $ executable "foo" "Foo.hs") {sectionGhcOptions = ["-main-is Foo"]}
]
))
it "accepts source-dirs" $ do
withPackageConfig_ [i|
executables:
foo:
main: Main.hs
source-dirs:
- foo
- bar
|]
(packageExecutables >>> (`shouldBe` [(section $ executable "foo" "Main.hs") {sectionSourceDirs = ["foo", "bar"]}]))
it "accepts build-tools" $ do
withPackageConfig_ [i|
executables:
foo:
main: Main.hs
build-tools:
- alex
- happy
|]
(packageExecutables >>> (`shouldBe` [(section $ executable "foo" "Main.hs") {sectionBuildTools = ["alex", "happy"]}]))
it "accepts global source-dirs" $ do
withPackageConfig_ [i|
source-dirs:
- foo
- bar
executables:
foo:
main: Main.hs
|]
(packageExecutables >>> (`shouldBe` [(section $ executable "foo" "Main.hs") {sectionSourceDirs = ["foo", "bar"]}]))
it "accepts global build-tools" $ do
withPackageConfig_ [i|
build-tools:
- alex
- happy
executables:
foo:
main: Main.hs
|]
(packageExecutables >>> (`shouldBe` [(section $ executable "foo" "Main.hs") {sectionBuildTools = ["alex", "happy"]}]))
it "infers other-modules" $ do
withPackageConfig [i|
executables:
foo:
main: Main.hs
source-dirs: src
|]
(do
touch "src/Main.hs"
touch "src/Foo.hs"
)
(map (executableOtherModules . sectionData) . packageExecutables >>> (`shouldBe` [["Foo"]]))
it "allows to specify other-modules" $ do
withPackageConfig [i|
executables:
foo:
main: Main.hs
source-dirs: src
other-modules: Baz
|]
(do
touch "src/Foo.hs"
touch "src/Bar.hs"
)
(map (executableOtherModules . sectionData) . packageExecutables >>> (`shouldBe` [["Baz"]]))
it "accepts default-extensions" $ do
withPackageConfig_ [i|
executables:
foo:
main: driver/Main.hs
default-extensions:
- Foo
- Bar
|]
(packageExecutables >>> (`shouldBe` [(section $ executable "foo" "driver/Main.hs") {sectionDefaultExtensions = ["Foo", "Bar"]}]))
it "accepts global default-extensions" $ do
withPackageConfig_ [i|
default-extensions:
- Foo
- Bar
executables:
foo:
main: driver/Main.hs
|]
(packageExecutables >>> (`shouldBe` [(section $ executable "foo" "driver/Main.hs") {sectionDefaultExtensions = ["Foo", "Bar"]}]))
it "accepts GHC options" $ do
withPackageConfig_ [i|
executables:
foo:
main: driver/Main.hs
ghc-options: -Wall
|]
(`shouldBe` package {packageExecutables = [(section $ executable "foo" "driver/Main.hs") {sectionGhcOptions = ["-Wall"]}]})
it "accepts global GHC options" $ do
withPackageConfig_ [i|
ghc-options: -Wall
executables:
foo:
main: driver/Main.hs
|]
(`shouldBe` package {packageExecutables = [(section $ executable "foo" "driver/Main.hs") {sectionGhcOptions = ["-Wall"]}]})
it "accepts GHC profiling options" $ do
withPackageConfig_ [i|
executables:
foo:
main: driver/Main.hs
ghc-prof-options: -fprof-auto
|]
(`shouldBe` package {packageExecutables = [(section $ executable "foo" "driver/Main.hs") {sectionGhcProfOptions = ["-fprof-auto"]}]})
it "accepts global GHC profiling options" $ do
withPackageConfig_ [i|
ghc-prof-options: -fprof-auto
executables:
foo:
main: driver/Main.hs
|]
(`shouldBe` package {packageExecutables = [(section $ executable "foo" "driver/Main.hs") {sectionGhcProfOptions = ["-fprof-auto"]}]})
context "when reading test section" $ do
it "warns on unknown fields" $ do
withPackageWarnings_ [i|
tests:
foo:
main: Main.hs
bar: 42
baz: 23
|]
(`shouldBe` [
"Ignoring unknown field \"bar\" in test section \"foo\""
, "Ignoring unknown field \"baz\" in test section \"foo\""
]
)
it "reads test section" $ do
withPackageConfig_ [i|
tests:
spec:
main: test/Spec.hs
|]
(`shouldBe` package {packageTests = [section $ executable "spec" "test/Spec.hs"]})
it "accepts single dependency" $ do
withPackageConfig_ [i|
tests:
spec:
main: test/Spec.hs
dependencies: hspec
|]
(`shouldBe` package {packageTests = [(section $ executable "spec" "test/Spec.hs") {sectionDependencies = ["hspec"]}]})
it "accepts list of dependencies" $ do
withPackageConfig_ [i|
tests:
spec:
main: test/Spec.hs
dependencies:
- hspec
- QuickCheck
|]
(`shouldBe` package {packageTests = [(section $ executable "spec" "test/Spec.hs") {sectionDependencies = ["hspec", "QuickCheck"]}]})
context "when both global and section specific dependencies are specified" $ do
it "combines dependencies" $ do
withPackageConfig_ [i|
dependencies:
- base
tests:
spec:
main: test/Spec.hs
dependencies: hspec
|]
(`shouldBe` package {packageTests = [(section $ executable "spec" "test/Spec.hs") {sectionDependencies = ["base", "hspec"]}]})
context "when a specified source directory does not exist" $ do
it "warns" $ do
withPackageWarnings [i|
source-dirs:
- some-dir
- some-existing-dir
library:
source-dirs: some-lib-dir
executables:
main:
main: Main.hs
source-dirs: some-exec-dir
tests:
spec:
main: Main.hs
source-dirs: some-test-dir
|]
(do
touch "some-existing-dir/foo"
)
(`shouldBe` [
"Specified source-dir \"some-dir\" does not exist"
, "Specified source-dir \"some-exec-dir\" does not exist"
, "Specified source-dir \"some-lib-dir\" does not exist"
, "Specified source-dir \"some-test-dir\" does not exist"
]
)
around withTempDirectory $ do
context "when package.yaml can not be parsed" $ do
it "returns an error" $ \dir -> do
let file = dir </> "package.yaml"
writeFile file [i|
foo: bar
foo baz
|]
readPackageConfig file `shouldReturn` Left (file ++ ":3:12: could not find expected ':' while scanning a simple key")
context "when package.yaml is invalid" $ do
it "returns an error" $ \dir -> do
let file = dir </> "package.yaml"
writeFile file [i|
executables:
foo:
ain: driver/Main.hs
|]
readPackageConfig file >>= (`shouldSatisfy` isLeft)
context "when package.yaml does not exist" $ do
it "returns an error" $ \dir -> do
let file = dir </> "package.yaml"
readPackageConfig file `shouldReturn` Left [i|#{file}: Yaml file not found: #{file}|]
context "toJSON :: Conditinal -> Value" $ do
it "serializes conditionals properly" $ do
let s = emptySection { sectionBuildable = Just False }
toJSON (Conditional "os(darwin)" s Nothing)
`shouldBe` object [ "condition" .= String "os(darwin)"
, "buildable" .= Bool False
]
it "serializes conditionals with an else branch properly" $ do
let s = emptySection { sectionBuildable = Just False }
toJSON (Conditional "os(darwin)" s (Just s))
`shouldBe` object [ "condition" .= String "os(darwin)"
, "then" .= object [ "buildable" .= Bool False
]
, "else" .= object [ "buildable" .= Bool False
]
]
context "toJSON :: Section a -> Value" $
it "serializes conditionals properly" $ do
let s = emptySection { sectionConditionals = [ Conditional "os(darwin)" emptySection { sectionBuildable = Just False } Nothing]
}
toJSON s
`shouldBe` object [ "when" .= array [ object [ "condition" .= String "os(darwin)"
, "buildable" .= Bool False
]
]
]
-- | A @Section ()@ with every field empty or defaulted; the base fixture for
-- the JSON-serialization tests, which override individual fields as needed.
emptySection :: Section ()
emptySection = Section { sectionData = ()
                       , sectionSourceDirs = []
                       , sectionDependencies = []
                       , sectionDefaultExtensions = []
                       , sectionOtherExtensions = []
                       , sectionGhcOptions = []
                       , sectionGhcProfOptions = []
                       , sectionCppOptions = []
                       , sectionCCOptions = []
                       , sectionCSources = []
                       , sectionExtraLibDirs = []
                       , sectionExtraLibraries = []
                       , sectionIncludeDirs = []
                       , sectionInstallIncludes = []
                       , sectionLdOptions = []
                       , sectionBuildable = Nothing
                       , sectionConditionals = []
                       , sectionBuildTools = []
                       }
| yamadapc/hpack-convert | test/Hpack/ConfigSpec.hs | mit | 35,949 | 0 | 32 | 12,898 | 6,958 | 3,759 | 3,199 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable, OverloadedStrings, RankNTypes,
ScopedTypeVariables #-}
module Lightstreamer.Http
( Connection(closeConnection)
, ConnectionSettings(..)
, HttpBody(..)
, HttpException(..)
, HttpHeader(..)
, HttpResponse
( resBody
, resHeaders
, resReason
, resStatusCode
)
, TlsSettings(..)
, newConnection
, readStreamedResponse
, sendHttpRequest
, simpleHttpRequest
) where
import Control.Concurrent (ThreadId, forkIO)
import Control.Exception (Exception, SomeException, catch, throwIO, try)
import Control.Monad (unless)
import Control.Monad.IO.Class (MonadIO(..))
import Data.ByteString.Char8 (readInt, pack)
import Data.ByteString.Lazy (toStrict, fromStrict)
import Data.ByteString.Lex.Integral (readHexadecimal)
import Data.Conduit (Consumer, Conduit, Producer, ($$+), ($$+-), ($=+), await, leftover, yield)
import Data.Default (def)
import Data.List (find)
import Data.Typeable (Typeable)
import Lightstreamer.Error (showException)
import Lightstreamer.Request (RequestConverter(..), StandardHeaders
, createStandardHeaders, serializeHttpRequest)
import System.X509 (getSystemCertificateStore)
import qualified Data.ByteString as B
import qualified Data.Conduit.Binary as CB
import qualified Data.Word8 as W
import qualified Network.Socket as S
import qualified Network.Socket.ByteString as SB
import qualified Network.TLS as TLS
import qualified Network.TLS.Extra as TLS
-- | Where and how to connect: host, port, and optional TLS configuration
-- (a 'Nothing' means a plain TCP connection — see 'newConnection').
data ConnectionSettings = ConnectionSettings
    { csHost :: String
    , csPort :: Int
    , csTlsSettings :: Maybe TlsSettings
    }
-- | TLS options.  When 'tlsDisableCertificateValidation' is True,
-- 'newConnection' installs a validation cache that accepts any certificate.
data TlsSettings = TlsSettings
    { tlsDisableCertificateValidation :: Bool
    }
-- | An open connection as a record of capabilities, so plain-socket and TLS
-- connections share one interface (see the two builders in 'newConnection').
data Connection = Connection
    { closeConnection :: IO () -- ^ shut the connection down
    , readBytes :: IO B.ByteString -- ^ read the next chunk of bytes
    , standardHeaders :: StandardHeaders -- ^ headers derived from the host
    , writeBytes :: B.ByteString -> IO () -- ^ send all of the given bytes
    }
-- | Exception carrying a raw error message; thrown for malformed responses.
data HttpException = HttpException B.ByteString deriving (Show, Typeable)
-- empty instance body: default Exception methods suffice
instance Exception HttpException where
-- | One response header as (name, value).
data HttpHeader = HttpHeader B.ByteString B.ByteString deriving Show
-- | Response body: a background streaming thread (chunked transfer), a fully
-- read strict body (Content-Length), or nothing.
data HttpBody = StreamingBody ThreadId | ContentBody B.ByteString | None
-- | A parsed HTTP response (status line, headers, and body).
data HttpResponse = HttpResponse
    { resBody :: HttpBody
    , resHeaders :: [HttpHeader]
    , resReason :: B.ByteString
    , resStatusCode :: Int
    }
-- | Open a TCP connection to the configured host/port, upgrading to TLS when
-- 'csTlsSettings' is present, and package it as a 'Connection' record.
--
-- NOTE(review): 'S.inet_addr' only parses dotted-quad IPv4 addresses, so
-- hostnames presumably fail here — confirm whether callers always pass an
-- IP, or whether this should use getAddrInfo.
newConnection :: ConnectionSettings -> IO Connection
newConnection settings = do
    addr <- S.inet_addr (csHost settings)
    let sockAddr = S.SockAddrInet (fromInteger . toInteger $ csPort settings) addr
    sock <- S.socket S.AF_INET S.Stream 6 --6 = tcp
    -- disable Nagle's algorithm so small writes go out immediately
    S.setSocketOption sock S.NoDelay 1
    S.connect sock sockAddr
    maybe (mkConnection sock) (mkTlsConnection (csHost settings) sock) $ csTlsSettings settings
    where
        -- plain-socket capabilities
        mkConnection sock = return Connection
            { closeConnection = S.close sock
            , readBytes = SB.recv sock 8192
            , standardHeaders = createStandardHeaders $ csHost settings
            , writeBytes = SB.sendAll sock
            }
        -- TLS handshake over the already-connected socket, using the system
        -- certificate store for validation
        mkTlsConnection host sock tls = do
            certStore <- getSystemCertificateStore
            context <- TLS.contextNew sock $ mkClientParams certStore
            TLS.handshake context
            return Connection
                { closeConnection =
                    -- Closing an SSL connection gracefully involves writing/reading
                    -- on the socket. But when this is called the socket might be
                    -- already closed, and we get a @ResourceVanished@.
                    catch (TLS.bye context >> TLS.contextClose context) (\(_ :: SomeException) -> return ())
                , readBytes = TLS.recvData context
                , writeBytes = TLS.sendData context . fromStrict
                , standardHeaders = createStandardHeaders $ csHost settings
                }
            where
                mkClientParams certStore =
                    (TLS.defaultParamsClient host (pack . show $ csPort settings))
                        { TLS.clientSupported =
                            def { TLS.supportedCiphers = TLS.ciphersuite_all }
                        , TLS.clientShared = def
                            { TLS.sharedCAStore = certStore
                            , TLS.sharedValidationCache = validationCache
                            }
                        }
                    -- accept-everything cache when validation is disabled,
                    -- otherwise the library default
                    where validationCache
                            | tlsDisableCertificateValidation tls =
                                TLS.ValidationCache (\_ _ _ -> return TLS.ValidationCachePass)
                                                    (\_ _ _ -> return ())
                            | otherwise = def
-- | Serialize a request (merged with the connection's standard headers) and
-- write it to the connection.
sendHttpRequest :: RequestConverter r => Connection -> r -> IO ()
sendHttpRequest conn req = writeBytes conn . serializeHttpRequest $
    convertToHttp req (standardHeaders conn)
-- | Conduit producer that yields chunks from 'readBytes' until an empty
-- chunk signals the peer closed the connection.
connectionProducer :: Connection -> Producer IO B.ByteString
connectionProducer conn = loop
    where loop = do
            bytes <- liftIO $ readBytes conn
            unless (B.null bytes) $ yield bytes >> loop
-- | Read a response whose body may be streamed.  Parses the header, then:
-- with a Content-Length header the body is read strictly into 'ContentBody';
-- with Transfer-Encoding the chunked body is de-chunked and fed to
-- @streamSink@, errors going to @errHandle@.  When @tId@ is 'Nothing' the
-- streaming runs on a freshly forked thread; when a 'ThreadId' is supplied,
-- the streaming action runs on the current thread and that id is reported
-- back in 'StreamingBody'.  Any other header shape throws 'HttpException'.
readStreamedResponse :: Connection
                     -> Maybe ThreadId
                     -> (B.ByteString -> IO ())
                     -> Consumer [B.ByteString] IO ()
                     -> IO (Either B.ByteString HttpResponse)
readStreamedResponse conn tId errHandle streamSink = do
    -- resumable source: header parsing leaves unread body bytes on rSrc
    (rSrc, res) <- connectionProducer conn $$+ readHttpHeader
    case find contentHeader $ resHeaders res of
      Just (HttpHeader "Content-Length" val) -> do
          body <- rSrc $$+- (CB.take . maybe 0 fst $ readInt val)
          return $ Right res { resBody = ContentBody $ toStrict body }
      Just (HttpHeader "Transfer-Encoding" _) -> do
        let action = try (rSrc $=+ chunkConduit B.empty $$+- streamSink) >>=
                        either (errHandle . showException) return
        a <- maybe (forkIO action) ((>>) action . return) tId
        return $ Right res { resBody = StreamingBody a }
      _ -> throwHttpException "Could not determine body type of response."
    where
      contentHeader (HttpHeader "Content-Length" _) = True
      contentHeader (HttpHeader "Transfer-Encoding" _) = True
      contentHeader _ = False
-- | Send a request and read back a plain (non-streaming) response.
--
-- Only @Content-Length@ bodies are accepted; chunked or otherwise
-- unframed responses are rejected with an exception.
simpleHttpRequest :: RequestConverter r => Connection -> r -> IO (Either B.ByteString HttpResponse)
simpleHttpRequest conn req = do
  sendHttpRequest conn req
  (rSrc, res) <- connectionProducer conn $$+ readHttpHeader
  case find contentHeader $ resHeaders res of
    Just (HttpHeader "Content-Length" val) -> do
      -- An unparsable length falls back to 0, i.e. an empty body.
      body <- rSrc $$+- (CB.take . maybe 0 fst $ readInt val)
      return $ Right res { resBody = ContentBody $ toStrict body }
    _ -> throwHttpException "Unexpected response body."
  where
    contentHeader (HttpHeader "Content-Length" _) = True
    contentHeader _ = False
-- | Incrementally parse the status line and headers of an HTTP
-- response, leaving any bytes after the header block as conduit
-- leftovers for the body reader.
readHttpHeader :: Consumer B.ByteString IO HttpResponse
readHttpHeader = loop [] Nothing
  where
    loop acc res = await >>= maybe (complete acc res) (build acc res)
    -- End of input: succeed only if a response was built and exactly
    -- the unconsumed remainder is pending.
    complete [rest] (Just res) = do
      leftover rest
      return res
    complete _ (Just _) = throwHttpException "Unexpected response."
    complete _ Nothing = throwHttpException "No response provided."
    -- builds header collection
    -- @acc - collection of partial buffers that will be combined upon a new line char
    -- @res - Http Response being built
    -- @more - buffer from most recent await
    build acc res more =
      case B.uncons p2 of
        -- dropping \r
        Just (_, rest)
          -- An empty line terminates the header block.
          | B.null p1 -> complete [B.drop 1 rest] res
          | otherwise ->
            case parse (B.concat . reverse $ p1:acc) res of
              Left err -> throwHttpException err
              Right res' -> build [] res' $ B.drop 1 rest -- drop 1 = \n
        -- No CR seen yet: stash the partial line and await more input.
        Nothing -> loop (p1:acc) res
      where
        (p1, p2) = B.break (==W._cr) more
    -- First line: the status line, e.g. "HTTP/1.1 200 OK".
    parse bytes Nothing =
      if B.count W._space (B.take 15 bytes) >= 2 then
        let (_, rest) = B.break (==W._space) bytes
        in let (code, rest') = B.break (==W._space) (B.drop 1 rest)
        in Right $ Just HttpResponse
          { resStatusCode = maybe 0 fst $ readInt code
          , resReason = B.drop 1 rest'
          , resHeaders = []
          , resBody = None
          }
      else Left $ B.concat ["Invalid HTTP response. :", bytes]
    -- Subsequent lines: "Name: value" header fields.
    parse bytes (Just a) =
      let header = let (name, value) = B.break (==W._colon) bytes
                   in HttpHeader name (B.dropWhile (==W._space) $ B.drop 1 value)
      in Right $ Just a { resHeaders = header : resHeaders a }
-- | Abort with an 'HttpException' carrying the given message.
throwHttpException :: MonadIO m => B.ByteString -> m a
throwHttpException msg = liftIO (throwIO (HttpException msg))
-- | De-chunk an HTTP chunked-transfer stream, yielding lists of
-- complete chunk bodies.  @partial@ carries bytes left over from the
-- previous buffer (an incomplete chunk frame) and is prepended to the
-- next incoming buffer.
chunkConduit :: B.ByteString -> Conduit B.ByteString IO [B.ByteString]
chunkConduit partial = await >>= maybe (return ()) build
  where
    build
      | partial /= B.empty = yieldChunks . B.append partial
      | otherwise = yieldChunks
    yieldChunks bytes =
      case readChunks bytes of
        Left err -> throwHttpException err
        Right (chunks, rest) -> do
          yield chunks
          -- Recurse with the unconsumed remainder as the new partial.
          chunkConduit rest
-- | Split a buffer of HTTP chunked-transfer data into complete chunk
-- bodies plus the unconsumed remainder of the buffer.
--
-- Each chunk frame has the shape @\<hex size\>\\r\\n\<body\>\\r\\n@.  A
-- chunk is only extracted when the buffer holds the entire frame;
-- anything shorter is handed back as the leftover so the caller can
-- retry once more bytes arrive.  A size line that is not valid
-- hexadecimal yields a 'Left'.
readChunks :: B.ByteString -> Either B.ByteString ([B.ByteString], B.ByteString)
readChunks = loop []
  where
    loop acc buf
      | buf == B.empty = retRight acc B.empty
      | otherwise =
          if p1 == B.empty then Left "Invalid chunk stream."
          else case hexToDec p1 of
                 Left err -> Left err
                 -- Size 0 marks the terminal chunk.
                 Right 0 -> retRight acc B.empty
                 Right size ->
                   -- p2 still begins with the "\r\n" that ended the size
                   -- line, and a complete frame also carries the trailing
                   -- "\r\n" after the body, so the whole frame occupies
                   -- size + 4 bytes of p2.  The previous check
                   -- (B.length p2 > size) accepted buffers ending in the
                   -- middle of a chunk and emitted truncated bodies.
                   if B.length p2 >= size + 4 then
                     let (chunk, rest) = B.splitAt size $ B.drop 2 p2
                     in loop (chunk:acc) $ B.drop 2 rest
                   else retRight acc buf
          where (p1, p2) = B.break (==W._cr) buf
    -- Chunks were accumulated in reverse; restore arrival order.
    retRight acc rest = Right (reverse acc, rest)
    hexToDec = maybe (Left "Invalid hexidecimal number.") (Right . fst) . readHexadecimal
| jm4games/lightstreamer | src/Lightstreamer/Http.hs | mit | 10,613 | 0 | 21 | 3,603 | 2,810 | 1,462 | 1,348 | 201 | 7 |
module App.Build (build) where
import App.Config
import Dagbladet.Headline
import Control.Applicative
import Data.List (nub, sort)
import qualified Codec.Compression.GZip as GZip
import qualified Data.ByteString as SB
import qualified Data.ByteString.Lazy as LB
import qualified Data.Text as T
import qualified Data.Text.IO as T
import System.Directory
import System.FilePath
------------------------------------------------------------------------
-- | Scan the cached source pages and write out a corpus of headlines.
build :: AppConfig -> IO ()
build cfg = do
  headlines <- getHeadlines (appCacheDir cfg)
  writeHeadlines (appDataDir cfg </> "corpus") headlines
------------------------------------------------------------------------
-- | Read every cached page under the root directory and parse out all
-- of its headlines.
getHeadlines :: FilePath -> IO [Headline]
getHeadlines rootDir = do
  files <- getChildren rootDir
  contents <- mapM readCached files
  return (concatMap parseHeadlines contents)
-- | Read a gzip-compressed cache file and return its decompressed
-- contents as a strict 'SB.ByteString'.
readCached :: FilePath -> IO SB.ByteString
readCached fileName = do
  compressed <- LB.readFile fileName
  return (toStrict (GZip.decompress compressed))
  where toStrict = SB.concat . LB.toChunks
-- | Write the headlines to a file, one formatted record per line,
-- de-duplicated and sorted.
writeHeadlines :: FilePath -> [Headline] -> IO ()
writeHeadlines fileName headlines =
  T.writeFile fileName (T.unlines formatted)
  where formatted = map fmt1 (sort (nub headlines))
------------------------------------------------------------------------
-- | Format one headline as a comma-separated record of its publication
-- date, URL and text.
fmt1 :: Headline -> T.Text
fmt1 h = record [hPubDate h, hUrl h, hText h]
-- | Join fields into a single comma-separated record.
record :: [T.Text] -> T.Text
record fields = T.intercalate (T.pack ",") fields
------------------------------------------------------------------------
-- | List the entries of a directory as full paths, excluding the "."
-- and ".." pseudo-entries.
getChildren :: FilePath -> IO [FilePath]
getChildren root = do
  entries <- getDirectoryContents root
  return [root </> e | e <- entries, e /= "." && e /= ".."]
| joachifm/dagbladet | src/App/Build.hs | mit | 1,586 | 0 | 10 | 212 | 430 | 238 | 192 | 33 | 1 |
module Players (
createPlayer,
getPlayerDeck,
getPlayerName,
getPlayerPos,
givePlayerPos,
createPos,
Player,
PlayerPosition
) where
import Cards
-- Defining the possible player positions
-- Needed Enum, Eq and Ord for comparisons and ordering
data PlayerPosition = ScumMaster | Scum | Person | VicePresident | President
deriving(Enum, Eq, Ord, Show)
-- Definition of a user: Has a 'name' and a bunch of 'cards'
data Player = Player {name :: String, userDeck :: Deck, position :: PlayerPosition}
deriving(Eq, Show)
-- | Build a fresh player with the given name and hand; everyone starts
-- out as a plain 'Person'.
createPlayer :: String -> Deck -> Player
createPlayer playerName playerDeck =
  Player { name = playerName, userDeck = playerDeck, position = Person }
-- | Assign a new table position to a player, leaving the name and deck
-- untouched.
givePlayerPos :: Player -> PlayerPosition -> Player
givePlayerPos player pos = player { position = pos }
-- | Parse a position abbreviation into a 'PlayerPosition'.
--
-- Recognised codes: @SM@, @SC@, @PR@, @VP@ and @P@.  Previously any
-- other input died with an anonymous pattern-match failure; it now
-- aborts with an explicit message naming the offending code, which is
-- far easier to diagnose.
createPos :: String -> PlayerPosition
createPos s = case s of
  "SM" -> ScumMaster
  "SC" -> Scum
  "PR" -> Person
  "VP" -> VicePresident
  "P" -> President
  _ -> error ("createPos: unknown position code: " ++ s)
-- Accessors re-exported under explicit getter names.
getPlayerDeck :: Player -> Deck
getPlayerDeck = userDeck

getPlayerName :: Player -> String
getPlayerName = name

getPlayerPos :: Player -> PlayerPosition
getPlayerPos = position
| MFire30/haskellPresidentCardgame | app/src/Players.hs | mit | 1,268 | 0 | 8 | 229 | 318 | 181 | 137 | 32 | 5 |
-- | Handles communicating with the server, to initialize the SoH
-- container.
module Model.Server
( listContainers
, createContainer
, getContainerDetailById
, getContainerDetailByReceipt
, stopContainerById
, stopContainerByReceipt
, pollForContainerAddress
, schedulerHost
, lookupPort
, AjaxException(..)
) where
import Control.Concurrent (threadDelay)
import Control.Exception (throwIO, Exception)
import Control.Lens
import qualified Data.Aeson as Aeson
import Data.ByteString.Lazy (toStrict)
import Data.List (find)
import Data.Text (pack)
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import Data.Typeable (Typeable)
import qualified Data.UUID.Types as UUID
import Import
import qualified JavaScript.JQuery as JQ
import qualified JavaScript.JQuery.Internal as JQ
import SchoolOfHaskell.Scheduler.API
-- | List the ids of all containers known to the scheduler.
listContainers :: IO [ContainerId]
listContainers =
  sendRequestJsonResponse "containers" "" JQ.GET
-- | Ask the scheduler to create a container; the returned receipt can
-- later be used to look the container up or stop it.
createContainer :: ContainerSpec -> IO ContainerReceipt
createContainer spec =
  sendRequestJsonResponse "containers" (encode spec) JQ.POST
-- | Fetch details for the container identified by the given key.
getContainerDetail :: Text -> IO ContainerDetail
getContainerDetail k =
  sendRequestJsonResponse ("containers/" <> encodeURIComponent k) "" JQ.GET
-- | Ask the scheduler to stop the container identified by the given key.
stopContainer :: Text -> IO ()
stopContainer k =
  sendRequestJsonResponse ("containers/" <> encodeURIComponent k) "" JQ.DELETE
-- | 'getContainerDetail' keyed by container id.
getContainerDetailById :: ContainerId -> IO ContainerDetail
getContainerDetailById cid =
  getContainerDetail (cid ^. ciID)
-- | 'getContainerDetail' keyed by creation receipt.
getContainerDetailByReceipt :: ContainerReceipt -> IO ContainerDetail
getContainerDetailByReceipt cr =
  getContainerDetail (pack (UUID.toString (cr ^. crID)))
-- | 'stopContainer' keyed by container id.
stopContainerById :: ContainerId -> IO ()
stopContainerById cid =
  stopContainer (cid ^. ciID)
-- | 'stopContainer' keyed by creation receipt.
stopContainerByReceipt :: ContainerReceipt -> IO ()
stopContainerByReceipt cr =
  stopContainer (pack (UUID.toString (cr ^. crID)))
-- | Poll the scheduler until the container reports an address, giving
-- up after @n@ attempts with one second between retries.
pollForContainerAddress :: Int -> IO ContainerDetail -> IO (Text, PortMappings)
pollForContainerAddress n getContainer
  | n <= 0 = fail "Ran out of retries while initializing soh-runner container"
  | otherwise = do
      detail <- getContainer
      case detail ^. cdAddress of
        Just address -> return address
        Nothing -> do
          -- Still pending: wait one second and try again.
          threadDelay (1000 * 1000)
          pollForContainerAddress (n - 1) getContainer
-- TODO: allow page to determine scheduler Host.
-- | isNull schedulerHost' || isUndefined schedulerHost' =
-- | otherwise = Just (fromJSString schedulerHost')
--
-- foreign import javascript unsafe
-- "window['schedulerHost']"
-- schedulerHost' :: JSString
-- FIXME: when looking up the backend port, there is no reasonable
-- recovery if it isn't in the association list. So, once we have
-- logic for connection retry, this will need to be a variety of
-- exception which aborts retry.
--
-- (Not a big deal though, it shouldn't occur).
-- | Translate a container-internal port to its externally mapped port.
-- Aborts when no mapping exists (should not occur; see note above).
lookupPort :: Int -> PortMappings -> Int
lookupPort innerPort (PortMappings xs) =
  case [outer | (inner, outer) <- xs, inner == innerPort] of
    (outer:_) -> outer
    [] -> error ("Couldn't find port mapping for " ++ show innerPort)
-- | Send a request to the scheduler and JSON-decode the response.
sendRequestJsonResponse :: Aeson.FromJSON a => Text -> JSString -> JQ.Method -> IO a
sendRequestJsonResponse route body method = do
  raw <- sendRequest route body method
  return (decode raw)
-- | Issue an AJAX call against the scheduler and return the raw
-- response body, sending the given bytes as a JSON payload.
sendRequest :: Text -> JSString -> JQ.Method -> IO JSString
sendRequest route body method =
  ajax (schedulerHost <> "/" <> route) body settings
  where
    settings = JQ.AjaxSettings
      { JQ.asContentType = "application/json"
      , JQ.asCache = False
      , JQ.asIfModified = False
      , JQ.asMethod = method
      }
-- | JSON-encode a value into a 'JSString'.
encode :: Aeson.ToJSON a => a -> JSString
encode value = toJSString (decodeUtf8 (toStrict (Aeson.encode value)))

-- | Decode a JSON 'JSString', crashing with the parse error message on
-- malformed input.
decode :: Aeson.FromJSON a => JSString -> a
decode = either error id . Aeson.eitherDecodeStrict . encodeUtf8 . fromJSString
-- Copied from ghcjs-jquery with the following modifications:
--
-- * Throws 'AjaxException' when the HTTP status is >= 300.
--
-- * Uses JSStrings instead of converting to and from Text.
--
-- * Sends a raw body rather than parameters.
--
-- * Adds an 'Accept' : 'application/json' header.
--
ajax :: Text -> JSString -> JQ.AjaxSettings -> IO JSString
ajax url d s = do
  os <- toJSRef s
  setProp ("data"::JSString) d os
  setProp ("processData"::JSString) (toJSBool False) os
  headers <- newObj
  setProp ("headers"::JSString) headers os
  setProp ("Accept"::JSString) ("application/json"::JSString) headers
  arr <- JQ.jq_ajax (toJSString url) os
  status <- fromMaybe 0 <$> (fromJSRef =<< getProp ("status"::JSString) arr)
  if status >= 300
    then do
      statusTextRef <- getProp ("statusText"::JSString) arr
      -- NOTE: without these checks, I get weird runtime exceptions
      -- for some statuses (e.g. 502)..
      statusText <- if isNull statusTextRef || isUndefined statusTextRef
                      then return ""
                      else fromMaybe "" <$> fromJSRef statusTextRef
      throwIO (AjaxException status statusText)
    else getProp ("data"::JSString) arr
-- | Thrown by 'ajax' when the server answers with an HTTP status >= 300.
data AjaxException = AjaxException
  { aeStatus :: Int -- ^ numeric HTTP status code
  , aeStatusText :: Text -- ^ status text (may be empty; see 'ajax')
  } deriving (Show, Typeable)
instance Exception AjaxException
-- | Percent-encode a URI component via the browser's
-- @encodeURIComponent@.
encodeURIComponent :: Text -> Text
encodeURIComponent = fromJSString . encodeURIComponent' . toJSString
foreign import javascript unsafe "encodeURIComponent"
  encodeURIComponent' :: JSString -> JSString
| fpco/schoolofhaskell | soh-client/src/Model/Server.hs | mit | 5,475 | 4 | 16 | 1,065 | 1,290 | 685 | 605 | 108 | 3 |
-- | Turn the validation functions into actual 'Validator's.
--
-- This is frankly a lot of busywork. It can perhaps be moved into the
-- validator modules themselves once we're sure this is the right design.
module JSONSchema.Validator.Draft4
( module JSONSchema.Validator.Draft4
, module Export
) where
import Import
import qualified Data.HashMap.Strict as HM
import qualified Data.List.NonEmpty as NE
import JSONSchema.Validator.Draft4.Any as Export
import JSONSchema.Validator.Draft4.Array as Export
import JSONSchema.Validator.Draft4.Number as Export
import JSONSchema.Validator.Draft4.Object as Export
import JSONSchema.Validator.Draft4.String as Export
import JSONSchema.Validator.Reference (BaseURI(..), Scope(..))
import JSONSchema.Validator.Types (Validator(..))
-- | For internal use.
--
-- Apply a validation function when the validator is present and the
-- incoming JSON decodes to the type it understands; in every other
-- case report no failures.
run :: FromJSON b => (a -> b -> Maybe c) -> Maybe a -> Value -> [c]
run _ Nothing _ = mempty
run f (Just a) b =
  either (const mempty) check (fromJSONEither b)
  where
    check c = maybe mempty (: []) (f a c)
-- | For internal use.  A validator that embeds no subschemas.
noEmbedded :: a -> ([b], [b])
noEmbedded _ = (mempty, mempty)
--------------------------------------------------
-- * Numbers
--------------------------------------------------
-- | Lifts 'multipleOfVal' into a 'Validator'; embeds no subschemas.
multipleOfValidator :: Validator a (Maybe MultipleOf) MultipleOfInvalid
multipleOfValidator = Validator noEmbedded (run multipleOfVal)
-- | Lifts 'maximumVal' into a 'Validator'; embeds no subschemas.
maximumValidator :: Validator a (Maybe Maximum) MaximumInvalid
maximumValidator = Validator noEmbedded (run maximumVal)
-- | Lifts 'minimumVal' into a 'Validator'; embeds no subschemas.
minimumValidator :: Validator a (Maybe Minimum) MinimumInvalid
minimumValidator = Validator noEmbedded (run minimumVal)
--------------------------------------------------
-- * Strings
--------------------------------------------------
-- | Lifts 'maxLengthVal' into a 'Validator'; embeds no subschemas.
maxLengthValidator :: Validator a (Maybe MaxLength) MaxLengthInvalid
maxLengthValidator = Validator noEmbedded (run maxLengthVal)
-- | Lifts 'minLengthVal' into a 'Validator'; embeds no subschemas.
minLengthValidator :: Validator a (Maybe MinLength) MinLengthInvalid
minLengthValidator = Validator noEmbedded (run minLengthVal)
-- | Lifts 'patternVal' into a 'Validator'; embeds no subschemas.
patternValidator :: Validator a (Maybe PatternValidator) PatternInvalid
patternValidator = Validator noEmbedded (run patternVal)
--------------------------------------------------
-- * Arrays
--------------------------------------------------
-- | Lifts 'maxItemsVal' into a 'Validator'; embeds no subschemas.
maxItemsValidator :: Validator a (Maybe MaxItems) MaxItemsInvalid
maxItemsValidator = Validator noEmbedded (run maxItemsVal)
-- | Lifts 'minItemsVal' into a 'Validator'; embeds no subschemas.
minItemsValidator :: Validator a (Maybe MinItems) MinItemsInvalid
minItemsValidator = Validator noEmbedded (run minItemsVal)
-- | Lifts 'uniqueItemsVal' into a 'Validator'; embeds no subschemas.
uniqueItemsValidator :: Validator a (Maybe UniqueItems) UniqueItemsInvalid
uniqueItemsValidator = Validator noEmbedded (run uniqueItemsVal)
-- TODO: Add tests to the language agnostic test suite to
-- make sure @"additionalItems"@ subschemas are embedded correctly.
-- | Validator for @"items"@ / @"additionalItems"@.  The embedded
-- subschemas are: the single @"items"@ object or every element of the
-- @"items"@ array, plus an @"additionalItems"@ object when present.
itemsRelatedValidator
  :: (schema -> Value -> [err])
  -> Validator schema (ItemsRelated schema) (ItemsRelatedInvalid err)
itemsRelatedValidator f =
  Validator
    (\a -> ( mempty
           , case _irItems a of
               Just (ItemsObject b) -> pure b
               Just (ItemsArray cs) -> cs
               Nothing -> mempty
             <> case _irAdditional a of
                  Just (AdditionalObject b) -> pure b
                  _ -> mempty
           ))
    (\a b -> case fromJSONEither b of
      Left _ -> mempty
      Right c -> itemsRelatedVal f a c)
--------------------------------------------------
-- * Objects
--------------------------------------------------
-- | Lifts 'maxPropertiesVal' into a 'Validator'; embeds no subschemas.
maxPropertiesValidator
  :: Validator
       a
       (Maybe MaxProperties)
       MaxPropertiesInvalid
maxPropertiesValidator = Validator noEmbedded (run maxPropertiesVal)
-- | Lifts 'minPropertiesVal' into a 'Validator'; embeds no subschemas.
minPropertiesValidator
  :: Validator
       a
       (Maybe MinProperties)
       MinPropertiesInvalid
minPropertiesValidator = Validator noEmbedded (run minPropertiesVal)
-- | Lifts 'requiredVal' into a 'Validator'; embeds no subschemas.
requiredValidator :: Validator a (Maybe Required) RequiredInvalid
requiredValidator = Validator noEmbedded (run requiredVal)
-- | Validator for @"dependencies"@.  Only schema dependencies embed
-- subschemas; property dependencies are filtered out by
-- 'checkDependency'.
dependenciesValidator
  :: (schema -> Value -> [err])
  -> Validator
       schema
       (Maybe (DependenciesValidator schema))
       (DependenciesInvalid err)
dependenciesValidator f =
  Validator
    (maybe mempty ( (\a -> (mempty, a))
                  . catMaybes . fmap checkDependency
                  . HM.elems . _unDependenciesValidator
                  ))
    (run (dependenciesVal f))
  where
    checkDependency :: Dependency schema -> Maybe schema
    checkDependency (PropertyDependency _) = Nothing
    checkDependency (SchemaDependency s) = Just s
-- | Validator for @"properties"@ / @"patternProperties"@ /
-- @"additionalProperties"@.  Embedded subschemas are every value of the
-- first two maps plus an @"additionalProperties"@ object when present.
propertiesRelatedValidator
  :: (schema -> Value -> [err])
  -> Validator
       schema
       (PropertiesRelated schema)
       (PropertiesRelatedInvalid err)
propertiesRelatedValidator f =
  Validator
    (\a -> ( mempty
           , HM.elems (fromMaybe mempty (_propProperties a))
             <> HM.elems (fromMaybe mempty (_propPattern a))
             <> case _propAdditional a of
                  Just (AdditionalPropertiesObject b) -> [b]
                  _ -> mempty
           ))
    (\a b -> case fromJSONEither b of
      Left _ -> mempty
      Right c -> maybeToList (propertiesRelatedVal f a c))
-- | The contents of a schema's @"definitions"@ keyword.
newtype Definitions schema
  = Definitions { _unDefinitions :: HashMap Text schema }
  deriving (Eq, Show)
instance FromJSON schema => FromJSON (Definitions schema) where
  parseJSON = withObject "Definitions" $ \o ->
    Definitions <$> o .: "definitions"
-- | @"definitions"@ performs no validation of its own; it only embeds
-- its subschemas so references into them can be resolved.
definitionsEmbedded
  :: Validator
       schema
       (Maybe (Definitions schema))
       err
definitionsEmbedded =
  Validator
    (\a -> case a of
      Just (Definitions b) -> (mempty, HM.elems b)
      Nothing -> (mempty, mempty))
    (const (const mempty))
--------------------------------------------------
-- * Any
--------------------------------------------------
-- | Lifts 'refVal' into a 'Validator', threading through the reference
-- resolver, scope updater, continuation, and cycle-detection state.
refValidator
  :: (FromJSON schema, ToJSON schema)
  => (Text -> Maybe schema)
  -> (BaseURI -> schema -> BaseURI)
  -> (VisitedSchemas -> Scope schema -> schema -> Value -> [err])
  -> VisitedSchemas
  -> Scope schema
  -> Validator a (Maybe Ref) (RefInvalid err)
refValidator getRef updateScope f visited scope =
  Validator
    noEmbedded
    (run (refVal getRef updateScope f visited scope))
-- | Lifts 'enumVal' into a 'Validator'; embeds no subschemas.
enumValidator :: Validator a (Maybe EnumValidator) EnumInvalid
enumValidator = Validator noEmbedded (run enumVal)
-- | Lifts 'typeVal' into a 'Validator'; embeds no subschemas.
typeValidator :: Validator a (Maybe TypeContext) TypeValidatorInvalid
typeValidator = Validator noEmbedded (run typeVal)
-- | Validator for @"allOf"@; embeds every subschema of the conjunction.
allOfValidator
  :: (schema -> Value -> [err])
  -> Validator schema (Maybe (AllOf schema)) (AllOfInvalid err)
allOfValidator f =
  Validator
    (\a -> case a of
      Just (AllOf b) -> (NE.toList b, mempty)
      Nothing -> (mempty, mempty))
    (run (allOfVal f))
-- | Validator for @"anyOf"@; embeds every subschema of the disjunction.
anyOfValidator
  :: (schema -> Value -> [err])
  -> Validator schema (Maybe (AnyOf schema)) (AnyOfInvalid err)
anyOfValidator f =
  Validator
    (\a -> case a of
      Just (AnyOf b) -> (NE.toList b, mempty)
      Nothing -> (mempty, mempty))
    (run (anyOfVal f))
-- | Validator for @"oneOf"@; embeds every alternative subschema.
oneOfValidator
  :: ToJSON schema
  => (schema -> Value -> [err])
  -> Validator schema (Maybe (OneOf schema)) (OneOfInvalid err)
oneOfValidator f =
  Validator
    (\a -> case a of
      Just (OneOf b) -> (NE.toList b, mempty)
      Nothing -> (mempty, mempty))
    (run (oneOfVal f))
-- | Validator for @"not"@; embeds the single negated subschema.
notValidator
  :: ToJSON schema
  => (schema -> Value -> [err])
  -> Validator
       schema
       (Maybe (NotValidator schema))
       NotValidatorInvalid
notValidator f =
  Validator
    (\a -> case a of
      Just (NotValidator b) -> (pure b, mempty)
      Nothing -> (mempty, mempty))
    (run (notVal f))
| seagreen/hjsonschema | src/JSONSchema/Validator/Draft4.hs | mit | 8,377 | 0 | 15 | 2,273 | 2,143 | 1,129 | 1,014 | 175 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module TestImport
( module Yesod.Test
, module Model
, module Database.Persist
, runDB
, Specs
) where
import Yesod.Test
import Database.Persist.MongoDB hiding (master)
import Database.Persist hiding (get)
import Model
-- | Yesod test specs that carry a MongoDB connection.
type Specs = SpecsConn Connection
-- | Run a persistent MongoDB action inside a test spec, using the
-- default MongoDB pool runner.
runDB :: Action IO a -> OneSpec Connection a
runDB = runDBRunner runMongoDBPoolDef
| ShaneKilkelly/YesodExample | tests/TestImport.hs | mit | 399 | 0 | 6 | 79 | 96 | 58 | 38 | 14 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Hakyll.Web.Routes.Permalink
( convertPermalink
, permalinkRoute
, permalinkRouteWithDefault
, stripIndexFile
) where
import Data.List (stripPrefix)
import Data.List.Extra (stripSuffix)
import Data.Maybe (fromMaybe)
import Hakyll
import System.FilePath ((</>))
-- By a quirk of history, permalinks for PLFA are written as, e.g.,
-- "/DeBruijn/".  We turn such a permalink into an output path by
-- dropping the leading "/" (when present) and appending "index.html".
convertPermalink :: FilePath -> FilePath
convertPermalink link = trimmed </> "index.html"
  where
    trimmed = case link of
      '/' : rest -> rest
      _          -> link
-- | Route by the item's @permalink@ metadata field, failing loudly when
-- the field is missing.
permalinkRoute :: Routes
permalinkRoute = permalinkRouteWithDefault (error "Missing field 'permalink'.")
-- | Route by the item's @permalink@ metadata field, falling back to the
-- given route when the field is absent.
permalinkRouteWithDefault :: Routes -> Routes
permalinkRouteWithDefault def = metadataRoute $ \metadata ->
  maybe def (constRoute . convertPermalink) (lookupString "permalink" metadata)
-- Rewrite every URL in a document so a trailing "index.html" is removed.
stripIndexFile :: String -> String
stripIndexFile = withUrls dropIndex
  where
    dropIndex url = case stripSuffix "index.html" url of
      Just trimmed -> trimmed
      Nothing      -> url
| wenkokke/sf | hs/Hakyll/Web/Routes/Permalink.hs | mit | 1,115 | 0 | 9 | 200 | 213 | 120 | 93 | 21 | 1 |
module GHCJS.DOM.SVGPaint (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/SVGPaint.hs | mit | 38 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Oden.Parser.ParseExprSpec where
import Test.Hspec
import Oden.Identifier
import Oden.Parser
import Oden.SourceInfo
import Oden.Syntax
import Oden.Assertions
-- | Shorthand for building source-info positions in "<stdin>" input.
src :: Line -> Column -> SourceInfo
src line col = SourceInfo (Position "<stdin>" line col)
spec :: Spec
spec = describe "parseExpr" $ do
it "parses identifier" $
parseExpr "foo"
`shouldSucceedWith`
Symbol (src 1 1) (Identifier "foo")
it "parses single member access" $
parseExpr "foo.bar"
`shouldSucceedWith`
MemberAccess
(src 1 1)
(Symbol (src 1 1) (Identifier "foo"))
(Symbol (src 1 5) (Identifier "bar"))
it "parses multiple member accesses" $
parseExpr "foo.bar.baz"
`shouldSucceedWith`
MemberAccess
(src 1 1)
(MemberAccess
(src 1 1)
(Symbol (src 1 1) (Identifier "foo"))
(Symbol (src 1 5) (Identifier "bar")))
(Symbol (src 1 9) (Identifier "baz"))
it "parses protocol method reference" $
parseExpr "Foo::bar"
`shouldSucceedWith`
ProtocolMethodReference
(src 1 1)
(Symbol (src 1 1) (Identifier "Foo"))
(Symbol (src 1 6) (Identifier "bar"))
it "parses integer literal" $
parseExpr "123"
`shouldSucceedWith`
Literal (src 1 1) (Int 123)
it "parses float literal" $
parseExpr "123.91239129312931293"
`shouldSucceedWith`
Literal (src 1 1) (Float 123.91239129312931293)
it "parses false literal" $
parseExpr "false"
`shouldSucceedWith`
Literal (src 1 1) (Bool False)
it "parses true literal" $
parseExpr "true"
`shouldSucceedWith`
Literal (src 1 1) (Bool True)
it "parses string literal" $
parseExpr "\"foo bar 123\""
`shouldSucceedWith`
Literal (src 1 1) (String "foo bar 123")
it "parses fn expression" $
parseExpr "(x) -> x"
`shouldSucceedWith`
Fn (src 1 1) [NameBinding (src 1 2) (Identifier "x")] (Symbol (src 1 8) (Identifier "x"))
it "parses multi-arg fn expression" $
parseExpr "(x, y, z) -> x"
`shouldSucceedWith`
Fn
(src 1 1)
[NameBinding (src 1 2) (Identifier "x"), NameBinding (src 1 5) (Identifier "y"), NameBinding (src 1 8) (Identifier "z")]
(Symbol (src 1 14) (Identifier "x"))
it "parses no-arg fn expression" $
parseExpr "() -> x"
`shouldSucceedWith`
Fn (src 1 1) [] (Symbol (src 1 7) (Identifier "x"))
it "parses if expression" $
parseExpr "if a then b else c"
`shouldSucceedWith`
If
(src 1 1)
(Symbol (src 1 4) (Identifier "a"))
(Symbol (src 1 11) (Identifier "b"))
(Symbol (src 1 18) (Identifier "c"))
it "parses empty block as unit literal" $
parseExpr "()"
`shouldSucceedWith`
Literal (src 1 1) Unit
it "parses expression in parens" $
parseExpr "(())"
`shouldSucceedWith`
Literal (src 1 2) Unit
it "parses tuple with two elements" $
parseExpr "(1, ())"
`shouldSucceedWith`
Tuple
(src 1 1)
(Literal (src 1 2) (Int 1))
(Literal (src 1 5) Unit)
[]
it "parses tuple with three elements" $
parseExpr "(1, (), 2)"
`shouldSucceedWith`
Tuple
(src 1 1)
(Literal (src 1 2) (Int 1))
(Literal (src 1 5) Unit)
[Literal (src 1 9) (Int 2)]
it "parses block of symbols" $
parseExpr "{\n x\n y\n z\n}"
`shouldSucceedWith`
Block (src 1 1) [
Symbol (src 2 3) (Identifier "x"),
Symbol (src 3 3) (Identifier "y"),
Symbol (src 4 3) (Identifier "z")
]
it "parses let binding and block of symbol" $
parseExpr "let x = y in { x }"
`shouldSucceedWith`
Let
(src 1 1)
[LetPair (src 1 5) (NameBinding (src 1 5) (Identifier "x")) (Symbol (src 1 9) (Identifier "y"))]
(Block (src 1 14) [Symbol (src 1 16) (Identifier "x")])
it "parses block with let binding and block of symbol" $
parseExpr "{\n let x = y in {\n x\n }\n}"
`shouldSucceedWith`
Block (src 1 1) [
Let
(src 2 3)
[LetPair (src 2 7) (NameBinding (src 2 7) (Identifier "x")) (Symbol (src 2 11) (Identifier "y"))]
(Block (src 2 16) [Symbol (src 3 5) (Identifier "x")])
]
it "fails on if expression with newlines" pending
it "fails on let expression with newlines" pending
it "parses let expression" $
parseExpr "let x = y in z"
`shouldSucceedWith`
Let
(src 1 1)
[LetPair (src 1 5) (NameBinding (src 1 5) (Identifier "x")) (Symbol (src 1 9) (Identifier "y"))]
(Symbol (src 1 14) (Identifier "z"))
it "parses unary negative operator application" $
parseExpr "-x"
`shouldSucceedWith`
UnaryOp
(src 1 1)
Negate
(Symbol (src 1 2) (Identifier "x"))
it "parses binary operator application" $
parseExpr "x + y"
`shouldSucceedWith`
BinaryOp
(src 1 3)
Add
(Symbol (src 1 1) (Identifier "x"))
(Symbol (src 1 5) (Identifier "y"))
it "parses string concat application" $
parseExpr "x ++ y"
`shouldSucceedWith`
BinaryOp
(src 1 3)
MonoidApply
(Symbol (src 1 1) (Identifier "x"))
(Symbol (src 1 6) (Identifier "y"))
it "parses fn application with string concatenation" $
parseExpr "foo(y) ++ \"!\""
`shouldSucceedWith`
BinaryOp
(src 1 8)
MonoidApply
(Application
(src 1 4)
(Symbol (src 1 1) (Identifier "foo"))
[Symbol (src 1 5) (Identifier "y")])
(Literal (src 1 11) (String "!"))
it "parses package member fn application" $
parseExpr "foo.Bar(y)"
`shouldSucceedWith`
Application
(src 1 8)
(MemberAccess
(src 1 1)
(Symbol (src 1 1) (Identifier "foo"))
(Symbol (src 1 5) (Identifier "Bar")))
[Symbol (src 1 9) (Identifier "y")]
it "parses fn application member access" $
pending
{-
parseExpr "foo(y).Bar"
`shouldSucceedWith`
MemberAccess
(src 1 8)
(Application
(src 1 1)
(Symbol (src 1 1) (Identifier "foo"))
[Symbol (src 1 5) (Identifier "y")])
(Symbol (src 1 8) (Identifier "Bar"))
-}
it "parses protocol method application" $
parseExpr "Foo::bar(y)"
`shouldSucceedWith`
Application
(src 1 9)
(ProtocolMethodReference
(src 1 1)
(Symbol (src 1 1) (Identifier "Foo"))
(Symbol (src 1 6) (Identifier "bar")))
[Symbol (src 1 10) (Identifier "y")]
it "parses single-arg fn application" $
parseExpr "x(y)"
`shouldSucceedWith`
Application
(src 1 2)
(Symbol (src 1 1) (Identifier "x"))
[Symbol (src 1 3) (Identifier "y")]
it "parses single-arg fn application" $
parseExpr "((x) -> x)(y)"
`shouldSucceedWith`
Application
(src 1 11)
(Fn (src 1 2) [NameBinding (src 1 3) (Identifier "x")] (Symbol (src 1 9) (Identifier "x")))
[Symbol (src 1 12) (Identifier "y")]
it "ignores whitespace" $
parseExpr "x( \n\n y \r\n\t )"
`shouldSucceedWith`
Application
(src 1 2)
(Symbol (src 1 1) (Identifier "x"))
[Symbol (src 3 2) (Identifier "y")]
it "ignores comments" $
parseExpr "\n// foobar\nx"
`shouldSucceedWith`
Symbol (src 3 1) (Identifier "x")
it "ignores multi-line comments" $
parseExpr "/*\n\n foo //whatever\tbar\n*/x"
`shouldSucceedWith`
Symbol (src 4 3) (Identifier "x")
it "parses slice literal" $
parseExpr "[]{x, y, z}"
`shouldSucceedWith`
Slice (src 1 1) [
Symbol (src 1 4) (Identifier "x"),
Symbol (src 1 7) (Identifier "y"),
Symbol (src 1 10) (Identifier "z")
]
it "parses slice subscript" $
parseExpr "a[b]"
`shouldSucceedWith`
Subscript (src 1 1)
(Symbol (src 1 1) (Identifier "a"))
[Singular (Symbol (src 1 3) (Identifier "b"))]
it "parses sublices with closed beginning and end" $
parseExpr "a[b:c]"
`shouldSucceedWith`
Subscript (src 1 1)
(Symbol (src 1 1) (Identifier "a"))
[Range (Symbol (src 1 3) (Identifier "b"))
(Symbol (src 1 5) (Identifier "c"))]
it "parses subslices with open start" $
parseExpr "a[:c]"
`shouldSucceedWith`
Subscript (src 1 1)
(Symbol (src 1 1) (Identifier "a"))
[RangeTo (Symbol (src 1 4) (Identifier "c"))]
it "parses subslices with open ending" $
parseExpr "a[b:]"
`shouldSucceedWith`
Subscript (src 1 1)
(Symbol (src 1 1) (Identifier "a"))
[RangeFrom (Symbol (src 1 3) (Identifier "b"))]
it "fails on subslices with open start and end" $
shouldFail $ parseExpr "a[:]"
it "parses record initializer" $
parseExpr "{ size = 1 }"
`shouldSucceedWith`
RecordInitializer (src 1 1)
[FieldInitializer (src 1 3) (Identifier "size") (Literal (src 1 10) (Int 1))]
it "parses record initializer containing multiple fields" $
parseExpr "{ size = 1, foo = \"foo\" }"
`shouldSucceedWith`
RecordInitializer (src 1 1)
[FieldInitializer (src 1 3) (Identifier "size") (Literal (src 1 10) (Int 1))
,FieldInitializer (src 1 13) (Identifier "foo") (Literal (src 1 19) (String "foo"))]
| oden-lang/oden | test/Oden/Parser/ParseExprSpec.hs | mit | 9,036 | 0 | 17 | 2,430 | 3,315 | 1,620 | 1,695 | 268 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Smutt.HTTP.Header.MediaType (
MediaType (..)
, readMediaType
) where
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as T
import Control.Arrow
import Control.Monad
-- | A parsed Internet media type: major type, subtype, and any
-- attribute/value parameters (e.g. charset).
data MediaType = MediaType {
    majorType :: Text
  , subType :: Text
  , parameters :: [(Text,Text)]
  } deriving (Eq)

-- | Parse a media-type header value such as @text\/html; charset=utf-8@.
--
-- Major type and subtype are case-folded; parameter names and values
-- are whitespace-trimmed but keep their case.  Returns 'Nothing' when
-- the input is empty or lacks a @\/@-separated subtype.
readMediaType :: Text -> Maybe MediaType
readMediaType str
  | T.null str = Nothing
  | T.null sub = Nothing
  | otherwise = Just (MediaType major sub params)
  where
    typePart : paramParts = T.split (== ';') str
    (rawMajor, rawSub) = T.break (== '/') typePart
    major = T.toCaseFold rawMajor
    -- rawSub still carries the '/' separator; drop it before folding.
    sub = T.toCaseFold (T.drop 1 rawSub)
    params = map parseParam paramParts
    parseParam p =
      let (attr, val) = T.break (== '=') p
      in (T.strip attr, T.strip (T.drop 1 val))
| black0range/Smutt | src/Smutt/HTTP/Header/MediaType.hs | mit | 855 | 0 | 11 | 186 | 267 | 153 | 114 | 23 | 2 |
module RemoveDuplicates where
import Data.List
distinct :: [Int] -> [Int]
distinct = nub | cojoj/Codewars | Haskell/Codewars.hsproj/RemoveDuplicates.hs | mit | 94 | 0 | 6 | 18 | 29 | 18 | 11 | 4 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.UI.GLUT.Debugging
-- Copyright : (c) Sven Panne 2002-2005
-- License : BSD-style (see the file libraries/GLUT/LICENSE)
--
-- Maintainer : sven.panne@aedion.de
-- Stability : stable
-- Portability : portable
--
-- This module contains a simple utility routine to report any pending GL
-- errors.
--
--------------------------------------------------------------------------------
module Graphics.UI.GLUT.Debugging (
reportErrors
) where
import Data.StateVar
import Graphics.Rendering.OpenGL ( Error(..), errors )
import System.Environment
import System.IO
--------------------------------------------------------------------------------
-- | Report any pending GL errors to stderr (typically the console); a
-- no-op when none are pending.  Reading 'errors' resets the error
-- flags, so no pending errors remain afterwards.
reportErrors :: IO ()
reportErrors = do
  errs <- get errors
  mapM_ reportError errs

-- | Print one GL error as a warning line tagged with the program name.
reportError :: Error -> IO ()
reportError (Error _ msg) = do
  pn <- getProgName
  hPutStrLn stderr (concat ["GLUT: Warning in ", pn, ": GL error: ", msg])
| ducis/haAni | hs/common/Graphics/UI/GLUT/Debugging.hs | gpl-2.0 | 1,248 | 0 | 11 | 193 | 150 | 90 | 60 | 12 | 1 |
{-| Solver for N+1 cluster errors
-}
{-
Copyright (C) 2009, 2010, 2011 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Main (main) where
import Data.List
import Data.Maybe (isJust, fromJust)
import Monad
import System (exitWith, ExitCode(..))
import System.IO
import qualified System
import qualified Ganeti.HTools.Cluster as Cluster
import Ganeti.HTools.CLI
import Ganeti.HTools.IAlloc
import Ganeti.HTools.Types
import Ganeti.HTools.Loader (RqType(..), Request(..), ClusterData(..))
import Ganeti.HTools.ExtLoader (loadExternalData)
-- | Options list and functions
-- Command-line options accepted by this tool (node display, data file
-- override, node simulation, plus the standard version/help flags).
options :: [OptType]
options =
    [ oPrintNodes
    , oDataFile
    , oNodeSim
    , oShowVer
    , oShowHelp
    ]
-- | Validate an allocation solution for the given request type.
-- Fails (in the monad) when no solution was produced, reporting the
-- collected log messages; evacuation requests may carry several
-- solutions, every other request type must carry exactly one.
processResults :: (Monad m) =>
                  RqType -> Cluster.AllocSolution
               -> m Cluster.AllocSolution
processResults rqtype as =
    case (Cluster.asSolutions as, rqtype) of
      ([], _)         -> fail $ intercalate ", " (Cluster.asLog as)
      (_, Evacuate _) -> return as
      ([_], _)        -> return as
      _ -> fail "Internal error: multiple allocation solutions"
-- | Process a request and return new node lists
-- | Dispatch an IAllocator request to the matching multi-group cluster
-- operation, yielding the allocation solution.
processRequest :: Request
               -> Result Cluster.AllocSolution
processRequest (Request rqtype (ClusterData gl nl il _)) =
    case rqtype of
      Allocate xi reqn -> Cluster.tryMGAlloc gl nl il xi reqn
      Relocate idx reqn exnodes ->
          Cluster.tryMGReloc gl nl il idx reqn exnodes
      Evacuate exnodes -> Cluster.tryMGEvac gl nl il exnodes
-- | Reads the request from the data file(s)
-- | Reads the request from the data file(s).
--
-- Exits with failure when no input file is given or when the input
-- cannot be parsed. When a data file or node simulation is requested
-- via the options, the cluster data from the input is replaced by the
-- externally-loaded data while keeping the parsed request type.
readRequest :: Options -> [String] -> IO Request
readRequest opts args = do
  when (null args) $ do
         hPutStrLn stderr "Error: this program needs an input file."
         exitWith $ ExitFailure 1
  input_data <- readFile (head args)
  r1 <- case parseData input_data of
          Bad err -> do
            hPutStrLn stderr $ "Error: " ++ err
            exitWith $ ExitFailure 1
          Ok rq -> return rq
  -- override the cluster description with external data if requested
  if isJust (optDataFile opts) || (not . null . optNodeSim) opts
    then do
      cdata <- loadExternalData opts
      let Request rqt _ = r1
      return $ Request rqt cdata
    else return r1
-- | Main function.
main :: IO ()
main = do
  cmd_args <- System.getArgs
  (opts, args) <- parseOpts cmd_args "hail" options
  let shownodes = optShowNodes opts
  request <- readRequest opts args
  let Request rq cdata = request
  -- optionally dump the initial node status before solving
  when (isJust shownodes) $ do
         hPutStrLn stderr "Initial cluster status:"
         hPutStrLn stderr $ Cluster.printNodes (cdNodes cdata)
                (fromJust shownodes)
  -- solve the request and validate the resulting solution
  let sols = processRequest request >>= processResults rq
  -- turn the Result into the (success-flag, message, solutions) triple
  -- expected by the IAllocator response format
  let (ok, info, rn) =
          case sols of
            Ok as -> (True, "Request successful: " ++
                            intercalate ", " (Cluster.asLog as),
                      Cluster.asSolutions as)
            Bad s -> (False, "Request failed: " ++ s, [])
      resp = formatResponse ok info rq rn
  putStrLn resp
| ganeti/htools | hail.hs | gpl-2.0 | 3,805 | 0 | 18 | 1,008 | 909 | 457 | 452 | 78 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
module Lambda.Quiz (generator) where
import Lambda.Type
import Lambda.Step
import qualified Lambda.Roll
import qualified Lambda.Derive.Instance as I
import qualified Lambda.Derive.Config as C
import Autolib.TES.Identifier
import Autolib.Set
import Autolib.Size
import Autolib.Schichten
import Autolib.Reporter
import Autolib.ToDoc
import System.Random ( randomRIO )
import Data.Ix
import Data.Maybe
import Control.Monad ( guard )
-- Generate a quiz instance: roll a random start term, then look (among
-- at most 1000 candidate derivations) for one of exactly the configured
-- length that is also nontrivial; retry with a fresh term when none is
-- found. NOTE(review): the 'key' argument is only passed through to the
-- recursive retry and otherwise unused.
generator conf key = do
    s <- randomRIO $ C.start_size_range conf
    t <- Lambda.Roll.application s $ C.free_variables conf
    let ds = do
          d <- take 1000 $ derivations conf t
          guard $ I.steps d == Just ( C.derivation_length conf )
          guard $ nontrivial d
          return d
    case ds of
        [] -> generator conf key
        d : _ -> return d
-- A derivation is "nontrivial" when the pure leftmost strategy does NOT
-- reproduce it, i.e. the reporter below fails (yields no result).
-- Note: the outer 'result' is the Reporter extractor; the 'result' bound
-- inside the do-block is a distinct local (the derived term).
nontrivial inst = isNothing $ result $ do
    let leftmost_strategy = replicate ( fromJust $ I.steps inst ) 0
    result <- derive ( I.from inst ) leftmost_strategy
    assert ( result == I.to inst ) $ text "OK"
-- All derivations from the given term, enumerated breadth-first over
-- single-step extensions (see 'next').
derivations :: C.Type -> Lambda -> [ I.Type ]
derivations conf t =
    bfs ( next conf ) ( I.initial t )
-- | all ways to extend given derivation by one step (at the end)
-- derivation must fulfil restrictions
next :: C.Type -> I.Type -> Set I.Type
next conf inst = mkSet $ do
    let t = I.to inst
    -- pick any redex in the current term, reduce it, and splice the
    -- redukt back in at the same position
    p <- redex_positions t
    redex <- peek t p
    redukt <- step redex
    result <- poke t ( p, redukt )
    -- enforce the configured size and derivation-length limits
    guard $ inRange ( C.overall_size_range conf ) ( size result )
    ( l :: Int ) <- maybeToList $ I.steps inst
    guard $ inRange ( 0, C.derivation_length conf ) l
    -- extend the derivation by this one step
    return $ inst
        { I.to = result
        , I.steps = fmap succ $ I.steps inst
        }
| Erdwolf/autotool-bonn | src/Lambda/Quiz.hs | gpl-2.0 | 1,797 | 0 | 16 | 491 | 606 | 302 | 304 | 48 | 2 |
import Data.Hashable
import GHC.IORef
import Data.Maybe
-- A key's full (uncompressed) hash value.
type FullHash = Int
-- An entry's mutable state. NOTE(review): 'keey' looks like a typo for
-- 'key' — kept as-is since renaming would change the interface.
data State k v = State {
	keey :: IORef k       -- mutable reference to the stored key
	, fullhash :: !(Maybe FullHash)  -- cached hash, if computed
	, value :: IORef v    -- mutable reference to the stored value
	, flags :: Int        -- presumably entry status bits — TODO confirm
	}
-- A key paired with its precomputed hash, or the empty sentinel
-- 'Kempty'. Note: the 'fullHash'/'key' record selectors are partial —
-- they fail at runtime when applied to 'Kempty'.
data Key k = Kempty | Key { fullHash :: !FullHash
                          , key :: !k
                          }
-- | Build a non-empty key, computing and caching its hash up front.
newKey :: Hashable k => k -> Key k
newKey k = Key (hash k) k
-- | Extract the cached hash of a key.
--
-- Calling this on 'Kempty' is a programming error; we fail with an
-- explicit message instead of the opaque \"no match in record selector\"
-- error the bare 'fullHash' field accessor would raise.
getFullHash :: Key k -> FullHash
getFullHash Kempty = error "getFullHash: Kempty carries no hash"
getFullHash (Key h _) = h
-- | Return the stored key, or 'Nothing' for the empty sentinel.
-- (The hash component is irrelevant here, hence the wildcard — this
-- also silences the unused-binding warning of the original.)
getKey :: Key k -> Maybe k
getKey Kempty = Nothing
getKey (Key _ k) = Just k
-- | Compare two keys for equality. The cached hashes are compared first
-- as a cheap filter; only on a hash match is the (potentially more
-- expensive) key comparison performed. Two empty keys are equal; an
-- empty and a non-empty key never are.
keyComp :: Eq key =>
           Key key -> Key key -> Bool
keyComp Kempty Kempty = True
keyComp (Key h1 k1) (Key h2 k2) = h1 == h2 && k1 == k2
keyComp _ _ = False
| MathiasBartl/Concurrent_Datastructures | datastructurePrototype.hs | gpl-2.0 | 716 | 6 | 11 | 201 | 299 | 157 | 142 | 30 | 2 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Grid.Modify
(
controlGrid,
controlEmpty,
controlCamera,
controlCameraPathWait,
controlCameraPathContinue,
inputCamera,
inputPathWait,
inputPathContinue,
) where
import MyPrelude
import Game
import Game.Grid.GridWorld
import Game.Grid.Helpers
import Graphics.UI.GLFW
--------------------------------------------------------------------------------
-- controlGrid
-- | Wrap a control function: pending events are cleared before the
-- control runs, and queued camera commands are processed on the
-- resulting world afterwards.
controlGrid :: (s -> GridWorld -> b -> MEnv' (s, GridWorld, b)) ->
               (s -> GridWorld -> b -> MEnv' (s, GridWorld, b))
controlGrid control s grid b = do
    (s', grid', b') <- control s (gridClearEvents grid) b
    return (s', updateCameraCommands grid', b')
-- | Once the scheduled tick has been reached, pop the next queued
-- camera command, feed it to the camera, and schedule the tick for the
-- command after it. Before the scheduled tick (or with an empty queue)
-- the world is returned unchanged.
updateCameraCommands :: GridWorld -> GridWorld
updateCameraCommands grid
    | worldTick grid < gridCameraCommandTick grid = grid
    | otherwise =
        case gridCameraCommands grid of
            [] -> grid
            (cmd:cmds) ->
                let fed = gridModifyCamera grid $ \cam ->
                              cameraEatCameraCommand cam cmd
                in fed
                   { gridCameraCommands = cmds
                   , gridCameraCommandTick = worldTick grid +
                                             cameracommandTicks cmd
                   }
--------------------------------------------------------------------------------
-- controls for use
-- | no control
-- | The identity control: leaves state, world and extra data untouched.
controlEmpty :: s -> GridWorld -> b -> MEnv' (s, GridWorld, b)
controlEmpty s grid b =
    return (s, grid, b)
-- | control camera from input
-- | Control step that only feeds user input to the camera.
controlCamera :: s -> GridWorld -> b -> MEnv' (s, GridWorld, b)
controlCamera s grid b = do
    grid' <- inputCamera grid
    return (s, grid', b)
-- | control camera and path from input
-- | Control step: camera input followed by wait-style path input.
controlCameraPathWait :: s -> GridWorld -> b -> MEnv' (s, GridWorld, b)
controlCameraPathWait s grid b = do
    updated <- inputCamera grid >>= inputPathWait
    return (s, updated, b)
-- | control camera and path from input
-- | Control step: camera input followed by continuous path input.
controlCameraPathContinue :: s -> GridWorld -> b -> MEnv' (s, GridWorld, b)
controlCameraPathContinue s grid b = do
    updated <- inputCamera grid >>= inputPathContinue
    return (s, updated, b)
--------------------------------------------------------------------------------
--
-- | Feed touch input to the camera: a fresh touch snapshots the current
-- view as the new reference, a circle gesture adjusts the C component,
-- and a point-vector drag adjusts the A and B components.
-- NOTE(review): the View components a/b/c are presumably two angles and
-- a distance — confirm against the View definition in GridWorld.
inputCamera :: GridWorld -> MEnv' GridWorld
inputCamera grid = do
    gridModifyCameraM grid $ \camera -> do
        -- save current view
        camera' <- keysTouchHandleTouched camera $ \_ ->
                   let View a b c = cameraCurrentView camera
                   in camera
                      {
                          -- current view
                          cameraView = View a b c,
                          cameraViewIdeal = View (a + 1.0) (b + 1.0) (c + 1.0),
                          cameraViewAAlpha = 0.0,
                          cameraViewAAlphaIdeal = 0.0,
                          cameraViewASpeed = valueGridCameraViewASpeed,
                          cameraViewBAlpha = 0.0,
                          cameraViewBAlphaIdeal = 0.0,
                          cameraViewBSpeed = valueGridCameraViewBSpeed,
                          cameraViewCAlpha = 0.0,
                          cameraViewCAlphaIdeal = 0.0,
                          cameraViewCSpeed = valueGridCameraViewCSpeed
                      }
        -- move camera
        camera'' <- keysTouchHandleCircleVector camera' $ \ticks (x, y) r (x', y') r' ->
                    let View a b c = cameraView camera'
                        -- radius change of the circle gesture drives C,
                        -- clamped to the configured C range
                        cAlphaIdeal = keepInside (valueGridCameraViewCMin - c)
                                                 (valueGridCameraViewCMax - c)
                                                 ((r' - r) * valueGridCameraViewCSens)
                    in camera'
                       {
                          cameraViewCAlphaIdeal = cAlphaIdeal
                       }
        keysTouchHandlePointVector camera'' $ \ticks (x, y) (x', y') ->
            let View a b c = cameraView camera''
                -- horizontal drag drives A (unclamped), vertical drag
                -- drives B (clamped to [bMin, bMax] relative to b)
                aAlphaIdeal = (x' - x) * valueGridCameraViewASens
                bAlphaIdeal = keepInside (bMin - b) (bMax - b)
                                         ((y - y') * valueGridCameraViewBSens)
                {-
                cAlphaIdeal = keepInside (valueGridCameraViewCMin - c)
                                         (valueGridCameraViewCMax - c)
                                         ((r - r') * valueGridCameraViewCSens)
                -}
            in camera''
               {
                  cameraViewAAlphaIdeal = aAlphaIdeal,
                  cameraViewBAlphaIdeal = bAlphaIdeal
                  --cameraViewCAlphaIdeal = cAlphaIdeal
               }
    where
      -- run a monadic action on the camera and write the result back
      gridModifyCameraM grid f = do
          cam' <- f $ gridCamera grid
          return grid { gridCamera = cam' }
      -- B is kept just inside a quarter-turn either way
      bMin = (-0.249 * tau)
      bMax = 0.249 * tau
-- | Feed input to the path: roll keys, then a single queued direction
-- while the path is waiting.
inputPathWait :: GridWorld -> MEnv' GridWorld
inputPathWait grid =
    gridModifyPathM grid (inputRoll >>> inputDirWait)
-- | Feed input to the path: roll keys, then the continuously-held
-- direction keys.
inputPathContinue :: GridWorld -> MEnv' GridWorld
inputPathContinue grid =
    gridModifyPathM grid (inputRoll >>> inputDirContinue)
-- | Roll the path anticlockwise on ' and clockwise on J key presses.
inputRoll =
    rollOn (CharKey '\'') anticlockTurn >>>
    rollOn (CharKey 'J') clockTurn
    where
      rollOn k roll =
          arrKey k $ \path ->
              return (pathModifyTurn path $ \turn -> roll `mappend` turn)
-- | While the path is waiting for a direction, consume one turn from
-- the wait-style key handler; consuming a turn clears the waiting flag.
-- A non-waiting path passes through untouched.
inputDirWait path
    | not (pathWaiting path) = return path
    | otherwise = do
        mturn <- maybeTurnWait
        case mturn of
            Nothing -> return path
            Just turn ->
                let turned = pathModifyTurn path (mappend turn)
                in  return turned { pathWaiting = False }
-- | Mirror the currently-held direction keys into the path's turn state.
inputDirContinue path = do
    mturn <- maybeTurnContinue
    return path { pathTurnState = mturn }
-- | Poll the freshly-pressed direction keys (arrows, plus RSHIFT for
-- straight ahead), yielding the matching turn or 'Nothing'.
maybeTurnWait :: MEnv' (Maybe Turn)
maybeTurnWait = chain Nothing
    where
      chain = (arrKey (SpecialKey LEFT) $ \_ -> return $ Just leftTurn) >>>
              (arrKey (SpecialKey RIGHT) $ \_ -> return $ Just rightTurn) >>>
              (arrKey (SpecialKey DOWN) $ \_ -> return $ Just downTurn) >>>
              (arrKey (SpecialKey UP) $ \_ -> return $ Just upTurn) >>>
              (arrKey (SpecialKey RSHIFT) $ \_ -> return $ Just straightTurn)
-- | Poll the currently-held direction keys (arrows, plus RSHIFT for
-- straight ahead), yielding the matching turn or 'Nothing'.
maybeTurnContinue :: MEnv' (Maybe Turn)
maybeTurnContinue = chain Nothing
    where
      chain = (arrKeyHold (SpecialKey LEFT) $ \_ -> return $ Just leftTurn) >>>
              (arrKeyHold (SpecialKey RIGHT) $ \_ -> return $ Just rightTurn) >>>
              (arrKeyHold (SpecialKey DOWN) $ \_ -> return $ Just downTurn) >>>
              (arrKeyHold (SpecialKey UP) $ \_ -> return $ Just upTurn) >>>
              (arrKeyHold (SpecialKey RSHIFT) $ \_ -> return $ Just straightTurn)
-- | Run the action only on the frame where the key was freshly pressed;
-- otherwise pass the value through unchanged.
arrKey key action = \a -> do
    pressed <- keysKeyOnce key
    if pressed then action a else return a
-- | Run the action for as long as the key is held down; otherwise pass
-- the value through unchanged.
arrKeyHold key action = \a -> do
    keyState <- io $ getKey key
    case keyState of
        Press   -> action a
        Release -> return a
{-
gridModifyPathM grid $ \path -> do
controlPathRoll >>= \maybeRoll -> case maybeRoll of
-- roll
Just roll -> do
return $ pathModifyTurn path
$ \turn -> roll `mappend` turn
-- turn
Nothing -> do
case pathWaiting (gridPathA grid) of
-- only controllable when waiting
False -> do
return path
True -> do
controlPathTurnDrop >>= \maybeTurn -> case maybeTurn of
Nothing ->
return path
Just t -> do
let path' = pathModifyTurn path (mappend t)
return path' { pathWaiting = False }
-}
{-
inputPathContinue :: GridWorld -> MEnv' GridWorld
inputPathContinue grid = do
return grid
-- set ControlPosRef
grid' <- keysTouchHandlePointTouched grid $ \pos ->
grid { gridControlPosRef = pos }
sens <- fmap gamedataSens resourceGameData
gridModifyPathM grid' $ \path -> do
controlPathRoll >>= \maybeRoll -> case maybeRoll of
-- roll
Just roll -> do
return $ pathModifyTurn path
$ \turn -> roll `mappend` turn
-- turn
Nothing -> do
-- drag control
tstate' <- keysTouchHandlePointDrag (pathTurnState path) $ \_ _ (x', y') ->
let (x, y) = gridControlPosRef grid'
dx = x' - x
dy = y' - y
tstate | abs dx < abs dy = tstateY
| abs dy < abs dx = tstateX
| otherwise = pathTurnState path
tstateX | dx <= (-sens) = Just leftTurn
| sens <= dx = Just rightTurn
| otherwise = pathTurnState path
tstateY | dy <= (-sens) = Just downTurn
| sens <= dy = Just upTurn
| otherwise = pathTurnState path
in tstate
-- drop control
tstate'' <- keysTouchHandlePointDrop tstate' $ \_ _ (x', y') ->
let (x, y) = gridControlPosRef grid'
dx = x' - x
dy = y' - y
tstate | abs dx < abs dy = tstateY
| abs dy < abs dx = tstateX
| otherwise = Just straightTurn
tstateX | dx <= (-sens) = Just leftTurn
| sens <= dx = Just rightTurn
| otherwise = Just straightTurn
tstateY | dy <= (-sens) = Just downTurn
| sens <= dy = Just upTurn
| otherwise = Just straightTurn
in tstate
return path { pathTurnState = tstate'' }
-}
-- | Map a touch release near the left screen edge to an anticlockwise
-- roll and near the right edge to a clockwise roll; releases elsewhere
-- yield 'Nothing'.
controlPathRoll :: MEnv' (Maybe Turn)
controlPathRoll = do
    (width, _height) <- screenShape
    keysTouchHandleReleased Nothing $ \(x, _y) ->
        if x * width < valueGridRollSize
          then Just anticlockTurn
          else if (1 - x) * width < valueGridRollSize
                 then Just clockTurn
                 else Nothing
{-
-- fixme: remove
-- | non-empty iff acceptable turn
controlPathTurnDrag :: MEnv' (Maybe Turn)
controlPathTurnDrag = do
sens <- fmap gamedataSens resourceGet
keysTouchHandlePointDrag Nothing $ \_ (x, y) (x', y') ->
let dx = x' - x
dy = y' - y
maybe | abs dy < abs dx = maybeX
| abs dx < abs dy = maybeY
| otherwise = Nothing
maybeX | sens <= dx = Just rightTurn
| dx <= (-sens) = Just leftTurn
| otherwise = Nothing
maybeY | sens <= dy = Just upTurn
| dy <= (-sens) = Just downTurn
| otherwise = Nothing
in maybe
-- fixme: move into controlPathTurnWait
-- | non-empty iff acceptable turn
controlPathTurnDrop :: MEnv' (Maybe Turn)
controlPathTurnDrop = do
sens <- fmap gamedataSens resourceGet
keysTouchHandlePointDrop Nothing $ \_ (x, y) (x', y') ->
let dx = x' - x
dy = y' - y
maybe | abs dy < abs dx = maybeX
| abs dx < abs dy = maybeY
| otherwise = Just straightTurn
maybeX | sens <= dx = Just rightTurn
| dx <= (-sens) = Just leftTurn
| otherwise = Just straightTurn
maybeY | sens <= dy = Just upTurn
| dy <= (-sens) = Just downTurn
| otherwise = Just straightTurn
in maybe
-}
| karamellpelle/grid | designer/source/Game/Grid/Modify.hs | gpl-3.0 | 13,287 | 0 | 22 | 5,470 | 2,004 | 1,057 | 947 | 145 | 3 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
-- | An implementation of logging using the standard output.
module Impl.StdoutLogging where
import Control.Monad.IO.Class
import Data.Monoid
import qualified Data.Text as T
import DSL.LearningPlatformMTL
-- | Carrier for the stdout interpretation of the logging DSL; a plain
-- newtype over the underlying monad, unwrapped via 'runStdoutLogging'.
newtype StdoutLoggingI m a =
  StdoutLoggingI {runStdoutLogging :: m a}
  deriving (Monad, Functor, Applicative, MonadIO)
-- Interpret 'log' by printing "<LEVEL>: <message>" to standard output.
instance MonadIO m => LogDSL (StdoutLoggingI m) where
  log level msg = liftIO $ putStrLn $ show level <> ": " <> T.unpack msg
| capitanbatata/sandbox | learning-platform/src/Impl/StdoutLogging.hs | gpl-3.0 | 590 | 0 | 9 | 134 | 126 | 72 | 54 | 12 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceNetworking.Operations.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the latest state of a long-running operation. Clients can use this
-- method to poll the operation result at intervals as recommended by the
-- API service.
--
-- /See:/ <https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started Service Networking API Reference> for @servicenetworking.operations.get@.
module Network.Google.Resource.ServiceNetworking.Operations.Get
(
-- * REST Resource
OperationsGetResource
-- * Creating a Request
, operationsGet
, OperationsGet
-- * Request Lenses
, ogXgafv
, ogUploadProtocol
, ogAccessToken
, ogUploadType
, ogName
, ogCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceNetworking.Types
-- | A resource alias for @servicenetworking.operations.get@ method which the
-- 'OperationsGet' request conforms to.
-- Servant-style route type (auto-generated per the module header): each
-- ':>' nests one more path capture or query parameter of the request.
type OperationsGetResource =
     "v1" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :> Get '[JSON] Operation
-- | Gets the latest state of a long-running operation. Clients can use this
-- method to poll the operation result at intervals as recommended by the
-- API service.
--
-- /See:/ 'operationsGet' smart constructor.
-- Request record (auto-generated): only '_ogName' is mandatory, the
-- remaining fields are optional query parameters.
data OperationsGet =
  OperationsGet'
    { _ogXgafv :: !(Maybe Xgafv)
    , _ogUploadProtocol :: !(Maybe Text)
    , _ogAccessToken :: !(Maybe Text)
    , _ogUploadType :: !(Maybe Text)
    , _ogName :: !Text
    , _ogCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OperationsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ogXgafv'
--
-- * 'ogUploadProtocol'
--
-- * 'ogAccessToken'
--
-- * 'ogUploadType'
--
-- * 'ogName'
--
-- * 'ogCallback'
-- Smart constructor: takes the required operation name, leaving every
-- optional parameter unset.
operationsGet
    :: Text -- ^ 'ogName'
    -> OperationsGet
operationsGet pOgName_ =
  OperationsGet'
    { _ogXgafv = Nothing
    , _ogUploadProtocol = Nothing
    , _ogAccessToken = Nothing
    , _ogUploadType = Nothing
    , _ogName = pOgName_
    , _ogCallback = Nothing
    }
-- Lens accessors for 'OperationsGet', one per record field.
-- | V1 error format.
ogXgafv :: Lens' OperationsGet (Maybe Xgafv)
ogXgafv = lens _ogXgafv (\ s a -> s{_ogXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ogUploadProtocol :: Lens' OperationsGet (Maybe Text)
ogUploadProtocol
  = lens _ogUploadProtocol
      (\ s a -> s{_ogUploadProtocol = a})
-- | OAuth access token.
ogAccessToken :: Lens' OperationsGet (Maybe Text)
ogAccessToken
  = lens _ogAccessToken
      (\ s a -> s{_ogAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ogUploadType :: Lens' OperationsGet (Maybe Text)
ogUploadType
  = lens _ogUploadType (\ s a -> s{_ogUploadType = a})
-- | The name of the operation resource.
ogName :: Lens' OperationsGet Text
ogName = lens _ogName (\ s a -> s{_ogName = a})
-- | JSONP
ogCallback :: Lens' OperationsGet (Maybe Text)
ogCallback
  = lens _ogCallback (\ s a -> s{_ogCallback = a})
-- Wire the request record to the route type: response type, required
-- OAuth scopes, and the client builder that serialises the fields.
instance GoogleRequest OperationsGet where
        type Rs OperationsGet = Operation
        type Scopes OperationsGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/service.management"]
        requestClient OperationsGet'{..}
          = go _ogName _ogXgafv _ogUploadProtocol
              _ogAccessToken
              _ogUploadType
              _ogCallback
              (Just AltJSON)
              serviceNetworkingService
          where go
                  = buildClient (Proxy :: Proxy OperationsGetResource)
                      mempty
| brendanhay/gogol | gogol-servicenetworking/gen/Network/Google/Resource/ServiceNetworking/Operations/Get.hs | mpl-2.0 | 4,637 | 0 | 15 | 1,056 | 701 | 411 | 290 | 99 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.MachineTypes.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the specified machine type. Get a list of available machine
-- types by making a list() request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.machineTypes.get@.
module Network.Google.Resource.Compute.MachineTypes.Get
(
-- * REST Resource
MachineTypesGetResource
-- * Creating a Request
, machineTypesGet
, MachineTypesGet
-- * Request Lenses
, mtgProject
, mtgZone
, mtgMachineType
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.machineTypes.get@ method which the
-- 'MachineTypesGet' request conforms to.
-- Servant-style route type (auto-generated per the module header):
-- static path segments interleaved with the three captured identifiers.
type MachineTypesGetResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "zones" :>
               Capture "zone" Text :>
                 "machineTypes" :>
                   Capture "machineType" Text :>
                     QueryParam "alt" AltJSON :> Get '[JSON] MachineType
-- | Returns the specified machine type. Get a list of available machine
-- types by making a list() request.
--
-- /See:/ 'machineTypesGet' smart constructor.
-- Request record (auto-generated): all three fields are required path
-- captures.
data MachineTypesGet = MachineTypesGet'
    { _mtgProject :: !Text
    , _mtgZone :: !Text
    , _mtgMachineType :: !Text
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'MachineTypesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mtgProject'
--
-- * 'mtgZone'
--
-- * 'mtgMachineType'
-- Smart constructor: all three parameters are mandatory.
machineTypesGet
    :: Text -- ^ 'mtgProject'
    -> Text -- ^ 'mtgZone'
    -> Text -- ^ 'mtgMachineType'
    -> MachineTypesGet
machineTypesGet pMtgProject_ pMtgZone_ pMtgMachineType_ =
    MachineTypesGet'
    { _mtgProject = pMtgProject_
    , _mtgZone = pMtgZone_
    , _mtgMachineType = pMtgMachineType_
    }
-- Lens accessors for 'MachineTypesGet', one per record field.
-- | Project ID for this request.
mtgProject :: Lens' MachineTypesGet Text
mtgProject
  = lens _mtgProject (\ s a -> s{_mtgProject = a})
-- | The name of the zone for this request.
mtgZone :: Lens' MachineTypesGet Text
mtgZone = lens _mtgZone (\ s a -> s{_mtgZone = a})
-- | Name of the machine type to return.
mtgMachineType :: Lens' MachineTypesGet Text
mtgMachineType
  = lens _mtgMachineType
      (\ s a -> s{_mtgMachineType = a})
-- Wire the request record to the route type: response type, required
-- OAuth scopes, and the client builder that serialises the fields.
instance GoogleRequest MachineTypesGet where
        type Rs MachineTypesGet = MachineType
        type Scopes MachineTypesGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute",
               "https://www.googleapis.com/auth/compute.readonly"]
        requestClient MachineTypesGet'{..}
          = go _mtgProject _mtgZone _mtgMachineType
              (Just AltJSON)
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy MachineTypesGetResource)
                      mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/MachineTypes/Get.hs | mpl-2.0 | 3,779 | 0 | 16 | 926 | 468 | 280 | 188 | 76 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceManagement.Services.Rollouts.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the history of the service configuration rollouts for a managed
-- service, from the newest to the oldest.
--
-- /See:/ <https://cloud.google.com/service-management/ Service Management API Reference> for @servicemanagement.services.rollouts.list@.
module Network.Google.Resource.ServiceManagement.Services.Rollouts.List
(
-- * REST Resource
ServicesRolloutsListResource
-- * Creating a Request
, servicesRolloutsList
, ServicesRolloutsList
-- * Request Lenses
, srlXgafv
, srlUploadProtocol
, srlAccessToken
, srlUploadType
, srlServiceName
, srlFilter
, srlPageToken
, srlPageSize
, srlCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceManagement.Types
-- | A resource alias for @servicemanagement.services.rollouts.list@ method which the
-- 'ServicesRolloutsList' request conforms to.
-- Servant-style route type (auto-generated per the module header): the
-- service-name capture followed by the list/pagination query parameters.
type ServicesRolloutsListResource =
     "v1" :>
       "services" :>
         Capture "serviceName" Text :>
           "rollouts" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "filter" Text :>
                       QueryParam "pageToken" Text :>
                         QueryParam "pageSize" (Textual Int32) :>
                           QueryParam "callback" Text :>
                             QueryParam "alt" AltJSON :>
                               Get '[JSON] ListServiceRolloutsResponse
-- | Lists the history of the service configuration rollouts for a managed
-- service, from the newest to the oldest.
--
-- /See:/ 'servicesRolloutsList' smart constructor.
-- Request record (auto-generated): only '_srlServiceName' is mandatory,
-- the remaining fields are optional query parameters.
data ServicesRolloutsList =
  ServicesRolloutsList'
    { _srlXgafv :: !(Maybe Xgafv)
    , _srlUploadProtocol :: !(Maybe Text)
    , _srlAccessToken :: !(Maybe Text)
    , _srlUploadType :: !(Maybe Text)
    , _srlServiceName :: !Text
    , _srlFilter :: !(Maybe Text)
    , _srlPageToken :: !(Maybe Text)
    , _srlPageSize :: !(Maybe (Textual Int32))
    , _srlCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServicesRolloutsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srlXgafv'
--
-- * 'srlUploadProtocol'
--
-- * 'srlAccessToken'
--
-- * 'srlUploadType'
--
-- * 'srlServiceName'
--
-- * 'srlFilter'
--
-- * 'srlPageToken'
--
-- * 'srlPageSize'
--
-- * 'srlCallback'
-- Smart constructor: takes the required service name, leaving every
-- optional parameter unset.
servicesRolloutsList
    :: Text -- ^ 'srlServiceName'
    -> ServicesRolloutsList
servicesRolloutsList pSrlServiceName_ =
  ServicesRolloutsList'
    { _srlXgafv = Nothing
    , _srlUploadProtocol = Nothing
    , _srlAccessToken = Nothing
    , _srlUploadType = Nothing
    , _srlServiceName = pSrlServiceName_
    , _srlFilter = Nothing
    , _srlPageToken = Nothing
    , _srlPageSize = Nothing
    , _srlCallback = Nothing
    }
-- Lens accessors for 'ServicesRolloutsList', one per record field.
-- | V1 error format.
srlXgafv :: Lens' ServicesRolloutsList (Maybe Xgafv)
srlXgafv = lens _srlXgafv (\ s a -> s{_srlXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
srlUploadProtocol :: Lens' ServicesRolloutsList (Maybe Text)
srlUploadProtocol
  = lens _srlUploadProtocol
      (\ s a -> s{_srlUploadProtocol = a})
-- | OAuth access token.
srlAccessToken :: Lens' ServicesRolloutsList (Maybe Text)
srlAccessToken
  = lens _srlAccessToken
      (\ s a -> s{_srlAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
srlUploadType :: Lens' ServicesRolloutsList (Maybe Text)
srlUploadType
  = lens _srlUploadType
      (\ s a -> s{_srlUploadType = a})
-- | Required. The name of the service. See the
-- [overview](\/service-management\/overview) for naming requirements. For
-- example: \`example.googleapis.com\`.
srlServiceName :: Lens' ServicesRolloutsList Text
srlServiceName
  = lens _srlServiceName
      (\ s a -> s{_srlServiceName = a})
-- | Required. Use \`filter\` to return subset of rollouts. The following
-- filters are supported: -- To limit the results to only those in
-- [status](google.api.servicemanagement.v1.RolloutStatus) \'SUCCESS\', use
-- filter=\'status=SUCCESS\' -- To limit the results to those in
-- [status](google.api.servicemanagement.v1.RolloutStatus) \'CANCELLED\' or
-- \'FAILED\', use filter=\'status=CANCELLED OR status=FAILED\'
srlFilter :: Lens' ServicesRolloutsList (Maybe Text)
srlFilter
  = lens _srlFilter (\ s a -> s{_srlFilter = a})
-- | The token of the page to retrieve.
srlPageToken :: Lens' ServicesRolloutsList (Maybe Text)
srlPageToken
  = lens _srlPageToken (\ s a -> s{_srlPageToken = a})
-- | The max number of items to include in the response list. Page size is 50
-- if not specified. Maximum value is 100.
srlPageSize :: Lens' ServicesRolloutsList (Maybe Int32)
srlPageSize
  = lens _srlPageSize (\ s a -> s{_srlPageSize = a}) .
      mapping _Coerce
-- | JSONP
srlCallback :: Lens' ServicesRolloutsList (Maybe Text)
srlCallback
  = lens _srlCallback (\ s a -> s{_srlCallback = a})
-- Wire the request record to the route type: response type, required
-- OAuth scopes, and the client builder that serialises the fields.
instance GoogleRequest ServicesRolloutsList where
        type Rs ServicesRolloutsList =
             ListServiceRolloutsResponse
        type Scopes ServicesRolloutsList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud-platform.read-only",
               "https://www.googleapis.com/auth/service.management",
               "https://www.googleapis.com/auth/service.management.readonly"]
        requestClient ServicesRolloutsList'{..}
          = go _srlServiceName _srlXgafv _srlUploadProtocol
              _srlAccessToken
              _srlUploadType
              _srlFilter
              _srlPageToken
              _srlPageSize
              _srlCallback
              (Just AltJSON)
              serviceManagementService
          where go
                  = buildClient
                      (Proxy :: Proxy ServicesRolloutsListResource)
                      mempty
| brendanhay/gogol | gogol-servicemanagement/gen/Network/Google/Resource/ServiceManagement/Services/Rollouts/List.hs | mpl-2.0 | 6,857 | 0 | 20 | 1,582 | 981 | 570 | 411 | 140 | 1 |
-- | Bracket-balance check. The first argument is the stack of currently
-- open brackets (most recent first), the second the remaining input.
-- Non-bracket characters are skipped. Yields \"yes\" for a balanced
-- string, \"no\" otherwise.
ans :: String -> String -> String
ans stack input =
  case input of
    [] -> if null stack then "yes" else "no"
    (c:rest) ->
      case c of
        '(' -> ans ('(':stack) rest
        '[' -> ans ('[':stack) rest
        ')' -> close '(' rest
        ']' -> close '[' rest
        _   -> ans stack rest
  where
    -- pop the stack if its top matches the expected opener,
    -- otherwise the string is unbalanced
    close expected rest =
      case stack of
        (top:remaining) | top == expected -> ans remaining rest
        _ -> "no"
-- | Read bracket strings from stdin up to the terminating "." line and
-- print the verdict for each.
main :: IO ()
main = do
  contents <- getContents
  let datasets = takeWhile (/= ".") (lines contents)
  mapM_ (putStrLn . ans []) datasets
| a143753/AOJ | 1173.hs | apache-2.0 | 445 | 0 | 13 | 116 | 314 | 162 | 152 | 15 | 3 |
module Halvin.Test where
-- Placeholder test suite.
-- NOTE(review): 'describe "Halvin" $ return' leaves 'return' without an
-- argument — confirm this compiles as intended; 'return ()' (or a real
-- spec body) was probably meant.
spec :: Spec
spec = describe "Halvin" $ return
| kdkeyser/halvin | src/test/HalvinTest.hs | apache-2.0 | 76 | 0 | 6 | 16 | 23 | 13 | 10 | 3 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Safe #-}
-- | Here's a simple example of a program that uses @envparse@'s parser:
--
-- @
-- module Main (main) where
--
-- import Control.Monad (unless)
-- import Env
--
-- data Hello = Hello { name :: String, quiet :: Bool }
--
-- hello :: IO Hello
-- hello = Env.'parse' ('Help.header' \"envparse example\") $
-- Hello \<$\> 'var' ('str' <=< 'nonempty') \"NAME\" ('help' \"Target for the greeting\")
-- \<*\> 'switch' \"QUIET\" ('help' \"Whether to actually print the greeting\")
--
-- main :: IO ()
-- main = do
-- Hello {name, quiet} <- hello
-- unless quiet $
-- putStrLn (\"Hello, \" ++ name ++ \"!\")
-- @
--
-- The @NAME@ environment variable is mandatory and contains the name of the person to
-- greet. @QUIET@, on the other hand, is an optional boolean flag, false by default, that
-- decides whether the greeting should be silent.
--
-- If the @NAME@ variable is undefined in the environment then running the program will
-- result in the following help text:
--
-- @
-- envparse example
--
-- Available environment variables:
--
-- NAME Target for the greeting
-- QUIET Whether to actually print the
-- greeting
--
-- Parsing errors:
--
-- NAME is unset
-- @
module Env
( parse
, parseOr
, Parser
, Mod
, Help.Info
, Help.header
, Help.desc
, Help.footer
, Help.handleError
, Help.ErrorHandler
, Help.defaultErrorHandler
, prefixed
, var
, Var
, Reader
, str
, char
, nonempty
, splitOn
, auto
, def
, helpDef
, flag
, switch
, Flag
, HasHelp
, help
, sensitive
, Help.helpDoc
, Error(..)
, Error.AsUnset(..)
, Error.AsEmpty(..)
, Error.AsUnread(..)
-- * Re-exports
-- $re-exports
, optional, (<=<), (>=>), (<>), asum
-- * Testing
-- $testing
, parsePure
) where
import Control.Applicative
import Control.Monad ((>=>), (<=<))
import Data.Foldable (asum, for_)
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid (Monoid(..), (<>))
#else
import Data.Monoid ((<>))
#endif
import System.Environment (getEnvironment)
#if __GLASGOW_HASKELL__ >= 708
import System.Environment (unsetEnv)
#endif
import System.Exit (exitFailure)
import qualified System.IO as IO
import qualified Env.Internal.Help as Help
import Env.Internal.Parser
import Env.Internal.Error (Error)
import qualified Env.Internal.Error as Error
-- $re-exports
-- External functions that may be useful to the consumer of the library
-- $testing
-- Utilities to test—without dabbling in IO—that your parsers do
-- what you want them to do
-- | Parse the environment or die
--
-- Prints the help text and exits with @EXIT_FAILURE@ on encountering a parse error.
--
-- @
-- >>> parse ('Help.header' \"env-parse 0.2.0\") ('var' 'str' \"USER\" ('def' \"nobody\"))
-- @
parse :: Error.AsUnset e => (Help.Info Error -> Help.Info e) -> Parser e a -> IO a
parse m p = do
  r <- parseOr die m p
  case r of
    Left _  -> error "absurd" -- unreachable: 'die' never returns
    Right a -> pure a
-- | Try to parse the environment
--
-- Use this if simply dying on failure (the behavior of 'parse') is inadequate for your needs.
--
-- On success (and on GHC >= 7.8) every variable marked 'sensitive' is removed
-- from the process environment so it cannot leak to child processes.
-- On failure, the rendered help text is passed to the @onFailure@ action.
parseOr :: Error.AsUnset e => (String -> IO a) -> (Help.Info Error -> Help.Info e) -> Parser e b -> IO (Either a b)
parseOr onFailure helpMod parser = do
  b <- fmap (parsePure parser) getEnvironment
#if __GLASGOW_HASKELL__ >= 708
  for_ b $ \_ ->
    traverseSensitiveVar parser unsetEnv
#endif
  traverseLeft (onFailure . Help.helpInfo (helpMod Help.defaultInfo) parser) b
-- | Print a message to stderr and terminate with a failing exit code.
die :: String -> IO a
die m = do
  IO.hPutStrLn IO.stderr m
  exitFailure
-- | Apply an effectful function to the 'Left' side of an 'Either',
-- leaving a 'Right' value untouched.
traverseLeft :: Applicative f => (a -> f b) -> Either a t -> f (Either b t)
traverseLeft f (Left a)  = Left <$> f a
traverseLeft _ (Right t) = pure (Right t)
| supki/envparse | src/Env.hs | bsd-2-clause | 3,823 | 0 | 13 | 898 | 711 | 431 | 280 | 62 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
module Buster.TypesSpec (spec) where
import Test.Hspec
import Data.String.QQ (s)
import Data.Yaml (decodeEither)
import Buster.Types
-- | Exercises YAML decoding of 'Config': a minimal document, a fully
-- specified one, defaulting of the request method, and multiple urls.
spec :: Spec
spec = describe "parsing Config" $ do
  it "parses a minimal Config" $
    parseStr baseConfigStr `shouldBe` Right baseConfig
  it "parses a full Config" $
    parseStr fullConfigStr `shouldBe` Right fullConfig
  it "defaults method" $
    parseStr fullConfigNoMethodStr `shouldBe` Right fullConfigGet
  it "parses multiple urls" $
    parseStr fullConfigMultipleStr `shouldBe` Right fullConfigMultiple
  -- NB: the quasiquoted YAML below is whitespace-sensitive, which is why
  -- the documents start at column zero rather than at binding depth.
  where baseConfigStr = [s|
---
urls: []
|]
        fullConfigStr = [s|
verbose: true
monitor: true
log_file: "/path/to/output.log"
urls:
  - url: http://www.example.com
    interval: 1000
    method: POST
|]
        fullConfigNoMethodStr = [s|
verbose: true
monitor: true
log_file: "/path/to/output.log"
urls:
  - url: http://www.example.com
    interval: 1000
|]
        fullConfigMultipleStr = [s|
verbose: true
monitor: true
log_file: "/path/to/output.log"
urls:
  - url: http://www.example.com
    interval: 1000
    method: POST
  - url: http://www.example.com
    interval: 1000
    method: GET
|]
        -- Expected values mirroring the YAML documents above; note the
        -- "no method" document is expected to default to GET.
        baseConfig = Config { configVerbose = False,
                              urlConfigs = [],
                              configMonitor = False,
                              configLogFile = Nothing}
        fullConfigGet = Config { configVerbose = True,
                                 configMonitor = True,
                                 configLogFile = Just "/path/to/output.log",
                                 urlConfigs = [fullUrlConfig { requestMethod = "GET"}]}
        fullConfig = Config { configVerbose = True,
                              configMonitor = True,
                              configLogFile = Just "/path/to/output.log",
                              urlConfigs = [fullUrlConfig]}
        fullConfigMultiple = Config { configVerbose = True,
                                      configMonitor = True,
                                      configLogFile = Just "/path/to/output.log",
                                      urlConfigs = [fullUrlConfig,
                                                    fullUrlConfig { requestMethod = "GET"}]}
        fullUrlConfig = UrlConfig { url = "http://www.example.com",
                                    requestInterval = 1000,
                                    requestMethod = "POST" }
        parseStr = decodeEither
| MichaelXavier/Buster | test/Buster/TypesSpec.hs | bsd-2-clause | 2,570 | 0 | 11 | 932 | 380 | 227 | 153 | 42 | 1 |
module GitDirTest
( suite
) where
import Test.Tasty
import Test.Tasty.HUnit
import GitDir
import GitFile
import Data.Map as Map
-- Test fixtures. The two trailing numbers appear to be each file's
-- addition and deletion counts (inferred from the Add/Del expectations
-- below: 31 = 1+2+4+8+16, 310 = 10+20+40+80+160 — confirm against
-- GitFile); the role of the two empty lists is not visible here.
a11 = createGitFile' "a/1/test1.hs" [] [] 1 10
a12 = createGitFile' "a/1/test2.hs" [] [] 2 20
a23 = createGitFile' "a/2/test3.hs" [] [] 4 40
a24 = createGitFile' "a/2/test4.hs" [] [] 8 80
b15 = createGitFile' "b/1/test5.hs" [] [] 16 160
files = [a11, a12, a23, a24, b15]
-- | Test tree covering 'collectDirs' (building the directory tree from
-- files) and 'collectDirFromFile' (folding a single file into a dir map).
suite :: TestTree
suite = testGroup "GitDir"
  [ testGroup "collectDirs"
    [ testCase "ends up with a root dir" $ gitDirRootTest
    , testCase "collects all additions for the root" $ gitDirRootAddTest
    , testCase "collects all deletions for the root" $ gitDirRootDelTest
    , testCase "collects all root children I" $ gitDirRootChildrenLengthTest
    , testCase "collects all root children II" $ gitDirRootChildrenPathTest
    , testCase "collects all root children III" $ gitDirRootChildrenAdditionTest
    , testCase "collects subdirectories of children I" $ gitDirRootGrandchildrenLengthTest
    , testCase "collects subdirectories of children II" $ gitDirRootGrandchildrenPathTest
    ]
  , testGroup "collectDirFromFile"
    [ testCase "converts file to dir I" $ collectDirFromFileLengthTest
    , testCase "converts file to dir II" $ collectDirFromFilePathTest
    , testCase "merges with an already present dir I" $ collectDirFromFilePathWithMergeTest
    , testCase "merges with an already present dir II" $ collectDirFromFileAdditionsWithMergeTest
    , testCase "merges with an already present dir III" $ collectDirFromFileDeletionsWithMergeTest
    ]
  ]
-- Shorthand: collect the fixture files under an unnamed ("") root.
collectDirs' = collectDirs ""
-- collectDirs expectations --------------------------------------------
gitDirRootTest = "root" @=? actual
  where actual = getGitDirPath $ collectDirs "root" files
-- 31 = 1+2+4+8+16: additions of all five fixture files.
gitDirRootAddTest = 31 @=? actual
  where actual = getGitDirAdditions $ collectDirs' files
-- 310 = 10+20+40+80+160: deletions of all five fixture files.
gitDirRootDelTest = 310 @=? actual
  where actual = getGitDirDeletions $ collectDirs' files
-- The root has exactly the two top-level dirs "a" and "b".
gitDirRootChildrenLengthTest = 2 @=? actual
  where actual = length . getGitDirChildren . collectDirs' $ files
gitDirRootChildrenPathTest = ["b", "a"] @=? actual
  where actual = toPaths . getGitDirChildren . collectDirs' $ files
-- 15 = 1+2+4+8: additions under "a" (the second child, see ordering above).
gitDirRootChildrenAdditionTest = 15 @=? actual
  where actual = getGitDirAdditions . takeSecond . getGitDirChildren . collectDirs' $ files
gitDirRootGrandchildrenLengthTest = 2 @=? actual
  where actual = length . (getGrandchildren takeSecond) . collectDirs' $ files
gitDirRootGrandchildrenPathTest = ["a/2", "a/1"] @=? actual
  where actual = toPaths . (getGrandchildren takeSecond) . collectDirs' $ files
-- collectDirFromFile expectations -------------------------------------
collectDirFromFileLengthTest = 1 @=? actual
  where actual = length . Map.toList $ collectDirFromFile a11 Map.empty
collectDirFromFilePathTest = ["a/1"] @=? actual
  where actual = (Prelude.map fst) . Map.toList $ collectDirFromFile a11 Map.empty
-- a11 and a12 share directory "a/1", so merging must keep a single entry...
collectDirFromFilePathWithMergeTest = ["a/1"] @=? actual
  where actual = (Prelude.map fst) . Map.toList $ collected
        collected = Prelude.foldr collectDirFromFile Map.empty [a11, a12]
-- ...whose additions (1+2) and deletions (10+20) are summed.
collectDirFromFileAdditionsWithMergeTest = 3 @=? actual
  where actual = getGitDirAdditions . snd . head . Map.toList $ collected
        collected = Prelude.foldr collectDirFromFile Map.empty [a11, a12]
collectDirFromFileDeletionsWithMergeTest = 30 @=? actual
  where actual = getGitDirDeletions . snd . head . Map.toList $ collected
        collected = Prelude.foldr collectDirFromFile Map.empty [a11, a12]
-- | List the children of the child selected by the accessor: list the
-- node's children, pick one of them, then list that child's children.
getGrandchildren :: ([GitDir] -> GitDir) -> GitDir -> [GitDir]
getGrandchildren pick dir = getGitDirChildren (pick (getGitDirChildren dir))
-- | Second element of a list; errors on lists shorter than two elements
-- (only used on lists known to have at least two).
takeSecond :: [a] -> a
takeSecond xs = head (tail xs)
-- | Project each directory to its path string.
toPaths :: [GitDir] -> [String]
toPaths dirs = [getGitDirPath d | d <- dirs]
| LFDM/hstats | test/GitDirTest.hs | bsd-3-clause | 3,652 | 0 | 11 | 605 | 897 | 479 | 418 | 66 | 1 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE CPP, ForeignFunctionInterface, MagicHash, UnboxedTuples #-}
-----------------------------------------------------------------------------
-- |
-- Module : Debug.Trace
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Functions for tracing and monitoring execution.
--
-- These can be useful for investigating bugs or performance problems.
-- They should /not/ be used in production code.
--
-----------------------------------------------------------------------------
module Debug.Trace (
-- * Tracing
-- $tracing
trace, -- :: String -> a -> a
traceShow,
traceStack,
traceIO, -- :: String -> IO ()
putTraceMsg,
-- * Eventlog tracing
-- $eventlog_tracing
traceEvent,
traceEventIO,
) where
import Prelude
import System.IO.Unsafe
import Control.Monad
import Foreign.C.String
import GHC.Base
import qualified GHC.Foreign
import GHC.IO.Encoding
import GHC.Ptr
import GHC.Stack
import System.IO (hPutStrLn,stderr)
-- $tracing
--
-- The 'trace', 'traceShow' and 'traceIO' functions print messages to an output
-- stream. They are intended for \"printf debugging\", that is: tracing the flow
-- of execution and printing interesting values.
-- The usual output stream is 'System.IO.stderr'. For Windows GUI applications
-- (that have no stderr) the output is directed to the Windows debug console.
-- Some implementations of these functions may decorate the string that\'s
-- output to indicate that you\'re tracing.
-- | Output the trace message from the IO monad. This sequences the
-- output with respect to other IO actions.
traceIO :: String -> IO ()
traceIO = hPutStrLn stderr
-- don't use debugBelch() directly, because we cannot call varargs functions
-- using the FFI.
-- NOTE(review): this import is not referenced by any definition visible
-- in this file; 'traceIO' above writes via hPutStrLn instead.
foreign import ccall unsafe "HsBase.h debugBelch2"
   debugBelch :: CString -> CString -> IO ()
-- | Deprecated. Use 'traceIO'.
putTraceMsg :: String -> IO ()
putTraceMsg = traceIO
{-# DEPRECATED putTraceMsg "Use Debug.Trace.traceIO" #-}
{-# NOINLINE trace #-}
{-|
The 'trace' function outputs the trace message given as its first argument,
before returning the second argument as its result.

For example, this returns the value of @f x@ but first outputs the message.

> trace ("calling f with x = " ++ show x) (f x)

The 'trace' function should /only/ be used for debugging, or for monitoring
execution. The function is not referentially transparent: its type indicates
that it is a pure function but it has the side effect of outputting the
trace message.
-}
trace :: String -> a -> a
trace string expr =
    unsafePerformIO (traceIO string >> return expr)
{-|
Like 'trace', but uses 'show' on the argument to convert it to a 'String'.

This makes it convenient for printing the values of interesting variables or
expressions inside a function. For example here we print the value of the
variables @x@ and @z@:

> f x y =
>     traceShow (x, z) $ result
>   where
>     z = ...
>     ...
-}
traceShow :: (Show a) => a -> b -> b
traceShow x = trace (show x)
-- $eventlog_tracing
--
-- Eventlog tracing is a performance profiling system. These functions emit
-- extra events into the eventlog. In combination with eventlog profiling
-- tools these functions can be used for monitoring execution and
-- investigating performance problems.
--
-- Currently only GHC provides eventlog profiling, see the GHC user guide for
-- details on how to use it. These function exists for other Haskell
-- implementations but no events are emitted. Note that the string message is
-- always evaluated, whether or not profiling is available or enabled.
{-# NOINLINE traceEvent #-}
-- | The 'traceEvent' function behaves like 'trace' with the difference that
-- the message is emitted to the eventlog, if eventlog profiling is available
-- and enabled at runtime.
--
-- It is suitable for use in pure code. In an IO context use 'traceEventIO'
-- instead.
--
-- Note that when using GHC's SMP runtime, it is possible (but rare) to get
-- duplicate events emitted if two CPUs simultaneously evaluate the same thunk
-- that uses 'traceEvent'.
--
traceEvent :: String -> a -> a
traceEvent msg expr =
  unsafeDupablePerformIO (traceEventIO msg >> return expr)
-- | The 'traceEventIO' function emits a message to the eventlog, if eventlog
-- profiling is available and enabled at runtime.
--
-- Compared to 'traceEvent', 'traceEventIO' sequences the event with respect to
-- other IO actions.
--
-- (This implementation emits no event; it only forces the message, matching
-- the documented guarantee that the string is always evaluated.)
traceEventIO :: String -> IO ()
traceEventIO msg = do
  _ <- return $! length msg
  return ()
-- | Like 'trace', but additionally prints a call stack if one is
-- available.
--
-- In the current GHC implementation, the call stack is only
-- available if the program was compiled with @-prof@; otherwise
-- 'traceStack' behaves exactly like 'trace'. Entries in the call
-- stack correspond to @SCC@ annotations, so it is a good idea to use
-- @-fprof-auto@ or @-fprof-auto-calls@ to add SCC annotations automatically.
--
traceStack :: String -> a -> a
traceStack str expr = unsafePerformIO $ do
   traceIO str
   stack <- currentCallStack
   unless (null stack) (traceIO (renderStack stack))
   return expr
| joelburget/haste-compiler | libraries/base-ghc-7.6/Debug/Trace.hs | bsd-3-clause | 5,445 | 0 | 13 | 1,044 | 474 | 285 | 189 | 48 | 1 |
module Module5.Task23 where
import Control.Monad.State (State, get, modify', runState, evalState)
import Control.Monad (replicateM)
-- system code
data Tree a = Leaf a | Fork (Tree a) a (Tree a) deriving Show

-- solution code

-- | Relabel a tree with consecutive 'Integer's starting at 1, visiting
-- nodes in left-to-right (in-order) order.
numberTree :: Tree () -> Tree Integer
numberTree tree = evalState (go tree) 1
  where
    -- Hand out the next label and advance the counter.
    fresh = do
      n <- get
      modify' succ
      return n
    go (Leaf _) = fmap Leaf fresh
    go (Fork l _ r) = do
      l' <- go l
      n <- fresh
      r' <- go r
      return (Fork l' n r')
| dstarcev/stepic-haskell | src/Module5/Task23.hs | bsd-3-clause | 510 | 0 | 10 | 127 | 215 | 108 | 107 | 16 | 2 |
{-# LANGUAGE LambdaCase, TupleSections, OverloadedStrings #-}
module Transformations.Optimising.SimpleDeadFunctionElimination where
import Text.Printf
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Functor.Foldable as Foldable
import qualified Data.Foldable
import Grin.Grin
-- | Drop externals and function definitions that are unreachable from
-- the entry point @grinMain@. Liveness is computed as a fixpoint: start
-- from @grinMain@ and repeatedly add every defined function applied
-- (SApp) inside an already-live definition.
simpleDeadFunctionElimination :: Program -> Program
simpleDeadFunctionElimination (Program exts defs) = Program liveExts liveDefs where
  liveExts = [ext | ext <- exts, Set.member (eName ext) liveNames]
  liveDefs = [def | def@(Def name _ _) <- defs, Set.member name liveSet]
  liveNames = cata collectAll $ Program [] liveDefs -- collect all live names
  defMap :: Map Name Def
  defMap = Map.fromList [(name, def) | def@(Def name _ _) <- defs]
  lookupDef :: Name -> Maybe Def
  lookupDef name = Map.lookup name defMap
  -- Fixpoint: iterate 'visit' until no new names become live.
  liveSet :: Set Name
  liveSet = fst $ until (\(live, visited) -> live == visited) visit (Set.singleton "grinMain", mempty)
  -- One round: scan the bodies of the not-yet-visited live functions.
  visit :: (Set Name, Set Name) -> (Set Name, Set Name)
  visit (live, visited) = (mappend live seen, mappend visited toVisit) where
    toVisit = Set.difference live visited
    seen = foldMap (maybe mempty (cata collect) . lookupDef) toVisit
  -- Applications of *defined* functions only (externals are excluded here;
  -- they are recovered separately via 'collectAll' over the live defs).
  collect :: ExpF (Set Name) -> Set Name
  collect = \case
    SAppF name _ | Map.member name defMap -> Set.singleton name
    exp -> Data.Foldable.fold exp
  -- All applied names, defined or external.
  collectAll :: ExpF (Set Name) -> Set Name
  collectAll = \case
    SAppF name args -> Set.singleton name
    exp -> Data.Foldable.fold exp
| andorp/grin | grin/src/Transformations/Optimising/SimpleDeadFunctionElimination.hs | bsd-3-clause | 1,551 | 0 | 14 | 285 | 553 | 292 | 261 | 33 | 3 |
{-# LANGUAGE RankNTypes, GADTs #-}
module Obsidian.GCDObsidian.CodeGen.Common where
import Data.List
import Data.Word
import qualified Data.Map as Map
import Obsidian.GCDObsidian.Kernel
import Obsidian.GCDObsidian.Exp
import Obsidian.GCDObsidian.Types
import Obsidian.GCDObsidian.Globs
import Obsidian.GCDObsidian.CodeGen.PP
import Obsidian.GCDObsidian.CodeGen.Memory
------------------------------------------------------------------------------
-- TINY TOOLS
-- | First two components of a 3-tuple.
fst2 :: (a, b, c) -> (a, b)
fst2 (x, y, _) = (x, y)
------------------------------------------------------------------------------
-- | Address-space qualifier strings spliced in front of pointer types by
-- 'genType' (e.g. CUDA/OpenCL "__global"/"__local" keywords).
data GenConfig = GenConfig { global :: String,
                             local :: String };
genConfig = GenConfig
------------------------------------------------------------------------------
-- Helpers
-- | Names beginning with \"arr\" denote arrays that 'genExp' addresses
-- relative to the shared-memory base pointer @sbase@.
mappedName :: Name -> Bool
mappedName = isPrefixOf "arr"
-- Render a type as C source text. Scalar cases ignore the GenConfig and
-- carry a trailing space; Global/Local prepend the configured address-space
-- qualifier from 'GenConfig'.
genType _ Int = "int "
genType _ Float = "float "
genType _ Double = "double "
genType _ Bool = "int "
genType _ Word8 = "uint8_t "
genType _ Word16 = "uint16_t "
genType _ Word32 = "uint32_t "
genType _ Word64 = "uint64_t "
genType gc (Pointer t) = genType gc t ++ "*"
genType gc (Global t) = global gc ++" "++ genType gc t -- "__global " ++ genType t
genType gc (Local t) = local gc ++" "++ genType gc t
-- | Render a C cast prefix, e.g. @(uint32_t *)@.
genCast gc t = parens (genType gc t)
-- | Wrap a string in parentheses.
parens :: String -> String
parens s = "(" ++ s ++ ")"
------------------------------------------------------------------------------
-- genExp C-style
-- | Render a scalar expression as C-style source text (a singleton
-- list). Array names recognised by 'mappedName' are rewritten to a cast
-- of the shared-memory base pointer @sbase@, offset by the byte offset
-- recorded for that array in the 'MemMap'.
genExp :: Scalar a => GenConfig -> MemMap -> Exp a -> [String]
-- Cheat and do CUDA printing here as well
genExp gc _ (BlockIdx X) = ["blockIdx.x"]
genExp gc _ (BlockIdx Y) = ["blockIdx.y"]
genExp gc _ (BlockIdx Z) = ["blockIdx.z"]
genExp gc _ (ThreadIdx X) = ["threadIdx.x"]
genExp gc _ (ThreadIdx Y) = ["threadIdx.y"]
genExp gc _ (ThreadIdx Z) = ["threadIdx.z"]
genExp gc _ (BlockDim X) = ["blockDim.x"]
genExp gc _ (BlockDim Y) = ["blockDim.y"]
genExp gc _ (BlockDim Z) = ["blockDim.z"]
genExp gc _ (GridDim X) = ["gridDim.x"]
genExp gc _ (GridDim Y) = ["gridDim.y"]
genExp gc _ (GridDim Z) = ["gridDim.z"]
genExp gc _ (Literal a) = [show a]
genExp gc _ (Index (name,[])) = [name]
genExp gc mm (Index (name,es)) =
  [name' ++ genIndices gc mm es]
  where
    (offs,t) =
      case Map.lookup name mm of
        -- fixed typo in the error message ("excist" -> "exist")
        Nothing -> error "array does not exist in map"
        (Just x) -> x
    -- Mapped arrays become "((cast)(sbase+offset))"; a zero offset
    -- addresses sbase directly.
    name' = if mappedName name
            then parens$ genCast gc t ++
                 if offs > 0
                 then "(sbase+" ++ show offs ++ ")"
                 else "sbase"
            else name
genExp gc mm (BinOp op e1 e2) =
  [genOp op (genExp gc mm e1 ++ genExp gc mm e2)]
genExp gc mm (UnOp op e) =
  [genOp op (genExp gc mm e)]
genExp gc mm (If b e1 e2) =
  [genIf (genExp gc mm b ++
          genExp gc mm e1 ++
          genExp gc mm e2 )]
----------------------------------------------------------------------------
--
-- Render a chain of C index brackets, one "[...]" per index expression.
-- (The inner mm parameter intentionally shadows the outer binding.)
genIndices gc mm es = concatMap (pIndex mm) es
  where
    pIndex mm e = "[" ++ concat (genExp gc mm e) ++ "]"
-- | Render @[condition, then, else]@ as a C conditional expression.
-- Partial: expects exactly three rendered operands.
genIf :: [String] -> String
genIf [b, e1, e2] = concat ["(", b, " ? ", e1, " : ", e2, ")"]
------------------------------------------------------------------------------
-- genOp
-- | Render an operator applied to already-rendered operand strings.
-- Partial in the list argument: each operator expects exactly its arity.
genOp :: Op a -> [String] -> String
genOp Add [a,b] = oper "+" a b
genOp Sub [a,b] = oper "-" a b
genOp Mul [a,b] = oper "*" a b
genOp Div [a,b] = oper "/" a b
genOp Mod [a,b] = oper "%" a b
genOp Sin [a] = func "sin" a
genOp Cos [a] = func "cos" a
-- Bool ops
genOp Eq [a,b] = oper "==" a b
genOp Lt [a,b] = oper "<" a b
genOp LEq [a,b] = oper "<=" a b
genOp Gt [a,b] = oper ">" a b
genOp GEq [a,b] = oper ">=" a b
-- Bitwise ops
genOp BitwiseAnd [a,b] = oper "&" a b
genOp BitwiseOr [a,b] = oper "|" a b
genOp BitwiseXor [a,b] = oper "^" a b
genOp BitwiseNeg [a] = unOp "~" a
genOp ShiftL [a,b] = oper "<<" a b
genOp ShiftR [a,b] = oper ">>" a b
-- built-ins
genOp Min [a,b] = func "min" (a ++ "," ++ b)
genOp Max [a,b] = func "max" (a ++ "," ++ b)
-- | @func f a@ renders a C call, @f(a)@.
func f a = concat [f, "(", a, ")"]
-- | @oper op a b@ renders a parenthesised infix application, @(a op b)@.
oper f a b = concat ["(", a, f, b, ")"]
-- | @unOp op a@ renders a parenthesised prefix application, @(op a)@.
unOp f a = concat ["(", f, a, ")"]
------------------------------------------------------------------------------
-- Configurations, threads,memorymap
-- | Per-kernel code generation configuration: thread count, memory map
-- for named arrays, and the amount of local (shared) memory used.
data Config = Config {configThreads :: NumThreads,
                      configMM :: MemMap,
                      configLocalMem :: Word32}
config = Config
-- Emit an assignment statement, "lhs = rhs;".
assign :: Scalar a => GenConfig -> MemMap -> Exp a -> Exp a -> PP ()
assign gc mm name val = line ((concat (genExp gc mm name)) ++
                              " = " ++ concat (genExp gc mm val) ++
                              ";")
-- Emit an "if <cond>" header (the braces/body are emitted by the caller).
cond :: GenConfig -> MemMap -> Exp Bool -> PP ()
cond gc mm e = line ("if " ++ concat (genExp gc mm e))
-- used in both OpenCL and CUDA generation
-- Guard the body with "if (tid < n)" when only n of the nt launched
-- threads should execute it; emit the body bare when all threads do.
-- Calling with n > nt is a code-generator invariant violation.
potentialCond gc mm n nt pp
  | n < nt =
    do
      cond gc mm (tid <* (fromIntegral n))
      begin
      pp
      end
  | n == nt = pp
  | otherwise = error "potentialCond: should not happen"
| svenssonjoel/GCDObsidian | Obsidian/GCDObsidian/CodeGen/Common.hs | bsd-3-clause | 5,103 | 0 | 13 | 1,419 | 1,913 | 997 | 916 | 111 | 4 |
module Lucid.Polymer where
import Lucid.Polymer.Internal
| athanclark/lucid-polymer | src/Lucid/Polymer.hs | bsd-3-clause | 58 | 0 | 4 | 6 | 12 | 8 | 4 | 2 | 0 |
{-# LANGUAGE FlexibleContexts #-}
-- | This module implements facilities for determining whether a
-- reduction or fold can be expressed in a closed form (i.e. not as a
-- SOAC).
--
-- Right now, the module can detect only trivial cases. In the
-- future, we would like to make it more powerful, as well as possibly
-- also being able to analyse sequential loops.
module Futhark.Optimise.Simplifier.ClosedForm
( foldClosedForm
, loopClosedForm
)
where
import Control.Monad
import Control.Applicative
import Data.Maybe
import qualified Data.HashMap.Lazy as HM
import qualified Data.HashSet as HS
import Data.Monoid
import Prelude
import Futhark.Construct
import Futhark.Representation.AST
import Futhark.Transform.Rename
import Futhark.MonadFreshNames
import Futhark.Optimise.Simplifier.RuleM
-- | A function that, given a variable name, returns its definition.
type VarLookup lore = VName -> Maybe (Exp lore)
{-
Motivation:
let {*[int,x_size_27] map_computed_shape_1286} = replicate(x_size_27,
all_equal_shape_1044) in
let {*[bool,x_size_27] map_size_checks_1292} = replicate(x_size_27, x_1291) in
let {bool all_equal_checked_1298, int all_equal_shape_1299} =
reduceT(fn {bool, int} (bool bacc_1293, int nacc_1294, bool belm_1295,
int nelm_1296) =>
let {bool tuplit_elems_1297} = bacc_1293 && belm_1295 in
{tuplit_elems_1297, nelm_1296},
{True, 0}, map_size_checks_1292, map_computed_shape_1286)
-}
-- | @foldClosedForm look foldfun accargs arrargs@ determines whether
-- each of the results of @foldfun@ can be expressed in a closed form.
--
-- If 'checkResults' succeeds, the fold is replaced by an @if@: the
-- accumulator values when the input is empty, otherwise the closed-form
-- body (renamed to keep names unique).
foldClosedForm :: MonadBinder m =>
                  VarLookup (Lore m)
               -> Pattern (Lore m)
               -> Lambda (Lore m)
               -> [SubExp] -> [VName]
               -> RuleM m ()
foldClosedForm look pat lam accs arrs = do
  inputsize <- arraysSize 0 <$> mapM lookupType arrs
  closedBody <- checkResults (patternNames pat) inputsize mempty knownBindings
                (map paramName (lambdaParams lam))
                (lambdaBody lam) accs
  isEmpty <- newVName "fold_input_is_empty"
  letBindNames'_ [isEmpty] $
    PrimOp $ CmpOp (CmpEq int32) inputsize (intConst Int32 0)
  letBind_ pat =<<
    eIf (eSubExp $ Var isEmpty)
    (resultBodyM accs)
    (renameBody closedBody)
  where knownBindings = determineKnownBindings look lam accs arrs
-- | @loopClosedForm pat respat merge bound bodys@ determines whether
-- the do-loop can be expressed in a closed form.
--
-- Mirrors 'foldClosedForm': merge parameters play the role of the
-- accumulators, and the trip count @bound@ the role of the input size.
loopClosedForm :: MonadBinder m =>
                  PatternT attr
               -> [(FParam (Lore m),SubExp)]
               -> Names -> SubExp -> Body (Lore m)
               -> RuleM m ()
loopClosedForm pat merge i bound body = do
  closedBody <- checkResults mergenames bound i knownBindings
                (map identName mergeidents) body mergeexp
  isEmpty <- newVName "bound_is_zero"
  -- NOTE(review): the binding is named "bound_is_zero" but the comparison
  -- is bound < 0 (CmpSlt); a zero-trip loop also has bound == 0 — confirm
  -- this is the intended condition.
  letBindNames'_ [isEmpty] $
    PrimOp $ CmpOp (CmpSlt Int32) bound (intConst Int32 0)
  letBindNames'_ (patternNames pat) =<<
    eIf (eSubExp $ Var isEmpty)
    (resultBodyM mergeexp)
    (renameBody closedBody)
  where (mergepat, mergeexp) = unzip merge
        mergeidents = map paramIdent mergepat
        mergenames = map paramName mergepat
        knownBindings = HM.fromList $ zip mergenames mergeexp
-- Try to express each result of the reduction body in closed form.
-- Supported shapes per result: a logical AND with the accumulator
-- (idempotent, so the closed form is one application) and integer/float
-- addition of a loop-invariant value (closed form: acc + el * size).
-- Any other shape aborts the rule via 'cannotSimplify'.
checkResults :: MonadBinder m =>
                [VName]
             -> SubExp
             -> Names
             -> HM.HashMap VName SubExp
             -> [VName] -- ^ Lambda-bound
             -> Body (Lore m)
             -> [SubExp]
             -> RuleM m (Body (Lore m))
checkResults pat size untouchable knownBindings params body accs = do
  ((), bnds) <- collectBindings $
                zipWithM_ checkResult (zip pat res) (zip accparams accs)
  mkBodyM bnds $ map Var pat
  where bndMap = makeBindMap body
        (accparams, _) = splitAt (length accs) params
        res = bodyResult body
        -- Names that may not be referenced freely by a closed form.
        nonFree = boundInBody body <>
                  HS.fromList params <>
                  untouchable
        checkResult (p, Var v) (accparam, acc) = do
          e@(PrimOp (BinOp bop x y)) <- liftMaybe $ HM.lookup v bndMap
          -- One of x,y must be *this* accumulator, and the other must
          -- be something that is free in the body.
          let isThisAccum = (==Var accparam)
          (this, el) <- liftMaybe $
                        case ((asFreeSubExp x, isThisAccum y),
                              (asFreeSubExp y, isThisAccum x)) of
                          ((Just free, True), _) -> Just (acc, free)
                          (_, (Just free, True)) -> Just (acc, free)
                          _ -> Nothing
          case bop of
            LogAnd -> do
              letBindNames'_ [v] e
              letBindNames'_ [p] $ PrimOp $ BinOp LogAnd this el
            Add t | Just properly_typed_size <- properIntSize t -> do
              size' <- properly_typed_size
              letBindNames'_ [p] =<<
                eBinOp (Add t) (eSubExp this)
                (pure $ PrimOp $ BinOp (Mul t) el size')
            FAdd t | Just properly_typed_size <- properFloatSize t -> do
              size' <- properly_typed_size
              letBindNames'_ [p] =<<
                eBinOp (FAdd t) (eSubExp this)
                (pure $ PrimOp $ BinOp (FMul t) el size')
            _ -> cannotSimplify -- Um... sorry.
        checkResult _ _ = cannotSimplify
        -- A subexpression usable in the closed form: constants pass
        -- through; bound variables only if they have a known binding.
        asFreeSubExp :: SubExp -> Maybe SubExp
        asFreeSubExp (Var v)
          | HS.member v nonFree = HM.lookup v knownBindings
        asFreeSubExp se = Just se
        -- Convert the size to the arithmetic type of the operator.
        properIntSize Int32 = Just $ return size
        properIntSize t = Just $ letSubExp "converted_size" $
          PrimOp $ ConvOp (SExt Int32 t) size
        properFloatSize t =
          Just $ letSubExp "converted_size" $
          PrimOp $ ConvOp (SIToFP Int32 t) size
-- Map lambda parameters to values known outside the loop: accumulator
-- parameters map to their initial values, and array parameters map to
-- the replicated element when the array is a Replicate.
determineKnownBindings :: VarLookup lore -> Lambda lore -> [SubExp] -> [VName]
                       -> HM.HashMap VName SubExp
determineKnownBindings look lam accs arrs =
  accBindings <> arrBindings
  where (accparams, arrparams) =
          splitAt (length accs) $ lambdaParams lam
        accBindings = HM.fromList $
                      zip (map paramName accparams) accs
        arrBindings = HM.fromList $ mapMaybe isReplicate $
                      zip (map paramName arrparams) arrs
        isReplicate (p, v)
          | Just (PrimOp (Replicate _ ve)) <- look v = Just (p, ve)
        isReplicate _ = Nothing
-- | Index a body's bindings by name, keeping only bindings whose
-- pattern binds exactly one variable.
makeBindMap :: Body lore -> HM.HashMap VName (Exp lore)
makeBindMap body = HM.fromList (mapMaybe singleton (bodyBindings body))
  where singleton (Let pat _ e)
          | [v] <- patternNames pat = Just (v, e)
          | otherwise = Nothing
| mrakgr/futhark | src/Futhark/Optimise/Simplifier/ClosedForm.hs | bsd-3-clause | 6,944 | 0 | 21 | 2,216 | 1,740 | 872 | 868 | 127 | 9 |
{-
Totality checker
-}
module Test () where
-- | Extract the sole element of the singleton list in the pair's second
-- component. The LiquidHaskell refinement below guarantees @len v = 1@,
-- making the otherwise-partial pattern total under refinement checking.
{-@ bar :: (a, {v:[b]|((len v) = 1)}) -> b @-}
bar (_, [x]) = x
| spinda/liquidhaskell | tests/gsoc15/broken/pos/grty3.hs | bsd-3-clause | 115 | 0 | 7 | 31 | 26 | 17 | 9 | 2 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
import Control.Applicative
import Control.Monad
import Control.Monad.ST
import qualified Data.Array as FA
import Data.Array.IO
import Data.Array.MArray
import Data.List
-- | Split a string on single space characters, keeping empty fields
-- (unlike 'words'): @split \"a  b\" == [\"a\",\"\",\"b\"]@, @split \"\" == [\"\"]@.
split :: String -> [String]
split "" = [""]
split (' ' : rest) = "" : split rest
split (c : rest) = (c : field) : fields
  where
    -- split always yields at least one field, so this match is total.
    field : fields = split rest
-- | Read one line from stdin and parse its space-separated fields as Ints.
readNums :: IO [Int]
readNums = do
  line <- getLine
  return (map read (split line))
-- | A 1-indexed binary heap backed by a mutable array. 'size' counts the
-- occupied slots; index 0 holds a sentinel (see 'newHeap') and is never a
-- real element.
data Heap a = Heap { array :: IOArray Int a
                   , size :: Int
                   }
-- | Allocate an empty heap with capacity for @size@ elements (slots
-- 1..size; slot 0 is the sentinel).
newHeap :: Int -> IO (Heap Int)
newHeap size = do
  array <- newArray (0, size) 0
  -- Sentinel smaller than any expected key, so insertHeap's sift-up
  -- always stops at the root. Assumes all inserted keys > -10001.
  writeArray array 0 (-10001)
  return $ Heap{ array = array
               , size = 0
               }
-- | Insert a key into the min-heap (smaller keys closer to the root):
-- open a hole at the end, then sift up, shifting greater ancestors down
-- until an ancestor <= num (ultimately the sentinel) is found.
insertHeap :: Int -> Heap Int -> IO (Heap Int)
insertHeap num heap@Heap{..} = do
  let idx = size + 1
  _insert idx num array
  return heap{ size = size + 1 }
  where
    _insert idx num array = do
      -- Parent of slot i lives at i `quot` 2.
      let newIdx = idx `quot` 2
      cItem <- readArray array newIdx
      if cItem > num
        then do
          writeArray array idx cItem
          _insert newIdx num array
        else do
          writeArray array idx num
-- | Keys on the path from slot @idx@ up to the root, in node-to-root
-- order; the sentinel slot 0 terminates the recursion and is excluded.
getPath :: IOArray Int Int -> Int -> IO [Int]
getPath _ 0 = return []
getPath arr idx = do
  num <- readArray arr idx
  remaining <- getPath arr (idx `quot` 2)
  return $ num:remaining
-- | Read the heap capacity and query count, build a min-heap from the
-- second input line, then for each queried slot index print the keys on
-- the path from that slot up to the root.
main :: IO ()
main = do
  n : m : _ <- readNums -- m (query count) is implied by the last line's length
  heap <- newHeap n
  nums <- readNums
  -- renamed from "newHeap", which shadowed the top-level constructor function
  filled <- foldM (flip insertHeap) heap nums
  indices <- readNums
  -- forM_ rather than forM: the per-query results are not used
  forM_ indices $ \idx -> do
    path <- getPath (array filled) idx
    putStrLn (unwords (map show path))
| y-usuzumi/survive-the-course | www.icourse163.org/ZJU-93001/第五周编程作业/1.hs | bsd-3-clause | 1,659 | 0 | 14 | 517 | 648 | 324 | 324 | 54 | 2 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
-- | The core printer combinators. The rest (in
-- Text.Pretty.Printers.Combinators) is defined in terms of the ones
-- in this module.
module Text.Pretty.Printers.Core where
import Data.Monoid
import Data.String
-- | Basic 1-dimensional documents that can be catenated ((<>)) from
-- Monoid and built from string literals via 'IsString'.
class (Monoid d, IsString d) => Document d where
-- | A class of 2-dimensional documents with the additional notion of
-- characters whose interpretation depends on the context.
class Document d => ContextDocument c d | c -> d where
  -- | Produce a document depending on the context
  ctxDoc :: (c -> d) -> d
-- | Contexts that track the current cursor position.
class PositionContext c where
  getPosition :: c -> Pos
-- | Contexts carrying user-supplied options of type @o@.
class OptionalContext c o | c -> o where
  getOptions :: c -> o
-- | Contexts that know the line-length policy in effect.
class LineLengthContext c where
  getLineLength :: c -> LineLength
-- | Line-length policy: an 'Absolute' column limit, or a 'Ribbon' limit
-- (presumably a ribbon width, i.e. non-indentation characters per line
-- — TODO confirm against the consumers of this type).
data LineLength = Absolute Int
                | Ribbon Int
-- | A position as an (Int, Int) pair (row/column ordering not fixed here).
data Pos = Pos (Int, Int)
-- | A document tree: literal chunks ('Build'), concatenation ('Cat'),
-- context-dependent documents ('Ctx'), and the empty document ('Empty').
data Doc a c = Build a
             | Cat (Doc a c) (Doc a c)
             | Ctx (c -> Doc a c)
             | Empty

-- | Concatenation with 'Empty' as identity: adjacent 'Build' chunks are
-- merged, 'Cat' nodes are re-associated to the right, and any remaining
-- combination is recorded as a 'Cat' node.
instance Monoid a => Monoid (Doc a c) where
  mappend (Build l) (Build r) = Build $ l <> r
  mappend Empty r = r
  mappend r Empty = r
  mappend (Cat d1 d2) d3 = d1 <> d2 <> d3
  mappend d1 (Cat d2 d3) = d1 <> d2 <> d3
  -- Fallback for the remaining Build/Ctx combinations. Without it the
  -- patterns above are non-exhaustive and e.g. @Build x <> Ctx f@
  -- crashed with a pattern-match failure at runtime.
  mappend l r = Cat l r
  mempty = Empty
| achudnov/printers | src/Text/Pretty/Printers/Core.hs | bsd-3-clause | 1,392 | 0 | 9 | 330 | 369 | 201 | 168 | -1 | -1 |
-- Copyright (c) 2014 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall -Werror #-}
-- | A module providing combinatorial testing utilities.
module Test.Utils.Combinatorial(
combinatorial2,
combinatorial3,
combinatorial4,
combinatorial5,
combinatorial6
) where
import Test.HUnitPlus.Base
-- | Generate a set of tests from a generator function and two lists
-- of possible values of arguments. The first argument varies slowest.
combinatorial2 :: (ty1 -> ty2 -> Test)
               -- ^ Function that produces a @Test@ from two arguments.
               -> [ty1]
               -- ^ Possible values of the first argument.
               -> [ty2]
               -- ^ Possible values of the second argument.
               -> [Test]
               -- ^ A list of tests
combinatorial2 mkTest args1 args2 =
  [ mkTest arg1 arg2 | arg1 <- args1, arg2 <- args2 ]
-- | Generate a set of tests from a generator function and three lists
-- of possible values of arguments. Earlier arguments vary slowest.
combinatorial3 :: (ty1 -> ty2 -> ty3 -> Test)
               -- ^ Function that produces a @Test@ from three arguments.
               -> [ty1]
               -- ^ Possible values of the first argument.
               -> [ty2]
               -- ^ Possible values of the second argument.
               -> [ty3]
               -- ^ Possible values of the third argument.
               -> [Test]
               -- ^ A list of tests
combinatorial3 mkTest args1 args2 args3 =
  [ mkTest arg1 arg2 arg3
  | arg1 <- args1, arg2 <- args2, arg3 <- args3 ]
-- | Generate a set of tests from a generator function and four lists
-- of possible values of arguments. Earlier arguments vary slowest.
combinatorial4 :: (ty1 -> ty2 -> ty3 -> ty4 -> Test)
               -- ^ Function that produces a @Test@ from four arguments.
               -> [ty1]
               -- ^ Possible values of the first argument.
               -> [ty2]
               -- ^ Possible values of the second argument.
               -> [ty3]
               -- ^ Possible values of the third argument.
               -> [ty4]
               -- ^ Possible values of the fourth argument.
               -> [Test]
               -- ^ A list of tests
combinatorial4 mkTest args1 args2 args3 args4 =
  [ mkTest arg1 arg2 arg3 arg4
  | arg1 <- args1, arg2 <- args2, arg3 <- args3, arg4 <- args4 ]
-- | Generate a set of tests from a generator function and five lists
-- of possible values of arguments. Earlier arguments vary slowest.
combinatorial5 :: (ty1 -> ty2 -> ty3 -> ty4 -> ty5 -> Test)
               -- ^ Function that produces a @Test@ from five arguments.
               -> [ty1]
               -- ^ Possible values of the first argument.
               -> [ty2]
               -- ^ Possible values of the second argument.
               -> [ty3]
               -- ^ Possible values of the third argument.
               -> [ty4]
               -- ^ Possible values of the fourth argument.
               -> [ty5]
               -- ^ Possible values of the fifth argument.
               -> [Test]
               -- ^ A list of tests
combinatorial5 mkTest args1 args2 args3 args4 args5 =
  [ mkTest arg1 arg2 arg3 arg4 arg5
  | arg1 <- args1, arg2 <- args2, arg3 <- args3
  , arg4 <- args4, arg5 <- args5 ]
-- | Generate a set of tests from a generator function and six lists
-- of possible values of arguments. Earlier arguments vary slowest.
combinatorial6 :: (ty1 -> ty2 -> ty3 -> ty4 -> ty5 -> ty6 -> Test)
               -- ^ Function that produces a @Test@ from six arguments.
               -> [ty1]
               -- ^ Possible values of the first argument.
               -> [ty2]
               -- ^ Possible values of the second argument.
               -> [ty3]
               -- ^ Possible values of the third argument.
               -> [ty4]
               -- ^ Possible values of the fourth argument.
               -> [ty5]
               -- ^ Possible values of the fifth argument.
               -> [ty6]
               -- ^ Possible values of the sixth argument.
               -> [Test]
               -- ^ A list of tests
combinatorial6 mkTest args1 args2 args3 args4 args5 args6 =
  [ mkTest arg1 arg2 arg3 arg4 arg5 arg6
  | arg1 <- args1, arg2 <- args2, arg3 <- args3
  , arg4 <- args4, arg5 <- args5, arg6 <- args6 ]
| emc2/test-utils | src/Test/Utils/Combinatorial.hs | bsd-3-clause | 7,157 | 0 | 24 | 2,842 | 891 | 511 | 380 | 89 | 1 |
{-# LANGUAGE RecordWildCards #-}
module CodeWidget.CodeWidgetAPI where
import Data.Maybe
import Data.List
import qualified Graphics.UI.Gtk as G
import qualified Graphics.UI.Gtk.SourceView as G
import Text.Parsec
import Text.Parsec.Pos
import Data.IORef
import Util
import CodeWidget.CodeWidgetTypes
import CodeWidget.CodeWidgetUtil
import CodeWidget.CodeWidgetInternal
import CodeWidget.CodeWidgetSBar
-- Individual API functions
-- | Load file @f@ into a fresh notebook page: build the scrolled
-- SourceView widget tree, install the read-only tag, create the root
-- region, insert the file contents and hook up all buffer/view signal
-- handlers.  Returns the root 'Region' of the new page.
codePageCreate :: RCodeView -> String -> IO Region
codePageCreate ref f = do
    cv <- readIORef ref
    contents <- readFile f
    -- widget hierarchy: vbox > scrolled window > source view
    box <- G.vBoxNew False 0
    G.widgetShow box
    scrolled <- G.scrolledWindowNew Nothing Nothing
    G.widgetShow scrolled
    G.boxPackStart box scrolled G.PackGrow 0
    tagTbl <- G.textTagTableNew
    buffer <- G.sourceBufferNew (Just tagTbl)
    -- highest-priority tag used to mark read-only stretches
    editTag <- G.textTagNew Nothing
    G.set editTag [G.textTagEditable G.:= False]
    G.textTagTableAdd tagTbl editTag
    G.textTagSetPriority editTag 0
    G.sourceBufferSetLanguage buffer (Just (cvLanguage cv))
    G.sourceBufferSetHighlightSyntax buffer True
    view <- G.sourceViewNewWithBuffer buffer
    G.set view [ G.sourceViewAutoIndent G.:= True
               , G.sourceViewIndentWidth G.:= 4
               , G.sourceViewTabWidth G.:= 4
               , G.sourceViewInsertSpacesInsteadOfTabs G.:= True
               ]
    G.widgetModifyFont view (Just (cvFont cv))
    G.textViewSetEditable view True
    G.widgetShow view
    G.containerAdd scrolled view
    page <- G.notebookAppendPage (cvNotebook cv) box f
    root <- mkRootRegion page buffer tagTbl
    apiStrLn $ "CW# pageCreate: file:" ++ show f ++ " pg:" ++ show page
    let newPage = PageContext { pgID = page
                              , pgView = view
                              , pgBuffer = buffer
                              , pgTagTable = tagTbl
                              , pgEditTag = editTag
                              , pgNextRegion = rootRegion + 1
                              , pgRegions = [root]
                              , pgFileName = f
                              }
    writeIORef ref cv { cvPages = newPage : cvPages cv }
    insAt <- G.textBufferGetStartIter buffer
    cvRgnInsertText newPage insAt contents
    cursAt <- G.textBufferGetStartIter buffer
    G.textBufferPlaceCursor buffer cursAt
    csbCursUpdate cv =<< cvCurPage cv
    -- wire up the buffer and view signal handlers
    _ <- G.on buffer G.deleteRange (bufSigDeleteRange ref)
    _ <- G.after buffer G.bufferInsertText (bufSigInsertText ref)
    _ <- G.after view G.pasteClipboard (viewSigPasteClibB ref)
    _ <- G.on view G.keyReleaseEvent (viewKeyRelease ref)
    _ <- G.after view G.moveCursor (csbCursMove ref)
    _ <- G.after view G.moveFocus (csbFocusMove ref)
    _ <- G.after buffer G.markSet (csbMarkSet ref)
    return (Region page rootRegion)
-- | Locate the region containing the cursor on the currently visible
-- notebook page, together with the cursor position translated to
-- region-relative coordinates.  'Nothing' when the cursor lies outside
-- every region.
codeRegionUnderCursor :: RCodeView -> IO (Maybe (Region, SourcePos))
codeRegionUnderCursor ref = do
    CodeView{..} <- readIORef ref
    pageid <- G.notebookGetCurrentPage cvNotebook
    let pc = fromJust $ find ((== pageid) . pgID) cvPages
    curpos <- cvCursorPos pc
    mrc <- cvWhoHoldsPos pc curpos
    case mrc of
      Nothing -> return Nothing
      Just rc -> do
        sp <- rgnStartPos pc rc
        -- on the region's first line, columns are offset by the
        -- region start; on later lines they are absolute
        let lin = sourceLine curpos - sourceLine sp
            col = if' (lin == 0) (sourceColumn curpos - sourceColumn sp + 1) (sourceColumn curpos)
        return $ Just ( Region pageid (rcRegion rc)
                      , newPos (sourceName curpos) (lin + 1) col )
-- | Create a new child region inside @parent@ at @pos@ with the given
-- editability, initial text and change callback @f@.
codeRegionCreate :: RCodeView -> Region -> SourcePos -> Bool -> String -> IO () -> IO Region
codeRegionCreate ref parent pos ed txt f = do
    cv <- readIORef ref
    ctx@(pg, x) <- maybe (error ("regionCreate: cannot find notebook page " ++ show (pid parent)))
                         return
                         (getContexts cv parent)
    r <- cvRgnCreateEmpty ref ctx pos ed f
    codeRegionSetText ref r txt
    apiStrLn $ "CW# regionCreate: pg:" ++ show (pgID pg) ++ " rg:" ++ show (rcRegion x) ++ " pos:" ++ show pos ++ " ed:" ++ show ed ++ " Region:" ++ show (rid r)
    return r
-- | Create a new child region spanning the text between @from@ and
-- @to@ inside @parent@, with the given editability and callback.
codeRegionCreateFrom :: RCodeView -> Region -> (SourcePos, SourcePos) -> Bool -> IO () -> IO Region
codeRegionCreateFrom ref parent (from, to) ed f = do
    cv <- readIORef ref
    ctx@(pg, x) <- maybe (error ("regionCreateFrom: cannot find parent region " ++ show parent))
                         return
                         (getContexts cv parent)
    r <- cvRgnCreateFrom ref ctx from to ed False f
    apiStrLn $ "CW# regionCreateFrom: pg:" ++ show (pgID pg) ++ " rg:" ++ show (rcRegion x) ++ " fm:" ++ show from ++ " to:" ++ show to ++ " ed:" ++ show ed ++ " Region:" ++ show (rid r)
    return r
-- | Toggle whether region @r@ accepts user edits.  Only legal for
-- leaf regions; regions with nested subregions raise an error.
codeRegionEditable :: RCodeView -> Region -> Bool -> IO ()
codeRegionEditable ref r b = do
    cv <- readIORef ref
    case getContexts cv r of
      Nothing -> error ("regionEditable: cannot find region " ++ show r)
      Just (p, x)
        | not (null (childRegions p x)) ->
            error ("regionEditable: cannot change region with nested subregions")
        | otherwise -> do
            apiStrLn $ "CW# regionEditable: pg:" ++ show (pgID p) ++ " rg:" ++ show (rcRegion x) ++ " ed:" ++ show b
            -- swap the updated region record into its page, and the
            -- updated page into the view state
            let nx = x { rcEditable = b }
                np = p { pgRegions = nx : otherRegions p (rcRegion x) }
            writeIORef ref cv { cvPages = np : otherPages cv (pgID p) }
            cvSetEditFlags np
            return ()
-- | Remove region @r@ from its page: drop its background tag, delete
-- its boundary marks and unregister it.  The root region (id 0) can
-- never be deleted, and negative ids are rejected.
codeRegionDelete :: RCodeView -> Region -> IO ()
codeRegionDelete ref r = do
    cv <- readIORef ref
    if rid r > 0
      then case getContexts cv r of
             Nothing -> error ("regionDelete: specified region does not exist: " ++ show r)
             Just (pg, x) -> do
               apiStrLn $ "CW# regionDelete: pg:" ++ show (pgID pg) ++ " rg:" ++ show (rcRegion x)
               si <- rgnStart pg x
               ei <- rgnEnd pg x
               G.textBufferRemoveTag (pgBuffer pg) (rcBgTag x) si ei
               G.textBufferDeleteMark (pgBuffer pg) (rcStart x)
               G.textBufferDeleteMark (pgBuffer pg) (rcEnd x)
               let npg = pg { pgRegions = otherRegions pg (rcRegion x) }
               writeIORef ref cv { cvPages = npg : otherPages cv (pgID npg) }
               cvSetEditFlags npg
      else if rid r == 0
             then error "regionDelete: attempt to delete root region!"
             else error $ "regionDelete: invalid negative region " ++ show r
-- | Fetch the current textual contents of region @r@.
codeRegionGetText :: RCodeView -> Region -> IO String
codeRegionGetText ref r = do
    cv <- readIORef ref
    maybe (error ("regionGetText: region not found: " ++ show r))
          (uncurry cvSubRgnText)
          (getContexts cv r)
-- | Extract the text between two region-relative positions of @r@.
codeRegionGetBoundedText :: RCodeView -> Region -> (SourcePos, SourcePos) -> IO String
codeRegionGetBoundedText ref r (from, to) = do
    cv <- readIORef ref
    (pg, rc) <- maybe (error ("regionGetBoundedText: region not found: " ++ show r))
                      return
                      (getContexts cv r)
    apiStrLn $ "CW# regionGetBoundedText: pg: " ++ show (pgID pg) ++ " rg:" ++ show (rcRegion rc) ++ " Fm:" ++ show from ++ " To:" ++ show to
    -- translate region-relative positions to buffer iterators
    s <- rgnMapPos pg rc from
    e <- rgnMapPos pg rc to
    si <- rootIterFromPos pg s
    ei <- rootIterFromPos pg e
    cvRgnGetText pg si ei False
-- | Replace the whole contents of region @r@ with @txt@.  The root
-- region replaces the entire buffer; child regions delete their span
-- and re-insert.
codeRegionSetText :: RCodeView -> Region -> String -> IO ()
codeRegionSetText ref r txt = do
    cv <- readIORef ref
    (pg, rc) <- maybe (error ("regionSetText: region not found: " ++ show r))
                      return
                      (getContexts cv r)
    apiStrLn $ "CW# regionSetText: pg: " ++ show (pgID pg) ++ " rg:" ++ show (rcRegion rc) ++ " Text:" ++ show txt
    if isRoot rc
      then G.textBufferSetText (pgBuffer pg) txt
      else do
        i1 <- rgnStart pg rc
        i2 <- rgnEnd pg rc
        G.textBufferDelete (pgBuffer pg) i1 i2
        G.textBufferInsert (pgBuffer pg) i1 txt
    cvSetEditFlags pg
-- | Insert @t@ at region @r@'s insertion mark.
codeRegionInsertText :: RCodeView -> Region -> String -> IO ()
codeRegionInsertText ref r t = do
    cv <- readIORef ref
    (pg, x) <- maybe (error ("regionInsertText: region not found: " ++ show r))
                     return
                     (getContexts cv r)
    apiStrLn $ "CW# regionInsertText: pg: " ++ show (pgID pg) ++ " rg:" ++ show (rcRegion x) ++ " Text:" ++ show t
    mark <- cvInsertMark ref pg x
    iter <- G.textBufferGetIterAtMark (pgBuffer pg) mark
    cvRgnInsertText pg iter t
    cvSetEditFlags pg
-- | Delete the text between two region-relative positions of @r@.
codeRegionDeleteText :: RCodeView -> Region -> (SourcePos, SourcePos) -> IO ()
codeRegionDeleteText ref r (from, to) = do
    cv <- readIORef ref
    (pg, x) <- maybe (error ("regionDeleteText: region not found: " ++ show r))
                     return
                     (getContexts cv r)
    apiStrLn $ "CW# regiondeleteText: pg: " ++ show (pgID pg) ++ " rg:" ++ show (rcRegion x) ++ " fm:" ++ show from ++ " to:" ++ show to
    s <- rgnMapPos pg x from
    e <- rgnMapPos pg x to
    si <- rootIterFromPos pg s
    ei <- rootIterFromPos pg e
    G.textBufferDelete (pgBuffer pg) si ei
-- | Fetch the full text of the page containing region @r@.
codeGetAllText :: RCodeView -> Region -> IO String
codeGetAllText ref r = do
    cv <- readIORef ref
    pg <- maybe (error "regionGetAllText: bad Region") return (getPage cv (pid r))
    apiStrLn $ "CW# regionGetAllText: pg:" ++ show (pgID pg)
    cvGetAllText pg (rid r)
-- | Allocate a fresh text tag and add it to the tag table of the page
-- owning region @r@.
codeTagNew :: RCodeView -> Region -> IO G.TextTag
codeTagNew ref r = do
    cv <- readIORef ref
    -- the tag is created before the lookup, matching the original
    -- allocation order
    tag <- G.textTagNew Nothing
    (pg, rc) <- maybe (error "tagNew: bad Region") return (getContexts cv r)
    apiStrLn $ "CW# tagNew: pg:" ++ show (pgID pg) ++ " Rg:" ++ show (rcRegion rc)
    G.textTagTableAdd (pgTagTable pg) tag
    return tag
-- | Apply text tag @t@ over the span @from@..@to@ (region-relative
-- positions) of region @r@.
codeRegionApplyTag :: RCodeView -> Region -> G.TextTag -> (SourcePos, SourcePos) -> IO ()
codeRegionApplyTag ref r t (from, to) = do
    cv <- readIORef ref
    (pg, x) <- maybe (error ("regionApplyTag: region not found: " ++ show r))
                     return
                     (getContexts cv r)
    rfrom <- rgnMapPos pg x from
    rto <- rgnMapPos pg x to
    siter <- rootIterFromPos pg rfrom
    eiter <- rootIterFromPos pg rto
    apiStrLn $ "CW# regionApplyTag: pg:" ++ show (pgID pg) ++ " Rg:" ++ show (rcRegion x) ++ " Fm:" ++ show rfrom ++ " to:" ++ show rto
    G.textBufferApplyTag (pgBuffer pg) t siter eiter
-- | Remove text tag @t@ from the whole extent of region @r@.
codeRegionRemoveTag :: RCodeView -> Region -> G.TextTag -> IO ()
codeRegionRemoveTag ref r t = do
    cv <- readIORef ref
    (pg, x) <- maybe (error ("regionRemoveTag: region not found: " ++ show r))
                     return
                     (getContexts cv r)
    apiStrLn $ "CW# regionRemoveTag: pg:" ++ show (pgID pg) ++ " Rg:" ++ show (rcRegion x)
    iter1 <- rgnStart pg x
    iter2 <- rgnEnd pg x
    G.textBufferRemoveTag (pgBuffer pg) t iter1 iter2
-- | Place text mark @m@ at region-relative position @p@ of region @r@.
codeRegionSetMark :: RCodeView -> Region -> G.TextMark -> SourcePos -> IO ()
codeRegionSetMark ref r m p = do
    cv <- readIORef ref
    (pg, x) <- maybe (error ("regionSetMark: region not found: " ++ show r))
                     return
                     (getContexts cv r)
    rpos <- rgnMapPos pg x p
    -- SourcePos is 1-based; GTK iterators are 0-based
    iter <- G.textBufferGetIterAtLineOffset (pgBuffer pg) (sourceLine rpos - 1) (sourceColumn rpos - 1)
    G.textBufferAddMark (pgBuffer pg) m iter
-- | Translate region-relative position @p@ of region @r@ into a
-- buffer iterator.
codeRegionGetIter :: RCodeView -> Region -> SourcePos -> IO G.TextIter
codeRegionGetIter ref r p = do
    cv <- readIORef ref
    (pg, x) <- maybe (error ("regionGetIter: region not found: " ++ show r))
                     return
                     (getContexts cv r)
    rpos <- rgnMapPos pg x p
    -- SourcePos is 1-based; GTK iterators are 0-based
    G.textBufferGetIterAtLineOffset (pgBuffer pg) (sourceLine rpos - 1) (sourceColumn rpos - 1)
-- | Report the current selection of the page owning @r@, translated
-- to region-relative coordinates of the region holding its start.
-- 'Nothing' when there is no selection or it starts outside every
-- region.
codeRegionGetSelection :: RCodeView -> Region -> IO (Maybe CwSelection)
codeRegionGetSelection ref r = do
    cv <- readIORef ref
    (pg, _) <- maybe (error "regionGetSelection: bad Region") return (getContexts cv r)
    hassel <- G.textBufferHasSelection (pgBuffer pg)
    if not hassel
      then return Nothing
      else do
        (ifm, ito) <- G.textBufferGetSelectionBounds (pgBuffer pg)
        pfm <- posFromIter pg ifm
        pto <- posFromIter pg ito
        apiStrLn $ "CW# regionGetSelection: From:" ++ show pfm ++ " To:" ++ show pto
        mrc <- cvWhoHoldsPos pg pfm
        case mrc of
          Nothing -> return Nothing
          Just rc -> do
            sp <- mapPosToRgn pg rc pfm
            ep <- mapPosToRgn pg rc pto
            apiStrLn $ "CW# getSel: R:" ++ show (rcRegion rc) ++ " ST:" ++ show sp ++ " ED:" ++ show ep
            return $ Just (CwSelection (Region (pgID pg) (rcRegion rc)) sp ep)
-- | Bring the given region-relative position of @r@ into view,
-- switching the notebook to the owning page first.
codeRegionScrollToPos :: RCodeView -> Region -> SourcePos -> IO ()
codeRegionScrollToPos ref r pos = do
    cv <- readIORef ref
    (pg, x) <- maybe (error ("regionScrollToPos: region not found: " ++ show r))
                     return
                     (getContexts cv r)
    apiStrLn $ "CW# regionScrollToPos: pg:" ++ show (pgID pg) ++ " Rg:" ++ show (rcRegion x) ++ " Pos:" ++ show pos
    rpos <- rgnMapPos pg x pos
    iter <- rootIterFromPos pg rpos
    cvSetMyPage cv pg
    _ <- G.textViewScrollToIter (pgView pg) iter 0.1 Nothing
    return ()
-- | Debug helper: dump the region table of the page owning @r@.
codeDumpRegions :: RCodeView -> Region -> IO ()
codeDumpRegions ref r = do
    cv <- readIORef ref
    pg <- maybe (error ("dumpRegions: page not found: " ++ show r)) return (getPage cv (pid r))
    dumpRgns pg
| termite2/code-widget | CodeWidget/CodeWidgetAPI.hs | bsd-3-clause | 16,485 | 0 | 29 | 6,746 | 4,852 | 2,284 | 2,568 | 278 | 4 |
{-# OPTIONS -fno-warn-unused-do-bind #-}
module Main where
import Control.Applicative
import System.Directory
import System.Environment
import System.Exit
import System.IO
import System.Process
import Text.Printf
--------------------------------------------------------------------------------
-- Commands
-- | Location of the template archive that seeds a new project.
-- NOTE(review): the leading "~" relies on shell expansion -- this
-- works because the command is run via 'quiteShell'; confirm if the
-- shell invocation is ever removed.
newProjectTar :: FilePath
newProjectTar = "~/dev/new-project.tar.gz"
-- | Shell command that unpacks the project template into @dir@.
tarCmd :: FilePath -> CreateProcess
tarCmd dir = (quiteShell $ printf "tar xzf %s" newProjectTar) { cwd = Just dir }
-- | Shell command that initialises a git repository in @dir@.
gitInitCmd :: FilePath -> CreateProcess
gitInitCmd dir = (quiteShell "git init") { cwd = Just dir }
--------------------------------------------------------------------------------
-- IO stuff
-- | Run a process description, wait for it to finish, and on non-zero
-- exit print its captured stderr (each line prefixed with "! ") and
-- terminate the program.
runCmd :: CreateProcess -> IO ()
runCmd p = do
  -- std_err is forced to a pipe, so the 'Just herr' pattern is safe
  (_, _, Just herr, ph) <- createProcess p { std_err = CreatePipe }
  ex <- waitForProcess ph
  case ex of
       ExitSuccess -> return ()
       ExitFailure _ -> do
         errLog <- (unlines . map ("! " ++) . lines) <$> hGetContents herr
         printf "Shell error:\n\n%s\n" errLog
         exitFailure
-- | Entry point: @newProject DIR@ creates DIR, unpacks the template
-- archive into it and initialises a git repository there.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [dir] -> do
      exists <- doesDirectoryExist dir
      if not exists then do
        createDirectory dir
        runCmd $ tarCmd dir
        runCmd $ gitInitCmd dir
        printf "New project created.\n"
       else do
        printf "Directory already exists.\n"
        exitFailure
    -- previously `[dir] <- getArgs` crashed with an opaque
    -- pattern-match failure when the argument count was wrong;
    -- print a usage message instead
    _ -> do
      printf "Usage: newProject <directory>\n"
      exitFailure
--------------------------------------------------------------------------------
-- Helper
-- | Build a shell 'CreateProcess' with both stdout and stderr captured
-- through pipes.  (The name is presumably a typo for "quietShell";
-- kept unchanged because call sites use it.)
quiteShell :: String -> CreateProcess
quiteShell s = (shell s) { std_out = CreatePipe, std_err = CreatePipe }
| mcmaniac/HScript | ex-src/newProject/Main.hs | bsd-3-clause | 1,599 | 0 | 18 | 345 | 392 | 201 | 191 | 39 | 2 |
module Perseus.Storage where
| bergey/perseus | src/Perseus/Storage.hs | bsd-3-clause | 29 | 0 | 3 | 3 | 6 | 4 | 2 | 1 | 0 |
-- | A simplified MID file player, as an example application using System.MIDI.
-- You will need a GM (General MIDI) capable synth, or something like that (Windows has one built-in).
--
module Main where
--------------------------------------------------------------------------------
import Data.Ord
import Data.List
import Control.Concurrent
import Control.Monad
import System.IO
import System.Environment
import System.Exit
import System.MIDI
import System.MIDI.Utility
import SMF
--------------------------------------------------------------------------------
-- player thread
-- | Polling playback loop: roughly once per millisecond, inspect the
-- queue of timestamped events and send the head event once its
-- timestamp has passed.  Terminates when the queue is empty.
player :: Connection -> MVar [MidiEvent] -> IO ()
player conn mv = do
  t <- currentTime conn
  evs <- readMVar mv
  case evs of
    [] -> do
      putStrLn "the song ended."
      return ()
    (MidiEvent s ev):evs' -> do
      when (s<=t) $ do
        -- NOTE(review): readMVar followed by swapMVar is not atomic;
        -- safe only while a single player thread owns this MVar.
        swapMVar mv evs'
        case ev of
          SysEx _ -> return ()     -- SysEx and Undefined are skipped
          Undefined -> return ()
          _ -> send conn ev
      threadDelay 1000
      player conn mv
-- song
-- | A song: its tempo in beats per minute and one event list per
-- track, with timestamps already rescaled to milliseconds by 'toSong'.
data Song = Song
  { song_bpm :: Float
  , song_tracks :: [[MidiEvent]]
  }
-- | Apply a partial function to every element, keeping the 'Just'
-- results — the same contract as 'Data.Maybe.mapMaybe'.  Rewritten to
-- apply @f@ only once per element and to avoid the partial 'fromJust'
-- of the original implementation.
filterMap :: (a -> Maybe b) -> [a] -> [b]
filterMap f = foldr keep []
  where
    -- cons the mapped value when f succeeds, drop the element otherwise
    keep x acc = case f x of
      Just y  -> y : acc
      Nothing -> acc
-- | Project a raw track event to its meta part: (tick, meta event),
-- or 'Nothing' for plain MIDI events.
tmeta (MidiEvent' ts (Left x)) = Just (ts,x)
tmeta (MidiEvent' _ (Right _)) = Nothing
-- | Project a raw track event to a plain MIDI event (timestamp still
-- in ticks), or 'Nothing' for meta events.
tmidi (MidiEvent' _ (Left _)) = Nothing
tmidi (MidiEvent' ts (Right y)) = Just $ MidiEvent (fromIntegral ts) y
-- | Convert raw SMF tracks to a 'Song': pick the first tempo meta
-- event (defaulting to the MIDI standard of 120 BPM) and rescale all
-- event timestamps from ticks to milliseconds.
toSong :: TimeBase -> [[MidiEvent']] -> Song
toSong division tracks = Song bpm $ map convert midi where
  convert = map (\(MidiEvent ts ev) -> MidiEvent (round $ pertick * fromIntegral ts) ev)
  tTempo (_,Tempo tempo) = Just tempo
  tTempo _ = Nothing
  tempos = filterMap tTempo metaAll
  -- 500000 microseconds per quarter note is the MIDI default (120 BPM)
  (tempo,bpm) = case tempos of
    [] -> ( 500000 , 120 )
    t:_ -> ( t , 60000000 / fromIntegral t )
  pertick = timestampUnitInMilisecs division tempo
  metaAll = concat meta
  meta = map (filterMap tmeta) tracks
  midi = map (filterMap tmidi) tracks
--------------------------------------------------------------------------------
-- | Load the MIDI file named on the command line, merge all tracks
-- into one chronologically sorted stream, and play it to a
-- user-chosen MIDI output until the song ends or ENTER is pressed.
main = do
  args <- getArgs
  fname <- case args of
    [s] -> return s
    _ -> do
      putStrLn "Usage: playmidi <fname.mid>"
      exitFailure
  ((_,division),tracks) <- loadSMF fname
  let song = toSong division tracks
  putStrLn $ "bpm = " ++ show (song_bpm song)
  -- flatten and sort all tracks by (millisecond) timestamp
  let events = sortBy (comparing $ \(MidiEvent t _) -> t) $ concat (song_tracks song)
  mv <- newMVar events
  dst <- selectOutputDevice "Select midi output device" Nothing
  conn <- openDestination dst
  start conn
  thread <- forkIO (player conn mv)
  putStrLn "Press 'ENTER' to exit."
  getLine
  killThread thread
  close conn
| chpatrick/hmidi | examples/playmidi.hs | bsd-3-clause | 2,793 | 0 | 19 | 677 | 942 | 466 | 476 | 74 | 4 |
module Data.Astro.Time.ConvTest
(
tests
)
where
import Data.Astro.Types
import Data.Time.LocalTime
import Data.Astro.Time.Conv
import Data.Astro.Time.JulianDate
import Data.Astro.Time.JulianDateTest (testJD)
import Test.Framework (testGroup)
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.HUnit
import Test.HUnit.Approx
import Test.QuickCheck
-- | Property groups checking that LCT/LCD values round-trip through
-- 'ZonedTime' for several time zones.  The group labels previously
-- claimed "TZ: -5" while the argument actually passed was 4; the
-- labels are corrected to match the tested value.
tests = [ testGroup "LCT conversion properties" [
            testProperty "TZ: 4" $ prop_LCTConversion 4
            , testProperty "TZ: 0" $ prop_LCTConversion 0
            , testProperty "TZ: 3" $ prop_LCTConversion 3
            ]
        , testGroup "LCD Conversion properties" [
            testProperty "TZ: 4" $ prop_LCDConversion 4
            , testProperty "TZ: 0" $ prop_LCDConversion 0
            , testProperty "TZ: 3" $ prop_LCDConversion 3
            ]
        ]
-- | Converting an LCT to 'ZonedTime' and back must preserve the
-- Julian date to within 1e-8 days (under a millisecond).
prop_LCTConversion tz = forAll (choose (0, 999999999)) check
  where check n =
          let jd = LCT (DH tz) (JD n)
              jd2 = zonedTimeToLCT $ lctToZonedTime jd
              LCT _ (JD n2) = jd2
          in abs(n - n2) < 0.00000001
-- | Converting an LCD (date-only value) to 'ZonedTime' and back must
-- preserve the Julian day number to within 1e-8 days.
prop_LCDConversion tz = forAll (choose (0, 999999999)) check
  where check n =
          let (jd, _) = splitToDayAndTime (JD n) -- drop time part
              lct = LCT (DH tz) jd
              lcd2 = zonedTimeToLCD $ lctToZonedTime lct
              LCD _ (JD n2) = lcd2
              JD n1 = jd
          in abs(n1 - n2) < 0.00000001
| Alexander-Ignatyev/astro | test/Data/Astro/Time/ConvTest.hs | bsd-3-clause | 1,496 | 0 | 13 | 443 | 432 | 228 | 204 | 36 | 1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.ARB.PipelineStatisticsQuery
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.ARB.PipelineStatisticsQuery (
-- * Extension Support
glGetARBPipelineStatisticsQuery,
gl_ARB_pipeline_statistics_query,
-- * Enums
pattern GL_CLIPPING_INPUT_PRIMITIVES_ARB,
pattern GL_CLIPPING_OUTPUT_PRIMITIVES_ARB,
pattern GL_COMPUTE_SHADER_INVOCATIONS_ARB,
pattern GL_FRAGMENT_SHADER_INVOCATIONS_ARB,
pattern GL_GEOMETRY_SHADER_INVOCATIONS,
pattern GL_GEOMETRY_SHADER_PRIMITIVES_EMITTED_ARB,
pattern GL_PRIMITIVES_SUBMITTED_ARB,
pattern GL_TESS_CONTROL_SHADER_PATCHES_ARB,
pattern GL_TESS_EVALUATION_SHADER_INVOCATIONS_ARB,
pattern GL_VERTEX_SHADER_INVOCATIONS_ARB,
pattern GL_VERTICES_SUBMITTED_ARB
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/ARB/PipelineStatisticsQuery.hs | bsd-3-clause | 1,154 | 0 | 5 | 131 | 97 | 66 | 31 | 17 | 0 |
{-# LANGUAGE OverloadedStrings, GADTs, StandaloneDeriving #-}
module SMVM.MatrixMarket (Matrix(..), readMatrix) where
import Control.Applicative hiding (many)
import Data.Complex
import Data.Attoparsec.Char8 hiding (parse, Result(..))
import Data.Attoparsec.Lazy as AL (parse, Result(..))
import Data.ByteString.Lex.Double
import qualified Data.ByteString.Lazy as L
-- | Specifies the element type. Pattern matrices do not have any elements,
-- only indices, and only make sense for coordinate matrices and vectors.
--
data Field = Real | Complex | Integer | Pattern
deriving (Eq, Show)
-- | Specifies either sparse or dense storage. In sparse (\"coordinate\")
-- storage, elements are given in (i,j,x) triplets for matrices (or (i,x) for
-- vectors). Indices are 1-based, so that A(1,1) is the first element of a
-- matrix, and x(1) is the first element of a vector.
--
-- In dense (\"array\") storage, elements are given in column-major order.
--
-- In both cases, each element is given on a separate line.
--
data Format = Coordinate | Array
deriving (Eq, Show)
-- | Specifies any special structure in the matrix. For symmetric and hermition
-- matrices, only the lower-triangular part of the matrix is given. For skew
-- matrices, only the entries below the diagonal are stored.
--
data Structure = General | Symmetric | Hermitian | Skew
deriving (Eq, Show)
-- We really want a type parameter to Matrix, but I think that requires some
-- kind of dynamic typing so that we can determine (a ~ Integral) or (a ~
-- RealFloat), and so forth, depending on the file being read. This will do for
-- our purposes...
--
-- Format is: (rows,columns) nnz [(row,column,value)]
--
data Matrix where
PatternMatrix :: (Int,Int) -> Int -> [(Int,Int)] -> Matrix
IntMatrix :: (Int,Int) -> Int -> [(Int,Int,Int)] -> Matrix
RealMatrix :: (Int,Int) -> Int -> [(Int,Int,Float)] -> Matrix
ComplexMatrix :: (Int,Int) -> Int -> [(Int,Int,Complex Float)] -> Matrix
deriving instance Show Matrix
--------------------------------------------------------------------------------
-- Combinators
--------------------------------------------------------------------------------
-- | Skip a comment: a line beginning with \'%\' up to and including
-- its end-of-line terminator.
comment :: Parser ()
comment = char '%' *> skipWhile (not . eol) *> endOfLine
  where eol w = w `elem` "\n\r"
-- | Parse a floating-point number after skipping leading whitespace.
floating :: Fractional a => Parser a
floating = do
  mv <- readDouble <$> (skipSpace *> takeTill isSpace) -- readDouble does the fancy stuff
  case mv of
    Just (v,_) -> return . realToFrac $ v
    Nothing -> fail "floating-point number"
-- | Parse a decimal integer after skipping leading whitespace.
integral :: Integral a => Parser a
integral = skipSpace *> decimal
-- | Parse the storage format keyword ("coordinate" or "array").
format :: Parser Format
format = string "coordinate" *> pure Coordinate
     <|> string "array" *> pure Array
     <?> "matrix format"
-- | Parse the element type keyword.
field :: Parser Field
field = string "real" *> pure Real
    <|> string "complex" *> pure Complex
    <|> string "integer" *> pure Integer
    <|> string "pattern" *> pure Pattern
    <?> "matrix field"
-- | Parse the structure keyword.
structure :: Parser Structure
structure = string "general" *> pure General
        <|> string "symmetric" *> pure Symmetric
        <|> string "hermitian" *> pure Hermitian
        <|> string "skew-symmetric" *> pure Skew
        <?> "matrix structure"
-- | Parse the "%%MatrixMarket matrix" banner line, yielding the
-- declared format, field and structure.
header :: Parser (Format,Field,Structure)
header = string "%%MatrixMarket matrix"
      >> (,,) <$> (skipSpace *> format)
              <*> (skipSpace *> field)
              <*> (skipSpace *> structure)
              <* endOfLine
      <?> "MatrixMarket header"
-- | Parse the size line: rows, columns and the number of entries.
-- The three-element pattern is total because 'count 3' always yields
-- exactly three values.
extent :: Parser (Int,Int,Int)
extent = do
  [m,n,l] <- skipWhile isSpace *> count 3 integral <* endOfLine
  return (m,n,l)
-- | Parse one coordinate entry line: row index, column index and a
-- value produced by @f@.
line :: Parser a -> Parser (Int,Int,a)
line f = (,,) <$> integral
              <*> integral
              <*> f
              <* endOfLine
--------------------------------------------------------------------------------
-- Matrix Market
--------------------------------------------------------------------------------
-- | Parse an entire MatrixMarket body: banner, comments, extents and
-- then one entry per line, dispatching on the declared element type.
matrix :: Parser Matrix
matrix = do
  (_,t,_) <- header
  (m,n,l) <- skipMany comment *> extent
  case t of
    Real -> RealMatrix (m,n) l `fmap` many1 (line floating)
    Complex -> ComplexMatrix (m,n) l `fmap` many1 (line ((:+) <$> floating <*> floating))
    Integer -> IntMatrix (m,n) l `fmap` many1 (line integral)
    Pattern -> PatternMatrix (m,n) l `fmap` many1 ((,) <$> integral <*> integral)
-- | Read and parse a MatrixMarket file, raising 'error' with the file
-- name and parser message on failure.
readMatrix :: FilePath -> IO Matrix
readMatrix file = do
  chunks <- L.readFile file
  case parse matrix chunks of
    AL.Fail _ _ msg -> error $ file ++ ": " ++ msg
    AL.Done _ mtx -> return mtx
| blambo/accelerate-examples | tests/simple/SMVM/MatrixMarket.hs | bsd-3-clause | 4,708 | 0 | 16 | 1,092 | 1,172 | 643 | 529 | 78 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Api.Pin.Data where
import Control.Applicative
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LE
import Data.Aeson
import Data.Aeson.Types
import Data.Maybe
import Data.Text
import Data.Text.Encoding
import Data.Vector (toList)
import Network.HTTP.Conduit
-- | Flatten a charge request into the form-encoded key/value pairs
-- expected by Pin's charges endpoint (card fields use the
-- @card[...]@ naming scheme).  NOTE(review): 'PinRequest' and its
-- accessors are declared elsewhere -- field semantics assumed from
-- the parameter names.
toParams :: PinRequest -> [(B.ByteString, B.ByteString)]
toParams pin = [
    ("amount", fromInt . pinAmount $ pin)
  , ("description", fromText . pinDescription $ pin)
  , ("email", fromText . pinEmail $ pin)
  , ("ip_address", fromText . pinIp $ pin)
  , ("card[number]", fromText . pinNumber $ pin)
  , ("card[expiry_month]", fromInt . pinExpiryMonth $ pin)
  , ("card[expiry_year]", fromInt . pinExpiryYear $ pin)
  , ("card[cvc]", fromInt . pinCvc $ pin)
  , ("card[name]", fromText . pinName $ pin)
  , ("card[address_line1]", fromText . pinAddress1 . pinAddress $ pin)
  , ("card[address_line2]", fromText . fromMaybe "" . pinAddress2 . pinAddress $ pin)
  , ("card[address_city]", fromText . pinCity . pinAddress $ pin)
  , ("card[address_postcode]", fromText . pinPostcode . pinAddress $ pin)
  , ("card[address_state]", fromText . pinState . pinAddress $ pin)
  , ("card[address_country]", fromText . pinCountry . pinAddress $ pin)
  ]
-- | Render an 'Int' as a UTF-8 encoded strict 'B.ByteString'.
fromInt :: Int -> B.ByteString
fromInt n = fromText (pack (show n))
-- | Encode strict 'Text' as a UTF-8 strict 'B.ByteString'.
fromText :: Text -> B.ByteString
fromText = encodeUtf8
-- | Decode a UTF-8 lazy 'BL.ByteString' into strict 'Text'.
toText :: BL.ByteString -> Text
toText = LT.toStrict . LE.decodeUtf8
-- | Collapse a lazy 'BL.ByteString' into a strict one.  Uses the
-- library's 'BL.toStrict' instead of the hand-rolled
-- @B.concat . BL.toChunks@, which it supersedes.
toStrictBS :: BL.ByteString -> B.ByteString
toStrictBS = BL.toStrict
-- urlEncodedBody params
type PinAmount = Int -- Amount in cents
data PinConfig = PinConfig {
pinUrl :: Text
, pinApiKey :: Text
, pinManagerSettings :: ManagerSettings
}
-- | The configured API key as a UTF-8 'B.ByteString' (for HTTP auth).
pinApiKeyBS :: PinConfig -> B.ByteString
pinApiKeyBS = encodeUtf8 . pinApiKey
data PinAddress =
PinAddress {
pinAddress1 :: Text
, pinAddress2 :: Maybe Text
, pinCity :: Text
, pinPostcode :: Text
, pinState :: Text
, pinCountry :: Text
}
deriving (Show)
type PinCustomerToken = Text
type PinCardToken = Text
type PinChargeToken = Text
type PinName = Text
type PinCurrency = Maybe Text
type PinEmail = Text
type PinIp = Text
type PinMonth = Int
type PinYear = Int
type PinCardNumber = Text
type PinDisplayNumber = Text
type PinScheme = Text
type PinCvc = Int
type PinDate = Text
data PinChargeDetails =
PinChargeDetails {
pinChargeAmount :: PinAmount
, pinChargeDescription :: Text
, pinChargeCurrency :: Maybe Text
}
data PinCard =
PinCard PinCardNumber PinMonth PinYear PinCvc PinName PinAddress
data PinCharge =
PinCharge PinCard PinChargeDetails PinEmail PinIp
| PinChargeCard PinCardToken PinChargeDetails PinEmail PinIp
| PinChargeCustomer PinCustomerToken PinChargeDetails PinEmail
data PinRefund =
PinRefund PinChargeToken PinAmount
data PinCreateCard =
PinCreateCard PinCard
data PinCreateCustomer =
PinCreateCustomer PinEmail PinCard
| PinCreateCustomerFromCard PinEmail PinCardToken
data PinDisplayCharge =
PinDisplayCharge PinChargeToken Bool PinChargeDetails PinEmail PinIp PinDate (Maybe Text) (Maybe Text) PinDisplayCard -- FIX transfer????
data PinDisplayCard =
PinDisplayCard PinCardToken PinDisplayNumber PinScheme PinAddress Bool
data PinCustomerResponse =
PinCustomerResponse PinCustomerToken PinEmail PinDate PinDisplayCard
data PinResponse a =
PinResponseSuccess a
| PinResponseUnauthorized
| PinResponseUnproccessible {
pinResponseError :: Text
, pinResponseErrorDescription :: Text
, pinResponseMessages :: [(Text, Text)] -- (Code, Message)
}
| PinResponseServerError Text
| PinResponseInvalidResponseCode Int Text
| PinResponseJsonSyntaxError Int Text Text
| PinResponseJsonFormatError Int Text Text
deriving (Show)
data PinResponseSuccessData =
PinResponseSuccessData
Text
Bool
PinAmount
Text
Text
Text
Text
PinCard
data PinResponseUnproccessibleData =
PinResponseUnproccessibleData
Text
Text
[(Text, Text)]
-- | Parse one error message object into its (code, message) pair.
message :: Value -> Parser (Text, Text)
message (Object o) = (,) <$> o .: "code" <*> o .: "message"
message _ = fail "Expected message to be an object"
-- | Parse the JSON array of error message objects.
messages :: Value -> Parser [(Text, Text)]
messages (Array a) = mapM message (toList a)
messages _ = fail "Expected messages to be an array"
-- | Decode Pin's 422 error payload: top-level error, description and
-- the list of per-field messages.
instance FromJSON PinResponseUnproccessibleData where
  parseJSON (Object o) = PinResponseUnproccessibleData
    <$> o .: "error"
    <*> o .: "error_description"
    <*> (o .: "messages" >>= messages)
  parseJSON _ = fail "Invalid PinResponseUnproccessibleData"
-- | Decode a successful charge payload, which Pin nests under a
-- top-level @response@ key.
instance FromJSON PinResponseSuccessData where
  parseJSON (Object o) =
    o .: "response" >>= \response -> case response of
      (Object oo) -> PinResponseSuccessData
        <$> oo .: "token"
        <*> oo .: "success"
        <*> oo .: "amount"
        <*> oo .: "description"
        <*> oo .: "email"
        <*> oo .: "ip_address"
        <*> oo .: "created_at"
        <*> oo .: "card"
      -- copy-paste fix: failure messages previously claimed "PinCard"
      _ -> fail "Invalid PinResponseSuccessData.response"
  parseJSON _ = fail "Invalid PinResponseSuccessData"
-- | Decode a card address from Pin's flat @address_*@ JSON fields.
instance FromJSON PinAddress where
  parseJSON (Object o) = PinAddress
    <$> o .: "address_line1"
    <*> o .: "address_line2"
    <*> o .: "address_city"
    <*> o .: "address_postcode"
    <*> o .: "address_state"
    <*> o .: "address_country"
  -- copy-paste fix: failure message previously claimed "Invalid PinCard"
  parseJSON _ = fail "Invalid PinAddress"
-- | Decode a card: token, masked number and scheme, plus the embedded
-- address parsed from the same (flat) object.
instance FromJSON PinCard where
  parseJSON o'@(Object o) = PinCard
    <$> o .: "token"
    <*> o .: "display_number"
    <*> o .: "scheme"
    <*> parseJSON o'
  parseJSON _ = fail "Invalid PinCard"
-- | Error value for a response body that was not syntactically valid
-- JSON.  NOTE(review): the signatures here use @PinResponse@ without
-- its declared type argument, and 'PinResponseSuccess' below is
-- applied to eight arguments although it is declared with one field;
-- confirm this module matches the real declarations and compiles.
syntaxErr :: Int -> BL.ByteString -> Text -> PinResponse
syntaxErr code body msg = PinResponseJsonSyntaxError code msg (toText body)
-- | Error value for JSON that parsed but had an unexpected shape.
formatErr :: Int -> BL.ByteString -> Text -> PinResponse
formatErr code body msg = PinResponseJsonFormatError code msg (toText body)
-- | Re-wrap a decoded success payload as a 'PinResponse'.
successToPinResponse :: PinResponseSuccessData -> PinResponse
successToPinResponse (PinResponseSuccessData t r a d e ip ts c) = PinResponseSuccess t r a d e ip ts c
-- | Re-wrap a decoded 422 payload as a 'PinResponse'.
unprocessibleToPinResponse :: PinResponseUnproccessibleData -> PinResponse
unprocessibleToPinResponse (PinResponseUnproccessibleData e d ms) = PinResponseUnproccessible e d ms
| markhibberd/pin | src/Network/Api/Pin/Data.hs | bsd-3-clause | 6,329 | 0 | 26 | 1,245 | 1,658 | 923 | 735 | 170 | 1 |
import Data.List
import Data.Numbers.Primes
-- | Prime factorisation of @n@ by trial division over the prime
-- stream, with repeated factors listed individually.  Once the trial
-- prime's square exceeds the remaining cofactor, the cofactor itself
-- is prime (or 1, for n == 1) and ends the list.
decompose :: Int -> [Int]
decompose n = go primes n
  where
    go (p:ps) m
      | p * p > m      = [m]
      | m `mod` p == 0 = p : go (p:ps) (m `div` p)
      | otherwise      = go ps m
-- | Sum of the proper divisors of @n@ via the prime factorisation:
-- for each prime power p^k the inner fold builds 1 + p + ... + p^k,
-- the product of those factors is sigma(n), and subtracting n leaves
-- only the proper divisors.
sumProperDivisors :: Int -> Int
sumProperDivisors n = product (map pisigma (group (decompose n))) - n
  where pisigma xs = 1 + (foldl (\a b -> b + (a * b)) 0 xs)
-- | All amicable numbers in increasing order: pairs a /= b with
-- sumProperDivisors a == b and sumProperDivisors b == a.
amicables :: [Int]
amicables = [a | a <- [1..], let b = sumProperDivisors a, sumProperDivisors b == a, a /= b]
-- | Project Euler 21: the sum of all amicable numbers below 10000.
main :: IO ()
main = print $ sum (takeWhile (<10000) amicables)
| JacksonGariety/euler.hs | 021.hs | bsd-3-clause | 614 | 0 | 14 | 178 | 317 | 163 | 154 | 15 | 1 |
{-# LANGUAGE TupleSections #-}
module Monto.DependencyGraph where
import Data.Graph.Inductive (Adj, Context, Gr, Node)
import qualified Data.Graph.Inductive as G
import Data.List (mapAccumR)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe
-- | A labelled dependency graph over vertex values @v@ with edge
-- labels @e@, backed by fgl and a map from vertex values to node ids.
data DependencyGraph v e
  = DependencyGraph
  { maxNode       :: Node    -- ^ Highest node id handed out so far.
  , nodeMap       :: Map v Node  -- ^ Vertex value -> fgl node id.
  , dependencies  :: Gr v e  -- ^ The underlying fgl graph.
  } deriving (Eq)
instance (Show v, Show e) => Show (DependencyGraph v e) where
show gr = G.prettify (dependencies gr)
-- | The empty dependency graph.  'maxNode' is seeded with 1, so ids
-- handed out by 'registerNode' start at 2.
empty :: DependencyGraph v e
empty = DependencyGraph
  { nodeMap = M.empty
  , dependencies = G.empty
  , maxNode = 1
  }
-- | (Re)register vertex @from@ with labelled dependency edges to @to@:
-- any existing outgoing edges of @from@ are dropped and replaced by
-- the given ones.  Vertices not yet in the graph are created.
register :: (Ord v) => v -> [(e,v)] -> DependencyGraph v e -> DependencyGraph v e
{-# INLINE register #-}
register from to gr =
  let (gr',fromNode) = registerNode gr from
      (gr'',toNodes) = mapAccumR registerNode gr' (map snd to)
      -- delete old successors first, then insert the fresh edges
      dependencies' = G.insEdges [ (fromNode,toNode,e) | (e,toNode) <- zip (map fst to) toNodes]
                    $ G.delEdges [ (fromNode,suc) | suc <- G.suc (dependencies gr'') fromNode ]
                    $ dependencies gr''
  in gr''
     { dependencies = dependencies'
     }
-- | Delete every vertex whose key satisfies @predicate@ (and its
-- incident edges).  NOTE(review): despite the name, matching vertices
-- are *removed*, not kept -- confirm callers expect these semantics.
filterDeps :: (v -> Bool) -> DependencyGraph v e -> DependencyGraph v e
filterDeps predicate gr =
  let (delete,keep) = M.partitionWithKey (\k _ -> predicate k) (nodeMap gr)
  in gr { dependencies = G.delNodes (M.elems delete) (dependencies gr)
        , nodeMap = keep
        }
-- | Remove a single vertex (and its edges) from the graph; a no-op if
-- the vertex is not registered.
deregister :: Ord v => v -> DependencyGraph v e -> DependencyGraph v e
deregister dep gr = fromMaybe gr $ do
  node <- M.lookup dep (nodeMap gr)
  return $ gr { dependencies = G.delNode node (dependencies gr)
              , nodeMap = M.delete dep (nodeMap gr)
              }
-- | Look up the node id of a vertex, allocating a fresh node (with the
-- next id) and inserting it into the graph if it is not yet present.
registerNode :: Ord v => DependencyGraph v e -> v -> (DependencyGraph v e,Node)
{-# INLINE registerNode #-}
registerNode gr v =
  case M.lookup v (nodeMap gr) of
    Just node -> (gr,node)
    Nothing ->
      let gr' = gr
            { nodeMap = M.insert v newNode (nodeMap gr)
            , dependencies = G.insNode (newNode,v) (dependencies gr)
            , maxNode = newNode
            }
      in (gr',newNode)
  where
    newNode = maxNode gr + 1
-- | Labelled predecessors of a vertex (who depends on it).
lookupReverseDependencies :: (Ord v) => v -> DependencyGraph v e -> [(e,v)]
lookupReverseDependencies = lookupGraph (\(pre,_,_,_) -> pre)
-- | Labelled successors of a vertex (what it depends on).
lookupDependencies :: (Ord v) => v -> DependencyGraph v e -> [(e,v)]
lookupDependencies = lookupGraph (\(_,_,_,suc) -> suc)
-- | Shared lookup: select one adjacency list from the vertex's fgl
-- context and resolve each neighbour node back to its vertex value.
-- Yields [] for unregistered vertices (or if any label is missing).
lookupGraph :: (Ord v) => (Context v e -> Adj e) -> v -> DependencyGraph v e -> [(e,v)]
lookupGraph direction v gr = fromMaybe [] $ do
  node <- M.lookup v (nodeMap gr)
  ctx <- fst $ G.match node (dependencies gr)
  let adj = direction ctx
  mapM (\(e,n) -> (e,) <$> G.lab (dependencies gr) n) adj
| monto-editor/broker | src/Monto/DependencyGraph.hs | bsd-3-clause | 2,831 | 0 | 17 | 754 | 1,121 | 598 | 523 | 63 | 2 |
{-# LANGUAGE RecordWildCards, TemplateHaskell, FlexibleContexts, ScopedTypeVariables, ConstraintKinds #-}
module HAST.BDD(compileBDD, compileBDD') where
import Control.Monad.State
import Control.Monad.ST
import Data.Bits
import Debug.Trace
import Control.Monad.Morph
import qualified Data.Map.Strict as Map
import Data.Map.Strict (Map)
import Util hiding (trace)
import Cudd.Imperative
import Synthesis.Interface
import Synthesis.Resource
import Synthesis.RefineCommon
import HAST.HAST
-- | Fold a binary BDD operation over a list of nodes, seeded with the given
-- constant (e.g. 'bOne' for conjunction).  Intermediate results and consumed
-- operands are dereferenced as the fold proceeds; the @$r*@/@$d@ splices are
-- the resource-tracking wrappers around CUDD ref/deref.
block :: (RM s u t) =>
         (DDManager s u -> DDNode s u -> DDNode s u -> ST s (DDNode s u)) ->
         (DDManager s u -> DDNode s u) ->
         DDManager s u ->
         [DDNode s u] ->
         t (ST s) (DDNode s u)
block func s m nodes = do
    -- take a reference on the seed so it can be deref'd uniformly in 'go'
    $rp ref (s m)
    go (s m) nodes
    where
    go accum [] = return accum
    go accum (n:ns) = do
        accum' <- $r2 (func m) accum n
        $d (deref m) accum
        $d (deref m) n
        go accum' ns
-- | Conjunction/disjunction of a list of BDD nodes.  'bOne'/'bZero' are the
-- respective neutral elements, so an empty list yields true/false.
conj, disj :: (RM s u t)
           => DDManager s u
           -> [DDNode s u]
           -> t (ST s) (DDNode s u)
conj = block bAnd bOne
disj = block bOr bZero
-- | Prioritised case chain over (condition, value) pairs: each value
-- contributes only where its condition holds and no earlier condition did.
-- @neg@ accumulates the disjunction of the conditions already consumed.
ccase :: (RM s u t) => DDManager s u -> [(DDNode s u, DDNode s u)] -> t (ST s) (DDNode s u)
ccase m = go (bZero m) (bZero m)
    where
    go accum neg [] = do
        $d (deref m) neg
        return accum
    go accum neg ((cond, cas): cs) = do
        --alive == cond, cas, accum, neg
        econd <- $r2 (bAnd m) cond (bNot neg)
        clause <- $r2 (bAnd m) econd cas
        $d (deref m) econd
        $d (deref m) cas
        --alive == cond, accum, neg, clause
        accum' <- $r2 (bOr m) clause accum
        $d (deref m) accum
        $d (deref m) clause
        --alive == cond, neg, accum'
        neg' <- $r2 (bOr m) cond neg
        $d (deref m) cond
        $d (deref m) neg
        --alive == accum', neg'
        go accum' neg' cs
-- | Convenience wrapper around 'compileBDD' that discharges the resource
-- monad: it runs with an empty in-use map and discards the final resource
-- state, leaving a plain 'ST'-based computation.
compileBDD' :: forall v s u pdb. (Show v)
            => DDManager s u
            -> VarOps pdb v s u
            -> (v -> Maybe String)    -- returns BDD variable group tag for a variable
            -> AST [DDNode s u] [DDNode s u] (DDNode s u) v
            -> StateT pdb (ST s) (DDNode s u)
compileBDD' m vo ft ast = hoist (liftM fst . (runResource :: Monad m => InUse (DDNode s u) -> (ResourceT (DDNode s u)) m a -> m (a, InUse (DDNode s u))) Map.empty) $ (compileBDD m vo ft ast :: StateT pdb (ResourceT (DDNode s u) (ST s)) (DDNode s u))
-- | Compile an 'AST' formula down to a single (referenced) BDD node.
-- Every constructor case returns a node holding one reference that the
-- caller is responsible for releasing; intermediate nodes are deref'd here.
compileBDD :: forall v s u t pdb. (Show v, RM s u t)
           => DDManager s u
           -> VarOps pdb v s u
           -> (v -> Maybe String)    -- returns BDD variable group tag for a variable
           -> AST [DDNode s u] [DDNode s u] (DDNode s u) v
           -> StateT pdb (t (ST s)) (DDNode s u)
compileBDD m VarOps{..} ftag = compile' where
    -- Compile both operands, combine with the given binary BDD op, and
    -- release the operands.
    binOp :: forall t. RM s u t => (DDManager s u -> DDNode s u -> DDNode s u -> ST s (DDNode s u))
          -> DDManager s u
          -> AST [DDNode s u] [DDNode s u] (DDNode s u) v
          -> AST [DDNode s u] [DDNode s u] (DDNode s u) v
          -> StateT pdb (t (ST s)) (DDNode s u)
    binOp func m x y = do
        x <- compile' x
        y <- compile' y
        res <- lift $ $r2 (func m) x y
        lift $ $d (deref m) x
        lift $ $d (deref m) y
        return res
    -- Resolve an AST variable to its vector of BDD bit-nodes.
    getAVar :: forall t. RM s u t => ASTVar [DDNode s u] [DDNode s u] v -> StateT pdb (t (ST s)) [DDNode s u]
    getAVar (FVar f) = return f
    getAVar (EVar e) = return e
    getAVar (NVar v) = hoist lift $ getVar v (ftag v) --TODO fix when Interface.hs uses resource monad
    -- Allocate @n@ temporary BDD variables and run the continuation with them.
    withTmpMany :: forall t. RM s u t => Int -> ([DDNode s u] -> StateT pdb ((ResourceT (DDNode s u)) (ST s)) (DDNode s u)) -> StateT pdb (t (ST s)) (DDNode s u)
    withTmpMany n f = do
        (res :: DDNode s u) <- hoist lift $ withTmpMany' [] n f
        lift $ $rr $ return res
        return res
    withTmpMany' :: [DDNode s u] -> Int -> ([DDNode s u] -> StateT pdb ((ResourceT (DDNode s u)) (ST s)) (DDNode s u)) -> StateT pdb (ST s) (DDNode s u)
    withTmpMany' nodes 0 f = hoist (liftM fst . runResource Map.empty) $ f nodes
    withTmpMany' nodes n f = withTmp (\node -> withTmpMany' (node:nodes) (n-1) f)
    -- One case per AST constructor.
    compile' :: forall t. RM s u t => AST [DDNode s u] [DDNode s u] (DDNode s u) v
             -> StateT pdb (t (ST s)) (DDNode s u)
    compile' T = do
        lift $ $rp ref $ (bOne m :: DDNode s u)
        return $ bOne m
    compile' F = do
        lift $ $rp ref $ (bZero m :: DDNode s u)
        return $ bZero m
    compile' (Not x) = liftM bNot $ compile' x
    compile' (And x y) = binOp bAnd m x y
    compile' (Or x y) = binOp bOr m x y
    compile' (XNor x y) = binOp bXnor m x y
    compile' (Imp x y) = binOp bimp m x y
        where
        bimp m x y = bOr m (bNot x) y
    compile' (Conj es) = do
        es <- sequence $ map compile' es
        lift $ conj m es
    compile' (Disj es) = do
        es <- sequence $ map compile' es
        lift $ disj m es
    compile' (Case cs) = do
        cs <- sequence $ map func cs
        lift $ ccase m cs
        where
        func (x, y) = do
            x <- compile' x
            y <- compile' y
            return (x, y)
    compile' (EqVar x y) = do
        x <- getAVar x
        y <- getAVar y
        lift $ $r $ xEqY m x y --TODO reference counting
    compile' (EqConst x c) = do
        x <- getAVar x
        lift $ $r $ computeCube m x $ bitsToBoolArrBe (length x) c --TODO reference counting
    compile' (Exists w f)
        | w <= 0 = error $ "compileBDD error: cannot quantify " ++ show w ++ " bits"
        | otherwise = withTmpMany w $ \x -> do
            res' <- compile' $ f x
            xcube <- lift $ $r $ nodesToCube m x --TODO reference counting
            res <- lift $ $r2 (bExists m) res' xcube
            lift $ $d (deref m) xcube
            lift $ $d (deref m) res'
            return res
    -- NOTE(review): this case ignores its first field @n@ and is a verbatim
    -- copy of the 'Exists' case — confirm that is intended.
    compile' (NExists n w f)
        | w <= 0 = error $ "compileBDD error: cannot quantify " ++ show w ++ " bits"
        | otherwise = withTmpMany w $ \x -> do
            res' <- compile' $ f x
            xcube <- lift $ $r $ nodesToCube m x --TODO reference counting
            res <- lift $ $r2 (bExists m) res' xcube
            lift $ $d (deref m) xcube
            lift $ $d (deref m) res'
            return res
    compile' (Var x) = do
        [x] <- getAVar x
        lift $ $rp ref x
        return x
    compile' (Let x f) = do
        bind <- compile' x
        res <- compile' (f bind)
        lift $ $d (deref m) bind
        return res
    compile' (LetLit x) = do
        lift $ $rp ref x
        return x
| termite2/hast | HAST/BDD.hs | bsd-3-clause | 6,732 | 0 | 19 | 2,443 | 3,104 | 1,477 | 1,627 | 155 | 20 |
module HpackSpec (spec) where
import Prelude ()
import Prelude.Compat
import Control.Monad.Compat
import Control.DeepSeq
import Data.Version (Version(..), showVersion)
import Test.Hspec
import Test.Mockery.Directory
import Test.QuickCheck
import Hpack
-- | Build a 'Version' from a branch, with no version tags.
makeVersion :: [Int] -> Version
makeVersion = flip Version []
spec :: Spec
spec = do
  describe "parseVerbosity" $ do
    it "returns True by default" $ do
      parseVerbosity ["foo"] `shouldBe` (True, ["foo"])
    context "with --silent" $ do
      it "returns False" $ do
        parseVerbosity ["--silent"] `shouldBe` (False, [])
  describe "extractVersion" $ do
    it "extracts Hpack version from a cabal file" $ do
      let cabalFile = ["-- This file has been generated from package.yaml by hpack version 0.10.0."]
      extractVersion cabalFile `shouldBe` Just (Version [0, 10, 0] [])
    it "is total" $ do
      let cabalFile = ["-- This file has been generated from package.yaml by hpack version "]
      extractVersion cabalFile `shouldBe` Nothing
  describe "parseVersion" $ do
    it "is inverse to showVersion" $ do
      let positive = getPositive <$> arbitrary
      forAll (replicateM 3 positive) $ \xs -> do
        let v = Version xs []
        parseVersion (showVersion v) `shouldBe` Just v
  -- The cases below exercise the real filesystem inside a temporary directory.
  describe "hpackWithVersion" $ do
    context "when only the hpack version in the cabal file header changed" $ do
      it "does not write a new cabal file" $ do
        inTempDirectory $ do
          writeFile "package.yaml" "name: foo"
          hpackWithVersion (makeVersion [0,8,0]) "." False
          -- ($!!) fully forces the lazily-read contents before regenerating
          old <- readFile "foo.cabal" >>= (return $!!)
          hpackWithVersion (makeVersion [0,10,0]) "." False
          readFile "foo.cabal" `shouldReturn` old
    context "when exsting cabal file was generated with a newer version of hpack" $ do
      it "does not re-generate" $ do
        inTempDirectory $ do
          writeFile "package.yaml" $ unlines [
              "name: foo"
            , "version: 0.1.0"
            ]
          hpackWithVersion (makeVersion [0,10,0]) "." False
          old <- readFile "foo.cabal" >>= (return $!!)
          writeFile "package.yaml" $ unlines [
              "name: foo"
            , "version: 0.2.0"
            ]
          hpackWithVersion (makeVersion [0,8,0]) "." False
          readFile "foo.cabal" `shouldReturn` old
| phadej/hpack | test/HpackSpec.hs | mit | 2,444 | 0 | 22 | 721 | 654 | 325 | 329 | 55 | 1 |
{-|
Module : Filesystem.CanonicalPath.Directory
Copyright : (c) Boris Buliga, 2014
License : MIT
Maintainer : d12frosted@icloud.com
Stability : experimental
Portability : portable
Redefinition of some functions from @System.Directory@ module. Some of them have different signature, because they need to work with @'CanonicalPath'@. For example, we can't create functions @createDirectory :: 'CanonicalPath' -> IO ()@, because it has no sense. How can we create directory that already exists? Instead we have function @createDirectory :: 'CanonicalPath' -> 'FilePath' -> IO 'CanonicalPath'@, that creates new directory in base existing directory with provided name. And also it returns @'CanonicalPath'@ of newly created directory. Isn't it nice?
A lot of functions come in two variants: one that returns resulting @'CanonicalPath' and second that ignores result (they end with '_' symbol).
Happy Haskell Hacking!
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Filesystem.CanonicalPath.Directory where
import BasicPrelude
import Data.Text ()
import Filesystem.CanonicalPath
import Filesystem.CanonicalPath.Internal
import Filesystem.Path
import qualified System.Directory as Directory
{-|
@'createDirectory' base dir@ creates new directory @dir@ in existing @base@ directory and returns @'CanonicalPath'@ of created directory.
For more information look for documentation of @'System.Directory.createDirectory'@.
/Since 0.1.1.0/
-}
createDirectory :: MonadIO m
                => CanonicalPath   -- ^ base directory
                -> FilePath        -- ^ name of new directory
                -> m CanonicalPath -- ^ @'CanonicalPath'@ of created directory
createDirectory base name = do
  let newDir = unsafePath base </> name
  liftIO (Directory.createDirectory (toPrelude newDir))
  return (CanonicalPath newDir)
{-|
Variant of @'createDirectory'@ that ignores the resulting @'CanonicalPath'@.
/Since 0.2.2.0/
-}
createDirectory_ :: MonadIO m => CanonicalPath -> FilePath -> m ()
createDirectory_ base name = voidM (createDirectory base name)
{-|
@'createDirectoryIfMissing' parents dir@ creates a new directory @dir@ in @base@ directory. If the first argument is 'True' the function will also create all parent directories if they are missing. Function returns @'CanonicalPath'@ of created directory.
For more information look for documentation of @'System.Directory.createDirectoryIfMissing'@.
/Since 0.1.1.0/
-}
createDirectoryIfMissing :: MonadIO m
                         => Bool            -- ^ Create its parents too?
                         -> CanonicalPath   -- ^ base directory
                         -> FilePath        -- ^ name of new directory
                         -> m CanonicalPath -- ^ @'CanonicalPath'@ of created directory
createDirectoryIfMissing parents base name = do
  let newDir = unsafePath base </> name
  liftIO (Directory.createDirectoryIfMissing parents (toPrelude newDir))
  return (CanonicalPath newDir)
{-|
Variant of @'createDirectoryIfMissing'@ that ignores the resulting @'CanonicalPath'@.
/Since 0.2.2.0/
-}
createDirectoryIfMissing_ :: MonadIO m => Bool -> CanonicalPath -> FilePath -> m ()
createDirectoryIfMissing_ parents base name =
  voidM (createDirectoryIfMissing parents base name)
{-|
@'removeDirectory' dir@ removes an existing directory /dir/.
For more information look for documentation of @'System.Directory.removeDirectory'@.
/Since 0.1.1.0/
-}
removeDirectory :: MonadIO m => CanonicalPath -> m ()
removeDirectory dir = liftIO (preludeMap Directory.removeDirectory dir)
{-|
@'removeDirectoryRecursive' dir@ removes an existing directory /dir/ together with its content and all subdirectories. Be careful, if the directory contains symlinks, the function will follow them.
For more information look for documentation of @'System.Directory.removeDirectoryRecursive'@.
/Since 0.1.1.0/
-}
removeDirectoryRecursive :: MonadIO m => CanonicalPath -> m ()
removeDirectoryRecursive dir = liftIO (preludeMap Directory.removeDirectoryRecursive dir)
{-|
@'renameDirectory' old new@ changes the name of an existing directory from /old/ to /new/ and returns @'CanonicalPath'@ of new directory.
For more information look for documentation of @'System.Directory.renameDirectory'@.
/Since 0.1.1.0/
-}
renameDirectory :: MonadIO m
                => CanonicalPath -- ^ old directory
                -> FilePath -- ^ new directory (should be just name of directory)
                -> m CanonicalPath -- ^ @'CanonicalPath'@ of new directory
renameDirectory cp p =
  do newPath <- canonicalPath $ parent p -- destination's parent must already exist
     liftIO $ Directory.renameDirectory (toPrelude . unsafePath $ cp) (toPrelude p)
     -- (p </> "") forces p to be treated as a directory so 'dirname' yields
     -- its last component — presumably the new directory's name; verify.
     return . CanonicalPath $ unsafePath newPath </> dirname (p </> "")
{-|
Variant of @'renameDirectory'@ that ignores the resulting @'CanonicalPath'@.
/Since 0.2.2.0/
-}
renameDirectory_ :: MonadIO m => CanonicalPath -> FilePath -> m ()
renameDirectory_ old new = voidM (renameDirectory old new)
{-|
@'getDirectoryContents' dir@ returns a list of /all/ entries in /dir/. If you want to have list of @'CanonicalPath'@ instead use function @'getDirectoryContents''@.
For more information look for documentation of @'System.Directory.getDirectoryContents'@.
/Since 0.1.1.0/
-}
getDirectoryContents :: MonadIO m => CanonicalPath -> m [FilePath]
getDirectoryContents dir = do
  entries <- liftIO (preludeMap Directory.getDirectoryContents dir)
  return (map fromPrelude entries)
{-|
The same as @'getDirectoryContents'@, but returns list of @'CanonicalPath'@ instead of @'FilePath'@.
/Since 0.1.1.0/
-}
getDirectoryContents' :: MonadIO m => CanonicalPath -> m [CanonicalPath]
getDirectoryContents' dir = do
  entries <- liftIO (getDirectoryContents dir)
  return [ CanonicalPath (base </> entry) | entry <- entries ]
  where base = unsafePath dir
{-|
The same as @'getDirectoryContents'@, but returns list of @'Text'@ instead of @'FilePath'@.
/Since 0.2.2.0/
-}
getDirectoryContents'' :: MonadIO m => CanonicalPath -> m [Text]
getDirectoryContents'' dir = do
  entries <- liftIO (preludeMap Directory.getDirectoryContents dir)
  return (map fromString entries)
If the operating system has a notion of current directories, 'getCurrentDirectory' returns a @'CanonicalPath'@ to the current directory of the calling process.
For more information look for documentation of @'System.Directory.getCurrentDirectory'@.
/Since 0.1.1.0/
-}
getCurrentDirectory :: MonadIO m => m CanonicalPath
getCurrentDirectory = do
  dir <- liftIO Directory.getCurrentDirectory
  return (CanonicalPath (fromPrelude dir))
{-|
If the operating system has a notion of current directories, @'setCurrentDirectory' dir@ changes the current directory of the calling process to /dir/.
For more information look for documentation of @'System.Directory.setCurrentDirectory'@.
/Since 0.1.1.0/
-}
setCurrentDirectory :: MonadIO m => CanonicalPath -> m ()
setCurrentDirectory dir = liftIO (preludeMap Directory.setCurrentDirectory dir)
{-|
Returns the current user's home directory.
For more information look for documentation of @'System.Directory.getHomeDirectory'@.
/Since 0.1.1.0/
-}
getHomeDirectory :: MonadIO m => m CanonicalPath
getHomeDirectory = do
  dir <- liftIO Directory.getHomeDirectory
  return (CanonicalPath (fromPrelude dir))
{-|
Returns the @'FilePath'@ of a directory in which application-specific data for the current user can be stored. The result of @'getAppUserDataDirectory'@ for a given application is specific to the current user.
For more information look for documentation of @'System.Directory.getAppUserDataDirectory'@.
/Since 0.1.1.0/
-}
getAppUserDataDirectory :: MonadIO m => Text -> m FilePath
getAppUserDataDirectory appName = do
  dir <- liftIO (Directory.getAppUserDataDirectory (textToString appName))
  return (fromPrelude dir)
{-|
Returns the current user's document directory.
For more information look for documentation of @'System.Directory.getUserDocumentsDirectory'@.
/Since 0.1.1.0/
-}
getUserDocumentsDirectory :: MonadIO m => m CanonicalPath
getUserDocumentsDirectory = do
  dir <- liftIO Directory.getUserDocumentsDirectory
  return (CanonicalPath (fromPrelude dir))
{-|
Returns the current directory for temporary files.
For more information look for documentation of @'System.Directory.getTemporaryDirectory'@.
/Since 0.1.1.0/
-}
getTemporaryDirectory :: MonadIO m => m FilePath
getTemporaryDirectory = do
  dir <- liftIO Directory.getTemporaryDirectory
  return (fromPrelude dir)
{-|
'removeFile' /file/ removes the directory entry for an existing file /file/, where /file/ is not itself a directory.
For more information look for documentation of @'System.Directory.removeFile'@.
/Since 0.1.1.0/
-}
removeFile :: MonadIO m => CanonicalPath -> m ()
removeFile file = liftIO (preludeMap Directory.removeFile file)
{-|
@'renameFile' old new@ changes the name of an existing file system object from /old/ to /new/.
For more information look for documentation of @'System.Directory.renameFile'@.
/Since 0.1.1.0/
-}
renameFile :: MonadIO m
           => CanonicalPath -- ^ @'CanonicalPath'@ of file you want to rename
           -> FilePath -- ^ new name of file
           -> m CanonicalPath -- ^ @'CanonicalPath'@ of /new/ file
renameFile cp p =
  do newPath <- canonicalPath $ parent p -- destination's parent must already exist
     liftIO $ Directory.renameFile (toPrelude . unsafePath $ cp) (toPrelude p)
     return . CanonicalPath $ unsafePath newPath </> filename p
{-|
Variant of @'renameFile'@ that ignores the resulting @'CanonicalPath'@.
/Since 0.2.2.0/
-}
renameFile_ :: MonadIO m => CanonicalPath -> FilePath -> m ()
renameFile_ old new = voidM (renameFile old new)
{-|
@'copyFile' old new@ copies the existing file from /old/ to /new/. If the /new/ file already exists, it is atomically replaced by the /old/ file. Neither path may refer to an existing directory. The permissions of /old/ are copied to /new/, if possible.
For more information look for documentation of @'System.Directory.copyFile'@.
/Since 0.1.1.0/
-}
copyFile :: MonadIO m
         => CanonicalPath -- ^ @'CanonicalPath'@ of file you want to copy
         -> FilePath -- ^ name of new file (actually it can be path relative to directory of /old/
         -> m CanonicalPath -- ^ @'CanonicalPath'@ of /new/ file
copyFile oldFile newFile =
  do newPath <- canonicalPath $ parent newFile -- destination's parent must already exist
     liftIO $ Directory.copyFile (toPrelude . unsafePath $ oldFile) (toPrelude newFile)
     return . CanonicalPath $ unsafePath newPath </> filename newFile
-- | Variant of 'copyFile' that discards the resulting 'CanonicalPath'.
copyFile_ :: MonadIO m => CanonicalPath -> FilePath -> m ()
copyFile_ src dest = voidM (copyFile src dest)
| d12frosted/CanonicalPath | Filesystem/CanonicalPath/Directory.hs | mit | 10,397 | 0 | 11 | 1,773 | 1,266 | 637 | 629 | 85 | 1 |
#!/usr/bin/env ./execcluster.sh
{-# LANGUAGE CPP #-}
module Main where
#ifndef ghcjs_HOST_OS
import Control.Monad
import Control.Monad.IO.Class
import Data.IORef
import GHC.Conc
import Control.Applicative
import Data.Monoid
import Transient.Internals
import Transient.Indeterminism
import Transient.Logged
import Transient.Move
import Transient.Move.Utils
import Transient.Move.Services
import Transient.MapReduce
import Transient.EVars
import Control.Concurrent
import System.IO.Unsafe
import Data.List
import Control.Exception.Base
import qualified Data.Map as M
import System.Exit
import System.Process
import Control.Monad.State
#define _UPK_(x) {-# UNPACK #-} !(x)
#define shouldRun(x) (local $ getMyNode >>= \n -> assert (nodePort n == (nodePort x)) (return ()))
#define shouldRun1(x) (local $ getMyNode >>= \(Node _ p _ _) -> liftIO (print p >> print x >> print ( p == (x))))
-- | Distributed test: start a node, install two more instances via
-- 'requestInstall', then (on the "fire" option) check that Alternative
-- composition of 'runAt' runs each branch on the intended node.
main= do
     keep $ initNode $ do
        n1 <- local getMyNode
        n2 <- requestInstall "" ("executable", "TestSuite1")  !> "request"
        n3 <- requestInstall "" ("executable", "TestSuite1")
--        shell "./TestSuite1 -p start/localhost/8081/add/localhost/8080/y"
--        shell "./TestSuite1 -p start/localhost/8082/add/localhost/8080/y"
        local $ option "f" "fire"
--        async $ do
--            let delay= (nodePort node -2000 + 1) *10000000
--            threadDelay delay
        nodes <- local getNodes
        onAll $ liftIO $ print nodes
        -- NOTE(review): these let-bindings shadow the n1/n2/n3 bound from
        -- getMyNode/requestInstall above — confirm that is intended.
        let n1= head nodes
            n2= nodes !! 1
            n3= nodes !! 2
        localIO $ putStrLn "------checking Alternative distributed--------"
        r <- local $ do
             runCloud $ (runAt n1 (shouldRun(n1)  >> return "hello" ))
                   <|>  (runAt n2 (shouldRun(n2)  >> return "world" ))
                   <|>  (runAt n3 (shouldRun(n3)  >> return "world2" ))
        localIO $ print r
--        loggedc  $ assert(sort r== ["hello", "world","world2"]) $ localIO $  print r
--        localIO $ putStrLn "--------------checking Applicative distributed--------"
--        r <- loggedc $(runAt n2000 (shouldRun(2000) >> return "hello "))
--                  <>  (runAt n2001 (shouldRun(2001) >> return "world " ))
--                  <>  (runAt n2002 (shouldRun(2002) >> return "world2" ))
--
--        assert(r== "hello world world2") $ localIO $  print r
--        localIO $ putStrLn "----------------checking monadic, distributed-------------"
--        r <- runAt n2000 (shouldRun(2000)
--                >> runAt n2001 (shouldRun(2001)
--                        >> runAt n2002 (shouldRun(2002) >> (return "HELLO" ))))
--
--        assert(r== "HELLO") $ localIO $ print r
--
--
--        localIO $ putStrLn "----------------checking map-reduce -------------"
--
--        r <- reduce  (+) . mapKeyB (\w -> (w, 1 :: Int))  $ getText  words "hello world hello"
--        localIO $ putStr "SOLUTION: " >> print r
--        assert (sort (M.toList r) == sort [("hello",2::Int),("world",1)]) $ return r
--        local $ exit ()
--    print "SUCCESS"
--    exitSuccess
-- | Listen on every node in parallel; an empty list degenerates to @return ()@.
runNodes ns = foldr ((<|>) . listen) empty ns <|> return ()
#else
main= return ()
#endif
| transient-haskell/transient-universe | tests/TestSuite1.hs | mit | 3,463 | 0 | 22 | 1,088 | 444 | 250 | 194 | 42 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Monoid
import qualified Data.Text as T
import Online
import System.Environment
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
import Tests
----------------------------------------------------------------------
-- | Group tests under a name, overriding the number of QuickCheck cases run.
withQuickCheckDepth :: TestName -> Int -> [TestTree] -> TestTree
withQuickCheckDepth tn depth =
  localOption (QuickCheckTests depth) . testGroup tn
----------------------------------------------------------------------
-- | Build the group of online (network-hitting) tests, but only when the
-- @MANDRILL_API_KEY@ environment variable is set; otherwise return no tests
-- so the suite still runs fully offline.
onlineTestsIfEnabled :: IO [TestTree]
onlineTestsIfEnabled = do
  -- 'return . fmap T.pack =<< m' is just 'fmap (fmap T.pack) m'
  apiKey <- fmap (fmap T.pack) (lookupEnv "MANDRILL_API_KEY")
  case apiKey of
    Nothing -> return []
    Just k  -> return [
      testGroup "Mandrill online tests" [
          testCase "users/info.json" (testOnlineUsersInfo k)
        , testCase "users/ping2.json" (testOnlineUsersPing2 k)
        , testCase "users/senders.json" (testOnlineUsersSenders k)
        , testCase "messages/send.json" (testOnlineMessagesSend k)
        , testCase "inbound/addDomain.json" (testOnlineDomainAdd k)
        , testCase "inbound/addRoute.json" (testOnlineRouteAdd k)
        ]]
----------------------------------------------------------------------
-- | Run the offline parsing tests, plus the online suite when an API key is
-- available in the environment.
main :: IO ()
main = do
  onlineTests <- onlineTestsIfEnabled
  defaultMainWithIngredients defaultIngredients $
    testGroup "Mandrill tests" $ onlineTests <> [
      testGroup "Mandrill offline tests" [
          testCase "users/info.json API parsing" testUsersInfo
        , testCase "users/senders.json API parsing" testUsersSenders
        , testCase "messages/send.json API parsing" testMessagesSend
        , testCase "messages/send.json API response parsing" testMessagesResponseRejected
        , testCase "inbound/add-route.json API response parsing" testRouteAdd
        , testCase "inbound/add-domain.json API response parsing" testDomainAdd
        , testCase "senders/verify-domain.json API response parsing" testVerifyDomain
        , testCase "webhooks/add.json API response parsing" testWebhookAdd
        ]
      ]
| adinapoli/mandrill | test/Main.hs | mit | 2,274 | 0 | 16 | 560 | 379 | 196 | 183 | 40 | 2 |
{-# LANGUAGE PatternGuards #-}
{-
Copyright (C) 2008 Andrea Rossato <andrea.rossato@ing.unitn.it>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Biblio
Copyright : Copyright (C) 2008-2010 Andrea Rossato
License : GNU GPL, version 2 or above
Maintainer : Andrea Rossato <andrea.rossato@unitn.it>
Stability : alpha
Portability : portable
-}
module Text.Pandoc.Biblio ( processBiblio ) where
import Data.List
import Data.Unique
import Data.Char ( isDigit, isPunctuation )
import qualified Data.Map as M
import Text.CSL hiding ( Cite(..), Citation(..) )
import qualified Text.CSL as CSL ( Cite(..) )
import Text.Pandoc.Definition
import Text.Pandoc.Generic
import Text.Pandoc.Shared (stringify)
import Text.ParserCombinators.Parsec
import Control.Monad
-- | Process a 'Pandoc' document by adding citations formatted
-- according to a CSL style, using 'citeproc' from citeproc-hs.
processBiblio :: FilePath -> [Reference] -> Pandoc -> IO Pandoc
processBiblio cslfile r p
  = if null r then return p
    else do
      csl <- readCSLFile cslfile
      -- give every citation a unique hash so groups can be used as map keys
      p' <- bottomUpM setHash p
      -- note styles: citations outside notes must be wrapped in notes later
      let (nts,grps) = if styleClass csl == "note"
                          then let cits   = queryWith getCite p'
                                   ncits  = map (queryWith getCite) $ queryWith getNote p'
                                   needNt = cits \\ concat ncits
                               in (,) needNt $ getNoteCitations needNt p'
                          else (,) [] $ queryWith getCitation p'
          result     = citeproc procOpts csl r (setNearNote csl $
                          map (map toCslCite) grps)
          cits_map   = M.fromList $ zip grps (citations result)
          biblioList = map (renderPandoc' csl) (bibliography result)
          Pandoc m b = bottomUp (procInlines $ processCite csl cits_map) p'
      return . generateNotes nts . Pandoc m $ b ++ biblioList
-- | Substitute 'Cite' elements with formatted citations.
-- | Substitute 'Cite' elements with formatted citations looked up in the
-- citeproc result map; unknown groups become an error 'Str'.
processCite :: Style -> M.Map [Citation] [FormattedOutput] -> [Inline] -> [Inline]
processCite _ _ [] = []
processCite s cs (i:is)
  | Cite t _ <- i = process t ++ processCite s cs is
  | otherwise     = i : processCite s cs is
  where
    addNt t x = if null x then [] else [Cite t $ renderPandoc s x]
    process t = case M.lookup t cs of
                  Just x -> if isTextualCitation t && x /= []
                               -- author-in-text: author part inline, rest kept as a Cite
                               then renderPandoc s [head x] ++
                                    if tail x /= []
                                       then Space : addNt t (tail x)
                                       else []
                               else [Cite t $ renderPandoc s x]
                  Nothing -> [Str ("Error processing " ++ show t)]
-- | A citation group is textual when its first citation is author-in-text;
-- empty groups are not textual.
isTextualCitation :: [Citation] -> Bool
isTextualCitation cs =
  case cs of
    (c:_) -> citationMode c == AuthorInText
    []    -> False
-- | Retrieve all citations from a 'Pandoc' document. To be used with
-- 'queryWith'.
getCitation :: Inline -> [[Citation]]
getCitation (Cite t _) = [t]
getCitation _          = []
-- | Collect 'Note' inlines (for use with 'queryWith').
getNote :: Inline -> [Inline]
getNote n@(Note _) = [n]
getNote _          = []
-- | Collect 'Cite' inlines (for use with 'queryWith').
getCite :: Inline -> [Inline]
getCite c@(Cite _ _) = [c]
getCite _            = []
-- | Collect citation groups from the document's notes, after wrapping the
-- given not-yet-noted citations in notes and numbering note citations 1..n.
getNoteCitations :: [Inline] -> Pandoc -> [[Citation]]
getNoteCitations needNote
  = let mvCite i = if i `elem` needNote then Note [Para [i]] else i
        setNote = bottomUp mvCite
        getCits = concat . flip (zipWith $ setCiteNoteNum) [1..] .
                  map (queryWith getCite) . queryWith getNote . setNote
    in queryWith getCitation . getCits
-- | Stamp a citation with a fresh hash derived from a new 'Unique'.
setHash :: Citation -> IO Citation
setHash (Citation i p s cm nn _) = do
  u <- newUnique
  return (Citation i p s cm nn (hashUnique u))
-- | Wrap the given citations in notes everywhere they occur in the document.
generateNotes :: [Inline] -> Pandoc -> Pandoc
generateNotes needNote = bottomUp (mvCiteInNote needNote)
-- | Apply an inline transformation to blocks that carry inline content
-- directly; all other blocks pass through unchanged.
procInlines :: ([Inline] -> [Inline]) -> Block -> Block
procInlines f (Plain inls)    = Plain (f inls)
procInlines f (Para inls)     = Para (f inls)
procInlines f (Header i inls) = Header i (f inls)
procInlines _ b               = b
-- | Move the given citations into footnotes, adjusting surrounding spaces
-- and punctuation (punctuation before the note marker, capitalised and
-- period-terminated text inside the note).
mvCiteInNote :: [Inline] -> Block -> Block
mvCiteInNote is = procInlines mvCite
  where
    mvCite :: [Inline] -> [Inline]
    mvCite inls
      -- space + citation followed by punctuation: pull punctuation in front
      | x:i:xs <- inls, startWithPunct xs
      , x == Space, i `elem_` is = switch i xs ++ mvCite (tailFirstInlineStr xs)
      -- space + citation: just wrap the citation in a note
      | x:i:xs <- inls
      , x == Space, i `elem_` is = mvInNote i : mvCite xs
      | i:xs <- inls, i `elem_` is
      , startWithPunct xs = switch i xs ++ mvCite (tailFirstInlineStr xs)
      | i:xs <- inls, Note _ <- i = checkNt i : mvCite xs
      | i:xs <- inls = i : mvCite xs
      | otherwise = []
    -- membership test ignoring the already-rendered contents of the Cite
    elem_ x xs = case x of Cite cs _ -> (Cite cs []) `elem` xs; _ -> False
    switch i xs = Str (headInline xs) : mvInNote i : []
    mvInNote i
      | Cite t o <- i = Note [Para [Cite t $ sanitize o]]
      | otherwise = Note [Para [i ]]
    sanitize i
      | endWithPunct i = toCapital i
      | otherwise = toCapital (i ++ [Str "."])
    -- NOTE(review): `endWithPunct o` appears twice in this guard; the
    -- duplicate is harmless but likely unintended.
    checkPt i
      | Cite c o : xs <- i
      , endWithPunct o, startWithPunct xs
      , endWithPunct o = Cite c (initInline o) : checkPt xs
      | x:xs <- i = x : checkPt xs
      | otherwise = []
    checkNt = bottomUp $ procInlines checkPt
-- | Stamp a note number onto a run of leading 'Cite' inlines; the traversal
-- stops (dropping the rest) at the first non-Cite element, as before.
setCiteNoteNum :: [Inline] -> Int -> [Inline]
setCiteNoteNum inls n =
  case inls of
    Cite cs o : rest -> Cite (setCitationNoteNum n cs) o : setCiteNoteNum rest n
    _                -> []
-- | Set the note number of every citation in the group.
setCitationNoteNum :: Int -> [Citation] -> [Citation]
setCitationNoteNum n = map stamp
  where stamp c = c { citationNoteNum = n }
-- | Convert a Pandoc citation into the citeproc-hs representation, splitting
-- its suffix into locator and remaining suffix text.
toCslCite :: Citation -> CSL.Cite
toCslCite c
  = let (l, s) = locatorWords $ citationSuffix c
        (la,lo) = parseLocator l
        -- (authorInText, suppressAuthor) flags per citation mode
        citMode = case citationMode c of
                    AuthorInText -> (True, False)
                    SuppressAuthor -> (False,True )
                    NormalCitation -> (False,False)
        -- prepend ", " unless the suffix already starts with punctuation
        s' = case s of
               [] -> []
               (Str (y:_) : _) | isPunctuation y -> s
               _ -> Str "," : Space : s
    in emptyCite { CSL.citeId = citationId c
                 , CSL.citePrefix = PandocText $ citationPrefix c
                 , CSL.citeSuffix = PandocText $ s'
                 , CSL.citeLabel = la
                 , CSL.citeLocator = lo
                 , CSL.citeNoteNumber = show $ citationNoteNum c
                 , CSL.authorInText = fst citMode
                 , CSL.suppressAuthor = snd citMode
                 , CSL.citeHash = citationHash c
                 }
-- | Split a citation suffix into a locator string plus remaining suffix;
-- on a parse failure the whole input is kept as the suffix.
locatorWords :: [Inline] -> (String, [Inline])
locatorWords inp =
  either (const ("", inp)) id (parse pLocatorWords "suffix" inp)
-- | Parse a locator; a trailing comma in the locator is moved back onto the
-- front of the remaining suffix.
pLocatorWords :: GenParser Inline st (String, [Inline])
pLocatorWords = do
  l <- pLocator
  s <- getInput -- rest is suffix
  if length l > 0 && last l == ','
     then return (init l, Str "," : s)
     else return (l, s)
-- | Accept exactly one token satisfying the predicate (backtracks otherwise).
pMatch :: (Inline -> Bool) -> GenParser Inline st Inline
pMatch condition =
  try (anyToken >>= \tok -> guard (condition tok) >> return tok)
-- | Accept a single 'Space' token.
pSpace :: GenParser Inline st Inline
pSpace = pMatch (\tok -> tok == Space)
-- | Parse a locator: an optional leading comma and space, a label word, and
-- one or more digit-containing words.
pLocator :: GenParser Inline st String
pLocator = try $ do
  optional $ pMatch (== Str ",")
  optional pSpace
  f <- many1 (notFollowedBy pSpace >> anyToken)
  gs <- many1 pWordWithDigits
  return $ stringify f ++ (' ' : unwords gs)
-- | Parse a space followed by a word that contains at least one digit.
pWordWithDigits :: GenParser Inline st String
pWordWithDigits = try $ do
  pSpace
  r <- many1 (notFollowedBy pSpace >> anyToken)
  let s = stringify r
  guard $ any isDigit s
  return s
| Lythimus/lptv | sites/all/modules/jgm-pandoc-8be6cc2/src/Text/Pandoc/Biblio.hs | gpl-2.0 | 8,574 | 0 | 19 | 2,871 | 2,677 | 1,343 | 1,334 | 162 | 5 |
{-| Module : OneLiner
License : GPL
Maintainer : helium@cs.uu.nl
Stability : experimental
Portability : portable
-}
module Helium.Utils.OneLiner(OneLineTree(..), showOneLine) where
import Data.List
data OneLineTree
= OneLineNode [OneLineTree]
| OneLineText String
-- | Placeholder shown in place of a subtree that does not fit.
collapseString :: String
collapseString = "..."
-- | Width occupied by 'collapseString'.
collapseWidth :: Int
collapseWidth = length collapseString
-- | Render a tree on a single line, collapsing nested subtrees that do not
-- fit within the given width.
showOneLine :: Int -> OneLineTree -> String
showOneLine _     (OneLineText s)  = s
showOneLine width (OneLineNode ts) = oneLine True width ts
-- | Render a list of subtrees within @width@ characters.  Top-level nodes
-- are never collapsed; a nested node collapses when even its texts alone do
-- not fit, or when its minimal rendering exceeds both the collapse marker
-- and the available width.
oneLine :: Bool -> Int -> [OneLineTree] -> String
oneLine toplevel width trees
    | not toplevel &&          -- do not collapse at toplevel
        thisLevel > width      -- collapse if not even texts can be displayed
            = collapseString
    | not toplevel &&
        smallest > collapseWidth &&
        smallest > width       -- only collapse if that makes things better
            = collapseString
    | otherwise = concatMap processTree (zip childWidths trees)
    where
        thisLevel = countThisLevel trees
        -- hoisted: minSize was recomputed for each of the two guard tests
        smallest = minSize trees
        childSizes = map childSize trees
        childSize t = case t of
            OneLineText _ -> 0
            OneLineNode _ -> maxSize [t]
        numberedChildren = zip [0..] childSizes
        -- distribute may reorder; sorting on the index restores child order
        childWidths = map snd (sort (distribute (width - thisLevel) numberedChildren))

        processTree (_         , OneLineText s) = s
        processTree (childWidth, OneLineNode ts) = oneLine False childWidth ts
-- | Total width needed to print every tree in full (no collapsing).
maxSize :: [OneLineTree] -> Int
maxSize = sum . map sizeOne
  where
    sizeOne :: OneLineTree -> Int
    sizeOne (OneLineText s)   = length s
    sizeOne (OneLineNode sub) = maxSize sub
-- | Width needed when every nested node may collapse to 'collapseString'.
minSize :: [OneLineTree] -> Int
minSize = sum . map sizeOne
  where
    sizeOne :: OneLineTree -> Int
    sizeOne (OneLineText s)   = length s
    sizeOne (OneLineNode sub) = min (minSize sub) collapseWidth
-- | Combined width of the direct text children; nested nodes count as zero.
countThisLevel :: [OneLineTree] -> Int
countThisLevel = sum . map textLen
  where
    textLen (OneLineText s) = length s
    textLen (OneLineNode _) = 0
-- | Fairly distribute @width@ over (index, need) pairs: children whose need
-- fits within an equal share keep their need; the leftover width is
-- re-distributed over the remaining, bigger children.  Result pairs may be
-- reordered relative to the input.  (On an empty input the lazily-bound
-- @widthPerChild@ is never forced, so the @div@ by zero cannot fire.)
distribute :: Int -> [(Int, Int)] -> [(Int, Int)]
distribute width children
  | null smallChildren = [ (nr, widthPerChild) | (nr, _) <- children ]
  | otherwise =
      smallChildren ++ distribute leftOvers bigChildren
  where
    widthPerChild = width `div` length children
    (smallChildren, bigChildren) =
      partition (\(_, need) -> need <= widthPerChild) children
    leftOvers = width - sum (map snd smallChildren)
| roberth/uu-helium | src/Helium/Utils/OneLiner.hs | gpl-3.0 | 2,524 | 0 | 14 | 705 | 739 | 382 | 357 | 56 | 3 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.DeviceFarm
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- AWS Device Farm is a service that enables mobile app developers to test
-- Android, iOS, and Fire OS apps on physical phones, tablets, and other
-- devices in the cloud.
--
-- /See:/ <http://docs.aws.amazon.com/devicefarm/latest/APIReference/Welcome.html AWS API Reference>
module Network.AWS.DeviceFarm
(
-- * Service Configuration
deviceFarm
-- * Errors
-- $errors
-- ** IdempotencyException
, _IdempotencyException
-- ** ArgumentException
, _ArgumentException
-- ** NotFoundException
, _NotFoundException
-- ** ServiceAccountException
, _ServiceAccountException
-- ** LimitExceededException
, _LimitExceededException
-- * Waiters
-- $waiters
-- * Operations
-- $operations
-- ** ListProjects
, module Network.AWS.DeviceFarm.ListProjects
-- ** GetDevicePoolCompatibility
, module Network.AWS.DeviceFarm.GetDevicePoolCompatibility
-- ** ListTests
, module Network.AWS.DeviceFarm.ListTests
-- ** ListArtifacts
, module Network.AWS.DeviceFarm.ListArtifacts
-- ** CreateUpload
, module Network.AWS.DeviceFarm.CreateUpload
-- ** GetDevicePool
, module Network.AWS.DeviceFarm.GetDevicePool
-- ** ListDevicePools
, module Network.AWS.DeviceFarm.ListDevicePools
-- ** GetUpload
, module Network.AWS.DeviceFarm.GetUpload
-- ** CreateDevicePool
, module Network.AWS.DeviceFarm.CreateDevicePool
-- ** ListRuns
, module Network.AWS.DeviceFarm.ListRuns
-- ** GetTest
, module Network.AWS.DeviceFarm.GetTest
-- ** GetDevice
, module Network.AWS.DeviceFarm.GetDevice
-- ** ListJobs
, module Network.AWS.DeviceFarm.ListJobs
-- ** GetJob
, module Network.AWS.DeviceFarm.GetJob
-- ** ScheduleRun
, module Network.AWS.DeviceFarm.ScheduleRun
-- ** GetRun
, module Network.AWS.DeviceFarm.GetRun
-- ** ListSamples
, module Network.AWS.DeviceFarm.ListSamples
-- ** ListSuites
, module Network.AWS.DeviceFarm.ListSuites
-- ** GetAccountSettings
, module Network.AWS.DeviceFarm.GetAccountSettings
-- ** ListUploads
, module Network.AWS.DeviceFarm.ListUploads
-- ** GetSuite
, module Network.AWS.DeviceFarm.GetSuite
-- ** GetProject
, module Network.AWS.DeviceFarm.GetProject
-- ** ListUniqueProblems
, module Network.AWS.DeviceFarm.ListUniqueProblems
-- ** ListDevices
, module Network.AWS.DeviceFarm.ListDevices
-- ** CreateProject
, module Network.AWS.DeviceFarm.CreateProject
-- * Types
-- ** ArtifactCategory
, ArtifactCategory (..)
-- ** ArtifactType
, ArtifactType (..)
-- ** BillingMethod
, BillingMethod (..)
-- ** DeviceAttribute
, DeviceAttribute (..)
-- ** DeviceFormFactor
, DeviceFormFactor (..)
-- ** DevicePlatform
, DevicePlatform (..)
-- ** DevicePoolType
, DevicePoolType (..)
-- ** ExecutionResult
, ExecutionResult (..)
-- ** ExecutionStatus
, ExecutionStatus (..)
-- ** RuleOperator
, RuleOperator (..)
-- ** SampleType
, SampleType (..)
-- ** TestType
, TestType (..)
-- ** UploadStatus
, UploadStatus (..)
-- ** UploadType
, UploadType (..)
-- ** AccountSettings
, AccountSettings
, accountSettings
, asAwsAccountNumber
, asUnmeteredDevices
-- ** Artifact
, Artifact
, artifact
, aArn
, aUrl
, aExtension
, aName
, aType
-- ** CPU
, CPU
, cpu
, cpuFrequency
, cpuClock
, cpuArchitecture
-- ** Counters
, Counters
, counters
, cPassed
, cSkipped
, cWarned
, cStopped
, cTotal
, cFailed
, cErrored
-- ** Device
, Device
, device
, dCarrier
, dImage
, dManufacturer
, dPlatform
, dArn
, dFormFactor
, dResolution
, dMemory
, dRadio
, dOs
, dName
, dModel
, dCpu
, dHeapSize
-- ** DevicePool
, DevicePool
, devicePool
, dpArn
, dpRules
, dpName
, dpType
, dpDescription
-- ** DevicePoolCompatibilityResult
, DevicePoolCompatibilityResult
, devicePoolCompatibilityResult
, dpcrDevice
, dpcrCompatible
, dpcrIncompatibilityMessages
-- ** IncompatibilityMessage
, IncompatibilityMessage
, incompatibilityMessage
, imType
, imMessage
-- ** Job
, Job
, job
, jobStatus
, jobCounters
, jobArn
, jobCreated
, jobDevice
, jobStopped
, jobResult
, jobName
, jobType
, jobMessage
, jobStarted
-- ** Location
, Location
, location
, lLatitude
, lLongitude
-- ** Problem
, Problem
, problem
, pDevice
, pTest
, pResult
, pRun
, pJob
, pMessage
, pSuite
-- ** ProblemDetail
, ProblemDetail
, problemDetail
, pdArn
, pdName
-- ** Project
, Project
, project
, pArn
, pCreated
, pName
-- ** Radios
, Radios
, radios
, rNfc
, rGps
, rBluetooth
, rWifi
-- ** Resolution
, Resolution
, resolution
, rHeight
, rWidth
-- ** Rule
, Rule
, rule
, rAttribute
, rOperator
, rValue
-- ** Run
, Run
, run
, runBillingMethod
, runStatus
, runCounters
, runPlatform
, runArn
, runCreated
, runStopped
, runResult
, runCompletedJobs
, runName
, runType
, runMessage
, runTotalJobs
, runStarted
-- ** Sample
, Sample
, sample
, samArn
, samUrl
, samType
-- ** ScheduleRunConfiguration
, ScheduleRunConfiguration
, scheduleRunConfiguration
, srcBillingMethod
, srcRadios
, srcLocation
, srcLocale
, srcNetworkProfileARN
, srcExtraDataPackageARN
, srcAuxiliaryApps
-- ** ScheduleRunTest
, ScheduleRunTest
, scheduleRunTest
, srtTestPackageARN
, srtParameters
, srtFilter
, srtType
-- ** Suite
, Suite
, suite
, sStatus
, sCounters
, sArn
, sCreated
, sStopped
, sResult
, sName
, sType
, sMessage
, sStarted
-- ** Test
, Test
, test
, tStatus
, tCounters
, tArn
, tCreated
, tStopped
, tResult
, tName
, tType
, tMessage
, tStarted
-- ** UniqueProblem
, UniqueProblem
, uniqueProblem
, upProblems
, upMessage
-- ** Upload
, Upload
, upload
, uStatus
, uArn
, uCreated
, uUrl
, uName
, uMetadata
, uType
, uMessage
, uContentType
) where
import Network.AWS.DeviceFarm.CreateDevicePool
import Network.AWS.DeviceFarm.CreateProject
import Network.AWS.DeviceFarm.CreateUpload
import Network.AWS.DeviceFarm.GetAccountSettings
import Network.AWS.DeviceFarm.GetDevice
import Network.AWS.DeviceFarm.GetDevicePool
import Network.AWS.DeviceFarm.GetDevicePoolCompatibility
import Network.AWS.DeviceFarm.GetJob
import Network.AWS.DeviceFarm.GetProject
import Network.AWS.DeviceFarm.GetRun
import Network.AWS.DeviceFarm.GetSuite
import Network.AWS.DeviceFarm.GetTest
import Network.AWS.DeviceFarm.GetUpload
import Network.AWS.DeviceFarm.ListArtifacts
import Network.AWS.DeviceFarm.ListDevicePools
import Network.AWS.DeviceFarm.ListDevices
import Network.AWS.DeviceFarm.ListJobs
import Network.AWS.DeviceFarm.ListProjects
import Network.AWS.DeviceFarm.ListRuns
import Network.AWS.DeviceFarm.ListSamples
import Network.AWS.DeviceFarm.ListSuites
import Network.AWS.DeviceFarm.ListTests
import Network.AWS.DeviceFarm.ListUniqueProblems
import Network.AWS.DeviceFarm.ListUploads
import Network.AWS.DeviceFarm.ScheduleRun
import Network.AWS.DeviceFarm.Types
import Network.AWS.DeviceFarm.Waiters
{- $errors
Error matchers are designed for use with the functions provided by
<http://hackage.haskell.org/package/lens/docs/Control-Exception-Lens.html Control.Exception.Lens>.
This allows catching (and rethrowing) service specific errors returned
by 'DeviceFarm'.
-}
{- $operations
Some AWS operations return results that are incomplete and require subsequent
requests in order to obtain the entire result set. The process of sending
subsequent requests to continue where a previous request left off is called
pagination. For example, the 'ListObjects' operation of Amazon S3 returns up to
1000 objects at a time, and you must send subsequent requests with the
appropriate Marker in order to retrieve the next page of results.
Operations that have an 'AWSPager' instance can transparently perform subsequent
requests, correctly setting Markers and other request facets to iterate through
the entire result set of a truncated API operation. Operations which support
this have an additional note in the documentation.
Many operations have the ability to filter results on the server side. See the
individual operation parameters for details.
-}
{- $waiters
Waiters poll by repeatedly sending a request until some remote success condition
configured by the 'Wait' specification is fulfilled. The 'Wait' specification
determines how many attempts should be made, in addition to delay and retry strategies.
-}
| fmapfmapfmap/amazonka | amazonka-devicefarm/gen/Network/AWS/DeviceFarm.hs | mpl-2.0 | 9,960 | 0 | 5 | 2,662 | 1,155 | 832 | 323 | 258 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module Module1_Types where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound, seq,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import qualified Control.Applicative as Applicative (ZipList(..))
import Control.Applicative ( (<*>) )
import qualified Control.DeepSeq as DeepSeq
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad ( liftM, ap, when )
import qualified Data.ByteString.Lazy as BS
import Data.Functor ( (<$>) )
import qualified Data.Hashable as Hashable
import qualified Data.Int as Int
import qualified Data.Maybe as Maybe (catMaybes)
import qualified Data.Text.Lazy.Encoding as Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified Data.Typeable as Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as Arbitrary ( Arbitrary(..) )
import qualified Test.QuickCheck as QuickCheck ( elements )
import qualified Thrift
import qualified Thrift.Types as Types
import qualified Thrift.Serializable as Serializable
import qualified Thrift.Arbitraries as Arbitraries
-- Thrift enum with explicit wire values (ONE = 1, TWO = 2, THREE = 3),
-- hence the hand-written 'Enum' instance rather than a derived one.
-- NOTE: this file is generated by the Thrift compiler; do not edit by hand.
data Enum = ONE|TWO|THREE deriving (Show,Eq, Typeable.Typeable, Ord, Bounded)
instance Enum Enum where
  fromEnum t = case t of
    ONE -> 1
    TWO -> 2
    THREE -> 3
  toEnum t = case t of
    1 -> ONE
    2 -> TWO
    3 -> THREE
    -- any other wire value is treated as a protocol error
    _ -> Exception.throw Thrift.ThriftException
-- Hash via the wire value, keeping hashing consistent with 'fromEnum'.
instance Hashable.Hashable Enum where
  hashWithSalt salt = Hashable.hashWithSalt salt . fromEnum
-- Nullary constructors: evaluating to WHNF is already normal form.
instance DeepSeq.NFData Enum where
  rnf x = x `seq` ()
-- QuickCheck generator: pick uniformly among all constructors.
instance Arbitrary.Arbitrary Enum where
  arbitrary = QuickCheck.elements (enumFromTo minBound maxBound)
-- Thrift struct with fields: 1: i32 first, 2: string second.
-- NOTE: this file is generated by the Thrift compiler; do not edit by hand.
data Struct = Struct
  { struct_first :: Int.Int32
  , struct_second :: LT.Text
  } deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable Struct where
  encode = encode_Struct
  decode = decode_Struct
-- Hash folds each field into the salt in declaration order.
instance Hashable.Hashable Struct where
  hashWithSalt salt record = salt   `Hashable.hashWithSalt` struct_first record   `Hashable.hashWithSalt` struct_second record
-- Deep evaluation forces every field.
instance DeepSeq.NFData Struct where
  rnf _record0 =
    DeepSeq.rnf (struct_first _record0) `seq`
    DeepSeq.rnf (struct_second _record0) `seq`
    ()
instance Arbitrary.Arbitrary Struct where
  arbitrary = Monad.liftM Struct (Arbitrary.arbitrary)
    `Monad.ap`(Arbitrary.arbitrary)
  -- Shrink toward 'default_Struct' by resetting all but one field at a
  -- time; the fully-default value shrinks to nothing.
  shrink obj | obj == default_Struct = []
             | otherwise = Maybe.catMaybes
    [ if obj == default_Struct{struct_first = struct_first obj} then Nothing else Just $ default_Struct{struct_first = struct_first obj}
    , if obj == default_Struct{struct_second = struct_second obj} then Nothing else Just $ default_Struct{struct_second = struct_second obj}
    ]
-- Convert a 'Struct' to the generic Thrift value representation,
-- keyed by field id with the field name attached for debugging.
-- NOTE: generated code; do not edit by hand.
from_Struct :: Struct -> Types.ThriftVal
from_Struct record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
  [ (\_v3 -> Just (1, ("first",Types.TI32 _v3))) $ struct_first record
  , (\_v3 -> Just (2, ("second",Types.TString $ Encoding.encodeUtf8 _v3))) $ struct_second record
  ]
-- Write a 'Struct' directly to a transport via the given protocol.
write_Struct :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Struct -> IO ()
write_Struct oprot record = Thrift.writeVal oprot $ from_Struct record
-- Serialize a 'Struct' to a lazy ByteString.
encode_Struct :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Struct -> BS.ByteString
encode_Struct oprot record = Thrift.serializeVal oprot $ from_Struct record
-- Rebuild a 'Struct' from the generic representation; missing fields
-- fall back to their defaults, mistyped fields raise 'error'.
to_Struct :: Types.ThriftVal -> Struct
to_Struct (Types.TStruct fields) = Struct{
  struct_first = maybe (struct_first default_Struct) (\(_,_val5) -> (case _val5 of {Types.TI32 _val6 -> _val6; _ -> error "wrong type"})) (Map.lookup (1) fields),
  struct_second = maybe (struct_second default_Struct) (\(_,_val5) -> (case _val5 of {Types.TString _val7 -> Encoding.decodeUtf8 _val7; _ -> error "wrong type"})) (Map.lookup (2) fields)
  }
to_Struct _ = error "not a struct"
-- Read one 'Struct' from a transport.
read_Struct :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Struct
read_Struct iprot = to_Struct <$> Thrift.readVal iprot (Types.T_STRUCT typemap_Struct)
-- Deserialize a 'Struct' from a lazy ByteString.
decode_Struct :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Struct
decode_Struct iprot bs = to_Struct $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Struct) bs
-- Field-id/type map used by the deserializer.
typemap_Struct :: Types.TypeMap
typemap_Struct = Map.fromList [("first",(1,Types.T_I32)),("second",(2,Types.T_STRING))]
-- Default values: first = 0, second = "".
default_Struct :: Struct
default_Struct = Struct{
  struct_first = 0,
  struct_second = ""}
| Orvid/fbthrift | thrift/compiler/test/fixtures/qualified/gen-hs/Module1_Types.hs | apache-2.0 | 5,298 | 0 | 16 | 902 | 1,551 | 897 | 654 | 97 | 3 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE StandaloneDeriving #-}
-------------------------------------------------------------------------------
-- |
-- Module : System.Timeout
-- Copyright : (c) The University of Glasgow 2007
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable
--
-- Attach a timeout event to arbitrary 'IO' computations.
--
-------------------------------------------------------------------------------
module System.Timeout ( timeout ) where
#if !defined(mingw32_HOST_OS)
import Control.Monad
import GHC.Event (getSystemTimerManager,
registerTimeout, unregisterTimeout)
#endif
import Control.Concurrent
import Control.Exception (Exception(..), handleJust, bracket,
uninterruptibleMask_,
asyncExceptionToException,
asyncExceptionFromException)
import Data.Unique (Unique, newUnique)
-- An internal type that is thrown as a dynamic exception to
-- interrupt the running IO computation when the timeout has
-- expired.
-- A fresh 'Unique' is allocated per 'timeout' call, so a nested
-- 'timeout' can tell its own exception apart from one belonging to an
-- enclosing or enclosed timeout via the derived 'Eq'.
newtype Timeout = Timeout Unique deriving (Eq)

-- | @since 3.0
instance Show Timeout where
    show _ = "<<timeout>>"

-- Timeout is a child of SomeAsyncException
-- | @since 4.7.0.0
instance Exception Timeout where
  -- Routing through the SomeAsyncException wrappers makes 'Timeout'
  -- match handlers that catch asynchronous exceptions generically.
  toException = asyncExceptionToException
  fromException = asyncExceptionFromException
-- |Wrap an 'IO' computation to time out and return @Nothing@ in case no result
-- is available within @n@ microseconds (@1\/10^6@ seconds). In case a result
-- is available before the timeout expires, @Just a@ is returned. A negative
-- timeout interval means \"wait indefinitely\". When specifying long timeouts,
-- be careful not to exceed @maxBound :: Int@.
--
-- >>> timeout 1000000 (threadDelay 1000 *> pure "finished on time")
-- Just "finished on time"
--
-- >>> timeout 10000 (threadDelay 100000 *> pure "finished on time")
-- Nothing
--
-- The design of this combinator was guided by the objective that @timeout n f@
-- should behave exactly the same as @f@ as long as @f@ doesn't time out. This
-- means that @f@ has the same 'myThreadId' it would have without the timeout
-- wrapper. Any exceptions @f@ might throw cancel the timeout and propagate
-- further up. It also possible for @f@ to receive exceptions thrown to it by
-- another thread.
--
-- A tricky implementation detail is the question of how to abort an @IO@
-- computation. This combinator relies on asynchronous exceptions internally.
-- The technique works very well for computations executing inside of the
-- Haskell runtime system, but it doesn't work at all for non-Haskell code.
-- Foreign function calls, for example, cannot be timed out with this
-- combinator simply because an arbitrary C function cannot receive
-- asynchronous exceptions. When @timeout@ is used to wrap an FFI call that
-- blocks, no timeout event can be delivered until the FFI call returns, which
-- pretty much negates the purpose of the combinator. In practice, however,
-- this limitation is less severe than it may sound. Standard I\/O functions
-- like 'System.IO.hGetBuf', 'System.IO.hPutBuf', Network.Socket.accept, or
-- 'System.IO.hWaitForInput' appear to be blocking, but they really don't
-- because the runtime system uses scheduling mechanisms like @select(2)@ to
-- perform asynchronous I\/O, so it is possible to interrupt standard socket
-- I\/O or file I\/O using this combinator.
timeout :: Int -> IO a -> IO (Maybe a)
timeout n f
    | n < 0 = fmap Just f          -- negative interval: wait indefinitely
    | n == 0 = return Nothing      -- zero interval: give up immediately
#if !defined(mingw32_HOST_OS)
    | rtsSupportsBoundThreads = do
        -- In the threaded RTS, we use the Timer Manager to delay the
        -- (fairly expensive) 'forkIO' call until the timeout has expired.
        --
        -- An additional thread is required for the actual delivery of
        -- the Timeout exception because killThread (or another throwTo)
        -- is the only way to reliably interrupt a throwTo in flight.
        pid <- myThreadId
        ex <- fmap Timeout newUnique
        tm <- getSystemTimerManager
        -- 'lock' synchronizes the timeout handler and the main thread:
        -- * the main thread can disable the handler by writing to 'lock';
        -- * the handler communicates the spawned thread's id through 'lock'.
        -- These two cases are mutually exclusive.
        lock <- newEmptyMVar
        let handleTimeout = do
                -- if 'lock' is already full, the main thread finished
                -- first and the handler must not fire
                v <- isEmptyMVar lock
                when v $ void $ forkIOWithUnmask $ \unmask -> unmask $ do
                    v2 <- tryPutMVar lock =<< myThreadId
                    when v2 $ throwTo pid ex
            cleanupTimeout key = uninterruptibleMask_ $ do
                -- the 'undefined' is never forced: only the MVar's
                -- fullness is inspected by the handler, and this branch
                -- unregisters the timeout rather than reading it back
                v <- tryPutMVar lock undefined
                if v then unregisterTimeout tm key
                     else takeMVar lock >>= killThread
        handleJust (\e -> if e == ex then Just () else Nothing)
                   (\_ -> return Nothing)
                   (bracket (registerTimeout tm n handleTimeout)
                            cleanupTimeout
                            (\_ -> fmap Just f))
#endif
    | otherwise = do
        pid <- myThreadId
        ex <- fmap Timeout newUnique
        handleJust (\e -> if e == ex then Just () else Nothing)
                   (\_ -> return Nothing)
                   (bracket (forkIOWithUnmask $ \unmask ->
                                unmask $ threadDelay n >> throwTo pid ex)
                            (uninterruptibleMask_ . killThread)
                            (\_ -> fmap Just f))
        -- #7719 explains why we need uninterruptibleMask_ above.
| ezyang/ghc | libraries/base/System/Timeout.hs | bsd-3-clause | 5,767 | 0 | 20 | 1,490 | 640 | 358 | 282 | 51 | 4 |
{-
(c) The University of Glasgow, 2000-2006
\section[Finder]{Module Finder}
-}
{-# LANGUAGE CPP #-}
module Finder (
flushFinderCaches,
FindResult(..),
findImportedModule,
findPluginModule,
findExactModule,
findHomeModule,
findExposedPackageModule,
mkHomeModLocation,
mkHomeModLocation2,
mkHiOnlyModLocation,
mkHiPath,
mkObjPath,
addHomeModuleToFinder,
uncacheModule,
mkStubPaths,
findObjectLinkableMaybe,
findObjectLinkable,
cannotFindModule,
cannotFindInterface,
) where
#include "HsVersions.h"
import Module
import HscTypes
import Packages
import FastString
import Util
import PrelNames ( gHC_PRIM )
import DynFlags
import Outputable
import Maybes ( expectJust )
import Data.IORef ( IORef, readIORef, atomicModifyIORef' )
import System.Directory
import System.FilePath
import Control.Monad
import Data.Time
import Data.List ( foldl' )
type FileExt = String -- Filename extension
type BaseName = String -- Basename of file
-- -----------------------------------------------------------------------------
-- The Finder
-- The Finder provides a thin filesystem abstraction to the rest of
-- the compiler. For a given module, it can tell you where the
-- source, interface, and object files for that module live.
-- It does *not* know which particular package a module lives in. Use
-- Packages.lookupModuleInAllPackages for that.
-- -----------------------------------------------------------------------------
-- The finder's cache
-- remove all the home modules from the cache; package modules are
-- assumed to not move around during a session.
-- | Remove all home-package modules from the finder cache; entries for
-- modules in external packages are kept, since packages are assumed
-- not to move around during a session.
flushFinderCaches :: HscEnv -> IO ()
flushFinderCaches hsc_env =
  atomicModifyIORef' fc_ref $ \fm -> (filterModuleEnv is_ext fm, ())
 where
        this_pkg = thisPackage (hsc_dflags hsc_env)
        fc_ref = hsc_FC hsc_env
        -- keep an entry iff it belongs to some other package
        -- (direct predicate instead of guards returning True/False)
        is_ext mod _ = moduleUnitId mod /= this_pkg
-- | Record the result of a finder lookup for @key@ in the cache.
addToFinderCache :: IORef FinderCache -> Module -> FindResult -> IO ()
addToFinderCache ref key val = atomicModifyIORef' ref insert
  where insert cache = (extendModuleEnv cache key val, ())
-- | Forget any cached finder result for @key@.
removeFromFinderCache :: IORef FinderCache -> Module -> IO ()
removeFromFinderCache ref key = atomicModifyIORef' ref evict
  where evict cache = (delModuleEnv cache key, ())
-- | Consult the finder cache; 'Nothing' means no lookup has been
-- recorded for this module yet.
lookupFinderCache :: IORef FinderCache -> Module -> IO (Maybe FindResult)
lookupFinderCache ref key =
  readIORef ref >>= \cache -> return $! lookupModuleEnv cache key
-- -----------------------------------------------------------------------------
-- The three external entry points
-- | Locate a module that was imported by the user. We have the
-- module's name, and possibly a package name. Without a package
-- name, this function will use the search path and the known exposed
-- packages to find the module, if a package is specified then only
-- that package is searched for the module.
-- | Locate a module imported by the user.  With no package qualifier
-- we search the home package first and then the exposed packages; the
-- special qualifier \"this\" restricts the search to the home package,
-- and any other qualifier restricts it to that package.
findImportedModule :: HscEnv -> ModuleName -> Maybe FastString -> IO FindResult
findImportedModule hsc_env mod_name mb_pkg
  | Just pkg <- mb_pkg
  = if pkg == fsLit "this"
      then search_home          -- "this" always means the home package
      else findExposedPackageModule hsc_env mod_name mb_pkg
  | otherwise
  = search_home `orIfNotFound`
      findExposedPackageModule hsc_env mod_name Nothing
  where
    search_home = findHomeModule hsc_env mod_name
-- | Locate a plugin module requested by the user, for a compiler
-- plugin. This consults the same set of exposed packages as
-- 'findImportedModule', unless @-hide-all-plugin-packages@ or
-- @-plugin-package@ are specified.
-- | Locate a compiler-plugin module: home package first, then the
-- packages exposed for plugins.
findPluginModule :: HscEnv -> ModuleName -> IO FindResult
findPluginModule hsc_env mod_name =
  orIfNotFound (findHomeModule hsc_env mod_name)
               (findExposedPluginPackageModule hsc_env mod_name)
-- | Locate a specific 'Module'. The purpose of this function is to
-- create a 'ModLocation' for a given 'Module', that is to find out
-- where the files associated with this module live. It is used when
-- reading the interface for a module mentioned by another interface,
-- for example (a "system import").
-- | Locate a specific 'Module', e.g. one mentioned by another
-- interface file: home-package modules are found via the home search,
-- everything else via the package search.
findExactModule :: HscEnv -> Module -> IO FindResult
findExactModule hsc_env mod
  | moduleUnitId mod == thisPackage (hsc_dflags hsc_env)
  = findHomeModule hsc_env (moduleName mod)
  | otherwise
  = findPackageModule hsc_env mod
-- -----------------------------------------------------------------------------
-- Helpers
-- | Given a monadic actions @this@ and @or_this@, first execute
-- @this@. If the returned 'FindResult' is successful, return
-- it; otherwise, execute @or_this@. If both failed, this function
-- also combines their failure messages in a reasonable way.
-- | Run the first search; if it succeeds, return its result, otherwise
-- run the second.  When both fail, merge their 'NotFound' information
-- (paths tried, hidden packages\/modules, suggestions) so the eventual
-- error message mentions everything that was attempted.
orIfNotFound :: Monad m => m FindResult -> m FindResult -> m FindResult
orIfNotFound this or_this = do
  res <- this
  case res of
    NotFound { fr_paths = paths1, fr_mods_hidden = mh1
             , fr_pkgs_hidden = ph1, fr_suggestions = s1 } -> do
      res2 <- or_this
      case res2 of
        nf2@NotFound { fr_paths = paths2, fr_mods_hidden = mh2
                     , fr_pkgs_hidden = ph2, fr_suggestions = s2 } ->
          -- fr_pkg is inherited from nf2, the second (package) search
          return nf2 { fr_paths       = paths1 ++ paths2
                     , fr_mods_hidden = mh1 ++ mh2
                     , fr_pkgs_hidden = ph1 ++ ph2
                     , fr_suggestions = s1 ++ s2 }
        _other -> return res2
    _other -> return res
-- | Helper function for 'findHomeModule': this function wraps an IO action
-- which would look up @mod_name@ in the file system (the home package),
-- and first consults the 'hsc_FC' cache to see if the lookup has already
-- been done. Otherwise, do the lookup (with the IO action) and save
-- the result in the finder cache and the module location cache (if it
-- was successful.)
-- | Wrap a home-package filesystem lookup with the finder cache: a
-- cached result is returned directly, otherwise the IO action runs and
-- its result is cached.
homeSearchCache :: HscEnv -> ModuleName -> IO FindResult -> IO FindResult
homeSearchCache hsc_env mod_name do_this =
  modLocationCache hsc_env home_mod do_this
  where
    home_mod = mkModule (thisPackage (hsc_dflags hsc_env)) mod_name
-- | Search the exposed packages for @mod_name@, optionally restricted
-- to the named package.
findExposedPackageModule :: HscEnv -> ModuleName -> Maybe FastString
                         -> IO FindResult
findExposedPackageModule hsc_env mod_name mb_pkg =
  findLookupResult hsc_env
    (lookupModuleWithSuggestions (hsc_dflags hsc_env) mod_name mb_pkg)
-- | Search the packages exposed for plugins for @mod_name@.
findExposedPluginPackageModule :: HscEnv -> ModuleName
                               -> IO FindResult
findExposedPluginPackageModule hsc_env mod_name =
  findLookupResult hsc_env
    (lookupPluginModuleWithSuggestions (hsc_dflags hsc_env) mod_name Nothing)
-- | Translate a package-database 'LookupResult' into a 'FindResult',
-- descending into the package for an actual hit.
findLookupResult :: HscEnv -> LookupResult -> IO FindResult
findLookupResult hsc_env lr = case lr of
  LookupFound m pkg_conf ->
    findPackageModule_ hsc_env m pkg_conf
  LookupMultiple rs ->
    return (FoundMultiple rs)
  LookupHidden pkg_hiddens mod_hiddens ->
    return emptyNotFound
      { fr_pkgs_hidden = map (moduleUnitId . fst) pkg_hiddens
      , fr_mods_hidden = map (moduleUnitId . fst) mod_hiddens }
  LookupNotFound suggest ->
    return emptyNotFound { fr_suggestions = suggest }
  where
    -- failure result with every field blank; the branches above fill
    -- in whichever detail they actually know
    emptyNotFound = NotFound { fr_paths = [], fr_pkg = Nothing
                             , fr_pkgs_hidden = []
                             , fr_mods_hidden = []
                             , fr_suggestions = [] }
-- | Memoize a finder lookup for @mod@ through the finder cache: return
-- a cached hit, or run the action and remember its result.
modLocationCache :: HscEnv -> Module -> IO FindResult -> IO FindResult
modLocationCache hsc_env mod do_this =
    lookupFinderCache fc mod >>= maybe fill return
  where
    fc = hsc_FC hsc_env
    -- cache miss: perform the lookup and record the outcome
    fill = do
      result <- do_this
      addToFinderCache fc mod result
      return result
-- | Cache a known location for a home-package module and return the
-- corresponding 'Module'.
addHomeModuleToFinder :: HscEnv -> ModuleName -> ModLocation -> IO Module
addHomeModuleToFinder hsc_env mod_name loc = do
  let home_mod = mkModule (thisPackage (hsc_dflags hsc_env)) mod_name
  addToFinderCache (hsc_FC hsc_env) home_mod (Found loc home_mod)
  return home_mod
-- | Drop any cached finder result for a home-package module.
uncacheModule :: HscEnv -> ModuleName -> IO ()
uncacheModule hsc_env mod =
  removeFromFinderCache (hsc_FC hsc_env)
                        (mkModule (thisPackage (hsc_dflags hsc_env)) mod)
-- -----------------------------------------------------------------------------
-- The internal workers
-- | Implements the search for a module name in the home package only. Calling
-- this function directly is usually *not* what you want; currently, it's used
-- as a building block for the following operations:
--
-- 1. When you do a normal package lookup, we first check if the module
-- is available in the home module, before looking it up in the package
-- database.
--
-- 2. When you have a package qualified import with package name "this",
-- we shortcut to the home module.
--
-- 3. When we look up an exact 'Module', if the unit id associated with
-- the module is the current home module do a look up in the home module.
--
-- 4. Some special-case code in GHCi (ToDo: Figure out why that needs to
-- call this.)
-- | Search the home package for @mod_name@, going through (and
-- filling) the finder cache.  In compilation-manager modes we look for
-- source files, which we can compile ourselves; in one-shot mode only
-- interface files (.hi and .hi-boot) are searched for.
findHomeModule :: HscEnv -> ModuleName -> IO FindResult
findHomeModule hsc_env mod_name = homeSearchCache hsc_env mod_name $
    if mod == gHC_PRIM
      -- GHC.Prim has no file in the filesystem at all; this matters
      -- when compiling the base package, where it is a home module.
      then return (Found (error "GHC.Prim ModLocation") mod)
      else searchPathExts search_dirs mod exts
  where
    dflags      = hsc_dflags hsc_env
    search_dirs = importPaths dflags
    hisuf       = hiSuf dflags
    mod         = mkModule (thisPackage dflags) mod_name

    -- source suffixes usable in compilation-manager modes
    source_exts = [ (s, mkHomeModLocationSearched dflags mod_name s)
                  | s <- ["hs", "lhs", "hsig", "lhsig"] ]

    -- interface-only suffixes, used in one-shot mode
    hi_exts = [ (hisuf,               mkHiOnlyModLocation dflags hisuf)
              , (addBootSuffix hisuf, mkHiOnlyModLocation dflags hisuf)
              ]

    exts | isOneShot (ghcMode dflags) = hi_exts
         | otherwise                  = source_exts
-- | Search for a module in external packages only.
-- | Search for a module in external packages only, failing with
-- 'NoPackage' when the module's unit id is not an installed package.
findPackageModule :: HscEnv -> Module -> IO FindResult
findPackageModule hsc_env mod =
  case lookupPackage dflags pkg_id of
    Nothing       -> return (NoPackage pkg_id)
    Just pkg_conf -> findPackageModule_ hsc_env mod pkg_conf
  where
    dflags = hsc_dflags hsc_env
    pkg_id = moduleUnitId mod
-- | Look up the interface file associated with module @mod@. This function
-- requires a few invariants to be upheld: (1) the 'Module' in question must
-- be the module identifier of the *original* implementation of a module,
-- not a reexport (this invariant is upheld by @Packages.hs@) and (2)
-- the 'PackageConfig' must be consistent with the unit id in the 'Module'.
-- The redundancy is to avoid an extra lookup in the package state
-- for the appropriate config.
findPackageModule_ :: HscEnv -> Module -> PackageConfig -> IO FindResult
findPackageModule_ hsc_env mod pkg_conf =
  -- invariant (2) from the comment above: the config must match the
  -- module's unit id
  ASSERT( moduleUnitId mod == packageConfigId pkg_conf )
  modLocationCache hsc_env mod $
  -- special case for GHC.Prim; we won't find it in the filesystem.
  if mod == gHC_PRIM
        then return (Found (error "GHC.Prim ModLocation") mod)
        else
    let
      dflags = hsc_dflags hsc_env
      tag = buildTag dflags
      -- hi-suffix for packages depends on the build tag.
      package_hisuf | null tag  = "hi"
                    | otherwise = tag ++ "_hi"
      mk_hi_loc = mkHiOnlyModLocation dflags package_hisuf
      import_dirs = importDirs pkg_conf
      -- we never look for a .hi-boot file in an external package;
      -- .hi-boot files only make sense for the home package.
    in
    case import_dirs of
      [one] | MkDepend <- ghcMode dflags -> do
          -- there's only one place that this .hi file can be, so
          -- don't bother looking for it.
          let basename = moduleNameSlashes (moduleName mod)
          loc <- mk_hi_loc one basename
          return (Found loc mod)
      _otherwise ->
          searchPathExts import_dirs mod [(package_hisuf, mk_hi_loc)]
-- -----------------------------------------------------------------------------
-- General path searching
-- Try every (path, extension) combination in order; the first file
-- that exists on disk wins and its ModLocation-building action is run.
-- If nothing exists, a NotFound listing every path tried is returned.
searchPathExts
  :: [FilePath]         -- paths to search
  -> Module             -- module name
  -> [ (
        FileExt,                                -- suffix
        FilePath -> BaseName -> IO ModLocation  -- action
       )
     ]
  -> IO FindResult
searchPathExts paths mod exts
   = do result <- search to_search
{-
        hPutStrLn stderr (showSDoc $
                vcat [text "Search" <+> ppr mod <+> sep (map (text. fst) exts)
                    , nest 2 (vcat (map text paths))
                    , case result of
                        Succeeded (loc, p) -> text "Found" <+> ppr loc
                        Failed fs          -> text "not found"])
-}
        return result
  where
    basename = moduleNameSlashes (moduleName mod)

    -- every candidate file, in search order: paths vary slowest,
    -- extensions fastest
    to_search :: [(FilePath, IO ModLocation)]
    to_search = [ (file, fn path basename)
                | path <- paths,
                  (ext,fn) <- exts,
                  let base | path == "." = basename
                           | otherwise   = path </> basename
                      file = base <.> ext
                ]

    search [] = return (NotFound { fr_paths = map fst to_search
                                 , fr_pkg = Just (moduleUnitId mod)
                                 , fr_mods_hidden = [], fr_pkgs_hidden = []
                                 , fr_suggestions = [] })
    search ((file, mk_result) : rest) = do
      b <- doesFileExist file
      if b
        then do { loc <- mk_result; return (Found loc mod) }
        else search rest
-- | Build the 'ModLocation' for a home module found on the search
-- path: the directory component and basename are rejoined and handed
-- to 'mkHomeModLocation2' together with the suffix.
mkHomeModLocationSearched :: DynFlags -> ModuleName -> FileExt
                          -> FilePath -> BaseName -> IO ModLocation
mkHomeModLocationSearched dflags mod ext dir file =
  mkHomeModLocation2 dflags mod (dir </> file) ext
-- -----------------------------------------------------------------------------
-- Constructing a home module location
-- This is where we construct the ModLocation for a module in the home
-- package, for which we have a source file. It is called from three
-- places:
--
-- (a) Here in the finder, when we are searching for a module to import,
-- using the search path (-i option).
--
-- (b) The compilation manager, when constructing the ModLocation for
-- a "root" module (a source file named explicitly on the command line
-- or in a :load command in GHCi).
--
-- (c) The driver in one-shot mode, when we need to construct a
-- ModLocation for a source file named on the command-line.
--
-- Parameters are:
--
-- mod
-- The name of the module
--
-- path
-- (a): The search path component where the source file was found.
-- (b) and (c): "."
--
-- src_basename
-- (a): (moduleNameSlashes mod)
-- (b) and (c): The filename of the source file, minus its extension
--
-- ext
-- The filename extension of the source file (usually "hs" or "lhs").
-- | Build the 'ModLocation' for a home module given its full source
-- filename (extension included).
mkHomeModLocation :: DynFlags -> ModuleName -> FilePath -> IO ModLocation
mkHomeModLocation dflags mod src_filename =
  let (base, ext) = splitExtension src_filename
  in mkHomeModLocation2 dflags mod base ext
-- | Construct the 'ModLocation' for a home-package module whose source file
-- is @src_basename <.> ext@.  The .hi and .o paths are derived from the
-- same basename via 'mkHiPath' and 'mkObjPath', which honour any
-- -hidir/-odir settings in the 'DynFlags'.
mkHomeModLocation2 :: DynFlags
                   -> ModuleName
                   -> FilePath  -- Of source module, without suffix
                   -> String    -- Suffix
                   -> IO ModLocation
mkHomeModLocation2 dflags mod src_basename ext =
   return ModLocation { ml_hs_file  = Just (src_basename <.> ext)
                      , ml_hi_file  = mkHiPath  dflags src_basename slashed
                      , ml_obj_file = mkObjPath dflags src_basename slashed }
  where
    slashed = moduleNameSlashes mod
-- | Construct a 'ModLocation' for a module for which we have no source
-- file, only an interface file (and possibly an object file).
mkHiOnlyModLocation :: DynFlags -> Suffix -> FilePath -> String
                    -> IO ModLocation
mkHiOnlyModLocation dflags hisuf path basename = return loc
  where
    full_basename = path </> basename
    -- ml_hi_file must always name the real .hi file; any .hi-boot suffix
    -- has already been stripped from 'hisuf' by the caller.
    loc = ModLocation { ml_hs_file  = Nothing
                      , ml_hi_file  = full_basename <.> hisuf
                      , ml_obj_file = mkObjPath dflags full_basename basename }
-- | Constructs the filename of a .o file for a given source file.
-- Does /not/ check whether the .o file exists
mkObjPath
  :: DynFlags
  -> FilePath           -- the filename of the source file, minus the extension
  -> String             -- the module name with dots replaced by slashes
  -> FilePath
mkObjPath dflags basename mod_basename =
    case objectDir dflags of
      -- With -odir set, the object mirrors the module hierarchy under that
      -- directory; otherwise it sits next to the source file.
      Just dir -> (dir </> mod_basename) <.> osuf
      Nothing  -> basename <.> osuf
  where
    osuf = objectSuf dflags
-- | Constructs the filename of a .hi file for a given source file.
-- Does /not/ check whether the .hi file exists
mkHiPath
  :: DynFlags
  -> FilePath           -- the filename of the source file, minus the extension
  -> String             -- the module name with dots replaced by slashes
  -> FilePath
mkHiPath dflags basename mod_basename =
    case hiDir dflags of
      -- With -hidir set, the interface mirrors the module hierarchy under
      -- that directory; otherwise it sits next to the source file.
      Just dir -> (dir </> mod_basename) <.> hisuf
      Nothing  -> basename <.> hisuf
  where
    hisuf = hiSuf dflags
-- -----------------------------------------------------------------------------
-- Filenames of the stub files
-- We don't have to store these in ModLocations, because they can be derived
-- from other available information, and they're only rarely needed.
-- | Path of the @_stub.h@ header generated for a module's foreign-export
-- stubs.  Derived on demand rather than stored in the 'ModLocation'.
mkStubPaths
  :: DynFlags
  -> ModuleName
  -> ModLocation
  -> FilePath
mkStubPaths dflags mod location = (base ++ "_stub") <.> "h"
  where
    base | Just dir <- stubDir dflags = dir </> moduleNameSlashes mod
         | otherwise                  = src_base
    -- Only demanded when -stubdir is not set; the module is expected to
    -- have a source file, which 'expectJust' asserts.
    src_base = dropExtension (expectJust "mkStubPaths" (ml_hs_file location))
-- -----------------------------------------------------------------------------
-- findLinkable isn't related to the other stuff in here,
-- but there's no other obvious place for it
-- | Look for this module's object file; if it exists, wrap it in a
-- 'Linkable' stamped with the object file's modification time.
findObjectLinkableMaybe :: Module -> ModLocation -> IO (Maybe Linkable)
findObjectLinkableMaybe mod locn = do
    mb_time <- modificationTimeIfExists obj_fn
    case mb_time of
      Nothing   -> return Nothing
      Just time -> fmap Just (findObjectLinkable mod obj_fn time)
  where
    obj_fn = ml_obj_file locn
-- Make an object linkable when we know the object file exists, and we know
-- its modification time.
-- | The resulting 'Linkable' contains exactly one 'DotO' part; stub objects
-- are no longer listed separately (see the note below).
findObjectLinkable :: Module -> FilePath -> UTCTime -> IO Linkable
findObjectLinkable mod obj_fn obj_time = return (LM obj_time mod [DotO obj_fn])
-- We used to look for _stub.o files here, but that was a bug (#706)
-- Now GHC merges the stub.o into the main .o (#3687)
-- We used to look for _stub.o files here, but that was a bug (#706)
-- Now GHC merges the stub.o into the main .o (#3687)
-- -----------------------------------------------------------------------------
-- Error messages
-- | Error message for a module that could not be found; a specialisation of
-- the shared renderer 'cantFindErr' with module-lookup wording.
cannotFindModule :: DynFlags -> ModuleName -> FindResult -> SDoc
cannotFindModule = cantFindErr (sLit "Could not find module")
                                (sLit "Ambiguous module name")
-- | Error message for an interface file that could not be loaded; a
-- specialisation of the shared renderer 'cantFindErr'.
cannotFindInterface  :: DynFlags -> ModuleName -> FindResult -> SDoc
cannotFindInterface = cantFindErr (sLit "Failed to load interface for")
                                   (sLit "Ambiguous interface for")
-- | Shared worker behind 'cannotFindModule' and 'cannotFindInterface': the
-- two 'LitString's supply the "not found" and "ambiguous" headline wordings.
-- The first equation handles 'FoundMultiple'; the second renders every other
-- 'FindResult' with a headline plus detail.
cantFindErr :: LitString -> LitString -> DynFlags -> ModuleName -> FindResult
            -> SDoc
cantFindErr _ multiple_found _ mod_name (FoundMultiple mods)
  | Just pkgs <- unambiguousPackages
  = hang (ptext multiple_found <+> quotes (ppr mod_name) <> colon) 2 (
       sep [text "it was found in multiple packages:",
            hsep (map ppr pkgs) ]
    )
  | otherwise
  = hang (ptext multiple_found <+> quotes (ppr mod_name) <> colon) 2 (
       vcat (map pprMod mods)
    )
  where
    -- Succeeds (with the package list) only when every candidate module
    -- comes from its package's original exposed modules.
    unambiguousPackages = foldl' unambiguousPackage (Just []) mods
    unambiguousPackage (Just xs) (m, ModOrigin (Just _) _ _ _)
        = Just (moduleUnitId m : xs)
    unambiguousPackage _ _ = Nothing
    pprMod (m, o) = text "it is bound as" <+> ppr m <+>
                                text "by" <+> pprOrigin m o
    -- Render where a candidate module comes from: its home package,
    -- reexports, or package flags.
    pprOrigin _ ModHidden = panic "cantFindErr: bound by mod hidden"
    pprOrigin m (ModOrigin e res _ f) = sep $ punctuate comma (
      -- NOTE(review): if/then/else extends to the end of the expression, so
      -- when e == Just True only the "package" entry is produced and the
      -- reexport/flag entries below (owned by the 'else' branch's '++'
      -- chain) are dropped -- confirm this is the intended grouping.
      if e == Just True
          then [text "package" <+> ppr (moduleUnitId m)]
          else [] ++
      map ((text "a reexport in package" <+>)
                .ppr.packageConfigId) res ++
      if f then [text "a package flag"] else []
      )
cantFindErr cannot_find _ dflags mod_name find_result
  = ptext cannot_find <+> quotes (ppr mod_name)
    $$ more_info
  where
    -- Detailed explanation appended below the headline, keyed on the
    -- flavour of 'FindResult'.
    more_info
      = case find_result of
            NoPackage pkg
                -> text "no unit id matching" <+> quotes (ppr pkg) <+>
                   text "was found" $$ looks_like_srcpkgid pkg
            NotFound { fr_paths = files, fr_pkg = mb_pkg
                     , fr_mods_hidden = mod_hiddens, fr_pkgs_hidden = pkg_hiddens
                     , fr_suggestions = suggest }
                | Just pkg <- mb_pkg, pkg /= thisPackage dflags
                -> not_found_in_package pkg files
                | not (null suggest)
                -> pp_suggestions suggest $$ tried_these files
                | null files && null mod_hiddens && null pkg_hiddens
                -> text "It is not a module in the current program, or in any known package."
                | otherwise
                -> vcat (map pkg_hidden pkg_hiddens) $$
                   vcat (map mod_hidden mod_hiddens) $$
                   tried_these files
            _ -> panic "cantFindErr"
    build_tag = buildTag dflags
    not_found_in_package pkg files
       | build_tag /= ""
       = let
            build = if build_tag == "p" then "profiling"
                                        else "\"" ++ build_tag ++ "\""
         in
         text "Perhaps you haven't installed the " <> text build <>
         text " libraries for package " <> quotes (ppr pkg) <> char '?' $$
         tried_these files
       | otherwise
       = text "There are files missing in the " <> quotes (ppr pkg) <>
         text " package," $$
         text "try running 'ghc-pkg check'." $$
         tried_these files
    -- List the file locations probed; the full list only at verbosity >= 3.
    tried_these files
        | null files = Outputable.empty
        | verbosity dflags < 3 =
              text "Use -v to see a list of the files searched for."
        | otherwise =
              hang (text "Locations searched:") 2 $ vcat (map text files)
    pkg_hidden :: UnitId -> SDoc
    pkg_hidden pkgid =
        text "It is a member of the hidden package"
        <+> quotes (ppr pkgid)
        --FIXME: we don't really want to show the unit id here we should
        -- show the source package id or installed package id if it's ambiguous
        <> dot $$ cabal_pkg_hidden_hint pkgid
    cabal_pkg_hidden_hint pkgid
     | gopt Opt_BuildingCabalPackage dflags
        = let pkg = expectJust "pkg_hidden" (lookupPackage dflags pkgid)
           in text "Perhaps you need to add" <+>
              quotes (ppr (packageName pkg)) <+>
              text "to the build-depends in your .cabal file."
     | otherwise = Outputable.empty
    looks_like_srcpkgid :: UnitId -> SDoc
    looks_like_srcpkgid pk
     -- Unsafely coerce a unit id FastString into a source package ID
     -- FastString and see if it means anything.
     | (pkg:pkgs) <- searchPackageId dflags (SourcePackageId (unitIdFS pk))
     = parens (text "This unit ID looks like the source package ID;" $$
       text "the real unit ID is" <+> quotes (ftext (unitIdFS (unitId pkg))) $$
       (if null pkgs then Outputable.empty
        else text "and" <+> int (length pkgs) <+> text "other candidates"))
     -- Todo: also check if it looks like a package name!
     | otherwise = Outputable.empty
    mod_hidden pkg =
        text "it is a hidden module in the package" <+> quotes (ppr pkg)
    pp_suggestions :: [ModuleSuggestion] -> SDoc
    pp_suggestions sugs
      | null sugs = Outputable.empty
      | otherwise = hang (text "Perhaps you meant")
                       2 (vcat (map pp_sugg sugs))
    -- NB: Prefer the *original* location, and then reexports, and then
    -- package flags when making suggestions.  ToDo: if the original package
    -- also has a reexport, prefer that one
    pp_sugg (SuggestVisible m mod o) = ppr m <+> provenance o
      where provenance ModHidden = Outputable.empty
            provenance (ModOrigin{ fromOrigPackage = e,
                                   fromExposedReexport = res,
                                   fromPackageFlag = f })
              | Just True <- e
                 = parens (text "from" <+> ppr (moduleUnitId mod))
              | f && moduleName mod == m
                 = parens (text "from" <+> ppr (moduleUnitId mod))
              | (pkg:_) <- res
                 = parens (text "from" <+> ppr (packageConfigId pkg)
                    <> comma <+> text "reexporting" <+> ppr mod)
              | f
                 = parens (text "defined via package flags to be"
                    <+> ppr mod)
              | otherwise = Outputable.empty
    pp_sugg (SuggestHidden m mod o) = ppr m <+> provenance o
      where provenance ModHidden = Outputable.empty
            provenance (ModOrigin{ fromOrigPackage = e,
                                   fromHiddenReexport = rhs })
              | Just False <- e
                 = parens (text "needs flag -package-key"
                    <+> ppr (moduleUnitId mod))
              | (pkg:_) <- rhs
                 = parens (text "needs flag -package-id"
                    <+> ppr (packageConfigId pkg))
              | otherwise = Outputable.empty
| vTurbine/ghc | compiler/main/Finder.hs | bsd-3-clause | 26,976 | 1 | 20 | 7,732 | 5,084 | 2,589 | 2,495 | 418 | 12 |
{-# LANGUAGE RecordWildCards #-}
import Control.Applicative
import Control.Exception
import Control.Monad
import Control.Monad.Trans.Resource (runResourceT)
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy.Char8 as L8
import Data.List
import Data.Maybe
import Distribution.PackageDescription.Parse
import Distribution.Text
import Distribution.System
import Distribution.Package
import Distribution.PackageDescription hiding (options)
import Distribution.Verbosity
import System.Console.GetOpt
import System.Environment
import System.Directory
import System.IO.Error
import System.Process
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Zip as Zip
import qualified Codec.Compression.GZip as GZip
import Data.Aeson
import qualified Data.CaseInsensitive as CI
import Data.Conduit
import qualified Data.Conduit.Combinators as CC
import Data.List.Extra
import qualified Data.Text as T
import Development.Shake
import Development.Shake.FilePath
import Network.HTTP.Conduit
import Network.HTTP.Types
import Network.Mime
import Prelude -- Silence AMP warning
-- | Entrypoint.
--
-- Parses release-script flags into an initial 'Global' (applying each flag's
-- pure update from 'options'), fills in values that require running external
-- commands (git metadata, stack's project root), and hands off to 'rules'.
main :: IO ()
main =
  shakeArgsWith
    shakeOptions { shakeFiles = releaseDir
                 , shakeVerbosity = Chatty
                 , shakeChange = ChangeModtimeAndDigestInput }
    options $
    \flags args -> do
      gStackPackageDescription <-
        packageDescription <$> readPackageDescription silent "stack.cabal"
      gGithubAuthToken <- lookupEnv githubAuthTokenEnvVar
      gGitRevCount <- length . lines <$> readProcess "git" ["rev-list", "HEAD"] ""
      gGitSha <- trim <$> readProcess "git" ["rev-parse", "HEAD"] ""
      gHomeDir <- getHomeDirectory
      let gGpgKey = "0x575159689BEFB442"
          gAllowDirty = False
          gGithubReleaseTag = Nothing
          Platform arch _ = buildPlatform
          gArch = arch
          gBinarySuffix = ""
          gUploadLabel = Nothing
          gTestHaddocks = True
          gProjectRoot = "" -- Set to real value below.
          gBuildArgs = []
          global0 = foldl (flip id) Global{..} flags
      -- Need to get paths after options since the '--arch' argument can affect them.
      projectRoot' <- getStackPath global0 "project-root"
      let global = global0
            { gProjectRoot = projectRoot' }
      return $ Just $ rules global args
  where
    -- Query @stack path --<path>@ and strip the "<path>:" prefix if present.
    getStackPath global path = do
      out <- readProcess stackProgName (stackArgs global ++ ["path", "--" ++ path]) ""
      return $ trim $ fromMaybe out $ stripPrefix (path ++ ":") out
-- | Additional command-line options.
--
-- Each option either fails with a parse error ('Left') or yields a pure
-- update ('Right') to fold into the initial 'Global' in 'main'.
options :: [OptDescr (Either String (Global -> Global))]
options =
    [ Option "" [gpgKeyOptName]
        (ReqArg (\v -> Right $ \g -> g{gGpgKey = v}) "USER-ID")
        "GPG user ID to sign distribution package with."
    , Option "" [allowDirtyOptName] (NoArg $ Right $ \g -> g{gAllowDirty = True})
        "Allow a dirty working tree for release."
    , Option "" [githubAuthTokenOptName]
        (ReqArg (\v -> Right $ \g -> g{gGithubAuthToken = Just v}) "TOKEN")
        ("Github personal access token (defaults to " ++
         githubAuthTokenEnvVar ++
         " environment variable).")
    , Option "" [githubReleaseTagOptName]
        (ReqArg (\v -> Right $ \g -> g{gGithubReleaseTag = Just v}) "TAG")
        "Github release tag to upload to."
    , Option "" [archOptName]
        (ReqArg
            (\v -> case simpleParse v of
                Nothing -> Left $ "Unknown architecture in --arch option: " ++ v
                Just arch -> Right $ \g -> g{gArch = arch})
            "ARCHITECTURE")
        "Architecture to build (e.g. 'i386' or 'x86_64')."
    , Option "" [binaryVariantOptName]
        (ReqArg (\v -> Right $ \g -> g{gBinarySuffix = v}) "SUFFIX")
        "Extra suffix to add to binary executable archive filename."
    , Option "" [uploadLabelOptName]
        (ReqArg (\v -> Right $ \g -> g{gUploadLabel = Just v}) "LABEL")
        "Label to give the uploaded release asset"
    , Option "" [noTestHaddocksOptName] (NoArg $ Right $ \g -> g{gTestHaddocks = False})
        "Disable testing building haddocks."
    , Option "" [staticOptName] (NoArg $ Right $ \g -> g{gBuildArgs = gBuildArgs g ++ ["--split-objs", "--ghc-options=-optc-Os -optl-static -fPIC"]})
        "Build a static binary."
    , Option "" [buildArgsOptName]
        (ReqArg
            (\v -> Right $ \g -> g{gBuildArgs = gBuildArgs g ++ words v})
            "\"ARG1 ARG2 ...\"")
        "Additional arguments to pass to 'stack build'."
    ]
-- | Shake rules.
--
-- NOTE(review): the rule bodies shell out to external tools (git, gpg, fpm,
-- deb-s3, rpm-s3, signtool on Windows); those must be on PATH for the
-- corresponding targets to succeed.
rules :: Global -> [String] -> Rules ()
rules global@Global{..} args = do
    case args of
        [] -> error "No wanted target(s) specified."
        _ -> want args
    -- Phony targets: 'release' = 'check' followed by 'upload'.
    phony releasePhony $ do
        need [checkPhony]
        need [uploadPhony]
    phony cleanPhony $
        removeFilesAfter releaseDir ["//*"]
    phony checkPhony $
        need [releaseCheckDir </> binaryExeFileName]
    phony uploadPhony $
        mapM_ (\f -> need [releaseDir </> f <.> uploadExt]) binaryPkgFileNames
    phony buildPhony $
        mapM_ (\f -> need [releaseDir </> f]) binaryPkgFileNames
    distroPhonies ubuntuDistro ubuntuVersions debPackageFileName
    distroPhonies debianDistro debianVersions debPackageFileName
    distroPhonies centosDistro centosVersions rpmPackageFileName
    distroPhonies fedoraDistro fedoraVersions rpmPackageFileName
    -- A ".upload" marker file is produced by uploading its base file; GPG
    -- signatures get "(GPG signature)" appended to any upload label.
    releaseDir </> "*" <.> uploadExt %> \out -> do
        let srcFile = dropExtension out
            mUploadLabel =
                if takeExtension srcFile == ascExt
                    then fmap (++ " (GPG signature)") gUploadLabel
                    else gUploadLabel
        uploadToGithubRelease global srcFile mUploadLabel
        copyFileChanged srcFile out
    -- Integration check: refuse a dirty tree, then pedantic build + tests
    -- into a temporary bin dir before accepting the binary.
    releaseCheckDir </> binaryExeFileName %> \out -> do
        need [releaseBinDir </> binaryName </> stackExeFileName]
        Stdout dirty <- cmd "git status --porcelain"
        when (not gAllowDirty && not (null (trim dirty))) $
            error ("Working tree is dirty. Use --" ++ allowDirtyOptName ++ " option to continue anyway.")
        withTempDir $ \tmpDir -> do
            let cmd0 c = cmd (gProjectRoot </> releaseBinDir </> binaryName </> stackExeFileName)
                    (stackArgs global)
                    ["--local-bin-path=" ++ tmpDir]
                    c
                    gBuildArgs
            () <- cmd0 "install" $ concat $ concat
                [["--pedantic --no-haddock-deps"], [" --haddock" | gTestHaddocks]]
            () <- cmd0 (Cwd "etc/scripts") "install cabal-install"
            let cmd' c = cmd (AddPath [tmpDir] []) stackProgName (stackArgs global) c gBuildArgs
            () <- cmd' "test" "--pedantic --flag stack:integration-tests"
            return ()
        copyFileChanged (releaseBinDir </> binaryName </> stackExeFileName) out
    releaseDir </> binaryPkgZipFileName %> \out -> do
        stageFiles <- getBinaryPkgStageFiles
        putNormal $ "zip " ++ out
        liftIO $ do
            entries <- forM stageFiles $ \stageFile -> do
                Zip.readEntry
                    [Zip.OptLocation
                        (dropDirectoryPrefix (releaseStageDir </> binaryName) stageFile)
                        False]
                    stageFile
            let archive = foldr Zip.addEntryToArchive Zip.emptyArchive entries
            L8.writeFile out (Zip.fromArchive archive)
    releaseDir </> binaryPkgTarGzFileName %> \out -> do
        stageFiles <- getBinaryPkgStageFiles
        writeTarGz out releaseStageDir stageFiles
    releaseStageDir </> binaryName </> stackExeFileName %> \out -> do
        copyFileChanged (releaseDir </> binaryExeFileName) out
    releaseStageDir </> (binaryName ++ "//*") %> \out -> do
        copyFileChanged
            (dropDirectoryPrefix (releaseStageDir </> binaryName) out)
            out
    -- The release binary: stripped on Unix, Authenticode-signed on Windows.
    releaseDir </> binaryExeFileName %> \out -> do
        need [releaseBinDir </> binaryName </> stackExeFileName]
        (Stdout versionOut) <- cmd (releaseBinDir </> binaryName </> stackExeFileName) "--version"
        when (not gAllowDirty && "dirty" `isInfixOf` lower versionOut) $
            error ("Refusing continue because 'stack --version' reports dirty. Use --" ++
                   allowDirtyOptName ++ " option to continue anyway.")
        case platformOS of
            Windows -> do
                -- Windows doesn't have or need a 'strip' command, so skip it.
                -- Instead, we sign the executable
                liftIO $ copyFile (releaseBinDir </> binaryName </> stackExeFileName) out
                actionOnException
                    (command_ [] "c:\\Program Files\\Microsoft SDKs\\Windows\\v7.1\\Bin\\signtool.exe"
                        ["sign"
                        ,"/v"
                        ,"/d", synopsis gStackPackageDescription
                        ,"/du", homepage gStackPackageDescription
                        ,"/n", "FP Complete, Corporation"
                        ,"/t", "http://timestamp.verisign.com/scripts/timestamp.dll"
                        ,out])
                    (removeFile out)
            Linux ->
                cmd "strip -p --strip-unneeded --remove-section=.comment -o"
                    [out, releaseBinDir </> binaryName </> stackExeFileName]
            _ ->
                cmd "strip -o"
                    [out, releaseBinDir </> binaryName </> stackExeFileName]
    -- Detached ASCII-armored GPG signature for any release artifact.
    releaseDir </> "*" <.> ascExt %> \out -> do
        need [out -<.> ""]
        _ <- liftIO $ tryJust (guard . isDoesNotExistError) (removeFile out)
        cmd ("gpg " ++ gpgOptions ++ " --detach-sig --armor")
            [ "-u", gGpgKey
            , dropExtension out ]
    releaseBinDir </> binaryName </> stackExeFileName %> \out -> do
        alwaysRerun
        actionOnException
            (cmd stackProgName
                (stackArgs global)
                ["--local-bin-path=" ++ takeDirectory out]
                "install"
                gBuildArgs
                "--pedantic")
            (removeFile out)
    debDistroRules ubuntuDistro ubuntuVersions
    debDistroRules debianDistro debianVersions
    rpmDistroRules centosDistro centosVersions
    rpmDistroRules fedoraDistro fedoraVersions
  where
    -- Build + upload rules for Debian-style (.deb) distributions.
    debDistroRules debDistro0 debVersions = do
      let anyVersion0 = anyDistroVersion debDistro0
      distroVersionDir anyVersion0 </> debPackageFileName anyVersion0 <.> uploadExt %> \out -> do
        let DistroVersion{..} = distroVersionFromPath out debVersions
            pkgFile = dropExtension out
        need [pkgFile]
        () <- cmd "deb-s3 upload --preserve-versions --bucket download.fpcomplete.com"
            [ "--sign=" ++ gGpgKey
            , "--gpg-options=" ++ replace "-" "\\-" gpgOptions
            , "--prefix=" ++ dvDistro
            , "--codename=" ++ dvCodeName
            , pkgFile ]
        -- Also upload to the old, incorrect location for people who still have their systems
        -- configured with it.
        () <- cmd "deb-s3 upload --preserve-versions --bucket download.fpcomplete.com"
            [ "--sign=" ++ gGpgKey
            , "--gpg-options=" ++ replace "-" "\\-" gpgOptions
            , "--prefix=" ++ dvDistro ++ "/" ++ dvCodeName
            , pkgFile ]
        copyFileChanged pkgFile out
      distroVersionDir anyVersion0 </> debPackageFileName anyVersion0 %> \out -> do
        docFiles <- getDocFiles
        let dv@DistroVersion{..} = distroVersionFromPath out debVersions
            inputFiles = concat
                [[debStagedExeFile dv
                 ,debStagedBashCompletionFile dv]
                ,map (debStagedDocDir dv </>) docFiles]
        need inputFiles
        cmd "fpm -f -s dir -t deb"
            "--deb-recommends git --deb-recommends gnupg"
            "-d g++ -d gcc -d libc6-dev -d libffi-dev -d libgmp-dev -d make -d xz-utils -d zlib1g-dev -d netbase -d ca-certificates"
            ["-n", stackProgName
            ,"-C", debStagingDir dv
            ,"-v", debPackageVersionStr dv
            ,"-p", out
            ,"-m", maintainer gStackPackageDescription
            ,"--description", synopsis gStackPackageDescription
            ,"--license", display (license gStackPackageDescription)
            ,"--url", homepage gStackPackageDescription]
            (map (dropDirectoryPrefix (debStagingDir dv)) inputFiles)
      debStagedExeFile anyVersion0 %> \out -> do
        copyFileChanged (releaseDir </> binaryExeFileName) out
      debStagedBashCompletionFile anyVersion0 %> \out -> do
        let dv = distroVersionFromPath out debVersions
        writeBashCompletion (debStagedExeFile dv) out
      debStagedDocDir anyVersion0 ++ "//*" %> \out -> do
        let dv@DistroVersion{..} = distroVersionFromPath out debVersions
            origFile = dropDirectoryPrefix (debStagedDocDir dv) out
        copyFileChanged origFile out
    -- Build + upload rules for RPM-style (.rpm) distributions.
    rpmDistroRules rpmDistro0 rpmVersions = do
      let anyVersion0 = anyDistroVersion rpmDistro0
      distroVersionDir anyVersion0 </> rpmPackageFileName anyVersion0 <.> uploadExt %> \out -> do
        let DistroVersion{..} = distroVersionFromPath out rpmVersions
            pkgFile = dropExtension out
        need [pkgFile]
        -- rpm-s3 signing reads ~/.rpmmacros; write it temporarily and make
        -- sure we never clobber a pre-existing one.
        let rpmmacrosFile = gHomeDir </> ".rpmmacros"
        rpmmacrosExists <- liftIO $ System.Directory.doesFileExist rpmmacrosFile
        when rpmmacrosExists $
            error ("'" ++ rpmmacrosFile ++ "' already exists. Move it out of the way first.")
        actionFinally
            (do writeFileLines rpmmacrosFile
                    [ "%_signature gpg"
                    , "%_gpg_name " ++ gGpgKey ]
                () <- cmd "rpm-s3 --verbose --sign --bucket=download.fpcomplete.com"
                    [ "--repopath=" ++ dvDistro ++ "/" ++ dvVersion
                    , pkgFile ]
                return ())
            (liftIO $ removeFile rpmmacrosFile)
        copyFileChanged pkgFile out
      distroVersionDir anyVersion0 </> rpmPackageFileName anyVersion0 %> \out -> do
        docFiles <- getDocFiles
        let dv@DistroVersion{..} = distroVersionFromPath out rpmVersions
            inputFiles = concat
                [[rpmStagedExeFile dv
                 ,rpmStagedBashCompletionFile dv]
                ,map (rpmStagedDocDir dv </>) docFiles]
        need inputFiles
        cmd "fpm -s dir -t rpm"
            "-d perl -d make -d automake -d gcc -d gmp-devel -d libffi -d zlib -d xz -d tar"
            ["-n", stackProgName
            ,"-C", rpmStagingDir dv
            ,"-v", rpmPackageVersionStr dv
            ,"--iteration", rpmPackageIterationStr dv
            ,"-p", out
            ,"-m", maintainer gStackPackageDescription
            ,"--description", synopsis gStackPackageDescription
            ,"--license", display (license gStackPackageDescription)
            ,"--url", homepage gStackPackageDescription]
            (map (dropDirectoryPrefix (rpmStagingDir dv)) inputFiles)
      rpmStagedExeFile anyVersion0 %> \out -> do
        copyFileChanged (releaseDir </> binaryExeFileName) out
      rpmStagedBashCompletionFile anyVersion0 %> \out -> do
        let dv = distroVersionFromPath out rpmVersions
        writeBashCompletion (rpmStagedExeFile dv) out
      rpmStagedDocDir anyVersion0 ++ "//*" %> \out -> do
        let dv@DistroVersion{..} = distroVersionFromPath out rpmVersions
            origFile = dropDirectoryPrefix (rpmStagedDocDir dv) out
        copyFileChanged origFile out
    -- Generate the bash completion script by asking the staged binary.
    writeBashCompletion stagedStackExeFile out = do
        need [stagedStackExeFile]
        (Stdout bashCompletionScript) <- cmd [stagedStackExeFile] "--bash-completion-script" [stackProgName]
        writeFileChanged out bashCompletionScript
    getBinaryPkgStageFiles = do
        docFiles <- getDocFiles
        let stageFiles = concat
                [[releaseStageDir </> binaryName </> stackExeFileName]
                ,map ((releaseStageDir </> binaryName) </>) docFiles]
        need stageFiles
        return stageFiles
    getDocFiles = getDirectoryFiles "." ["LICENSE", "*.md", "doc//*.md"]
    -- Recover the distro/version from a path of the form
    -- _release/<distro>/<version>/...
    distroVersionFromPath path versions =
        let path' = dropDirectoryPrefix releaseDir path
            version = takeDirectory1 (dropDirectory1 path')
        in DistroVersion (takeDirectory1 path') version (lookupVersionCodeName version versions)
    distroPhonies distro0 versions0 makePackageFileName =
        forM_ versions0 $ \(version0,_) -> do
            let dv@DistroVersion{..} = DistroVersion distro0 version0 (lookupVersionCodeName version0 versions0)
            phony (distroUploadPhony dv) $ need [distroVersionDir dv </> makePackageFileName dv <.> uploadExt]
            phony (distroBuildPhony dv) $ need [distroVersionDir dv </> makePackageFileName dv]
    lookupVersionCodeName version versions =
        fromMaybe (error $ "lookupVersionCodeName: could not find " ++ show version ++ " in " ++ show versions) $
            lookup version versions
    releasePhony = "release"
    checkPhony = "check"
    uploadPhony = "upload"
    cleanPhony = "clean"
    buildPhony = "build"
    distroUploadPhony DistroVersion{..} = "upload-" ++ dvDistro ++ "-" ++ dvVersion
    distroBuildPhony DistroVersion{..} = "build-" ++ dvDistro ++ "-" ++ dvVersion
    releaseCheckDir = releaseDir </> "check"
    releaseStageDir = releaseDir </> "stage"
    releaseBinDir = releaseDir </> "bin"
    distroVersionDir DistroVersion{..} = releaseDir </> dvDistro </> dvVersion
    binaryPkgFileNames = binaryPkgArchiveFileNames ++ binaryPkgSignatureFileNames
    binaryPkgSignatureFileNames = map (<.> ascExt) binaryPkgArchiveFileNames
    binaryPkgArchiveFileNames =
        case platformOS of
            Windows -> [binaryPkgZipFileName, binaryPkgTarGzFileName]
            _ -> [binaryPkgTarGzFileName]
    binaryPkgZipFileName = binaryName <.> zipExt
    binaryPkgTarGzFileName = binaryName <.> tarGzExt
    binaryExeFileName = binaryName <.> exe
    -- e.g. "stack-1.2.0-linux-x86_64[-suffix]"
    binaryName =
        concat
            [ stackProgName
            , "-"
            , stackVersionStr global
            , "-"
            , display platformOS
            , "-"
            , display gArch
            , if null gBinarySuffix then "" else "-" ++ gBinarySuffix ]
    stackExeFileName = stackProgName <.> exe
    debStagedDocDir dv = debStagingDir dv </> "usr/share/doc" </> stackProgName
    debStagedBashCompletionFile dv = debStagingDir dv </> "etc/bash_completion.d/stack"
    debStagedExeFile dv = debStagingDir dv </> "usr/bin/stack"
    debStagingDir dv = distroVersionDir dv </> debPackageName dv
    debPackageFileName dv = debPackageName dv <.> debExt
    debPackageName dv = stackProgName ++ "_" ++ debPackageVersionStr dv ++ "_amd64"
    debPackageVersionStr DistroVersion{..} = stackVersionStr global ++ "-0~" ++ dvCodeName
    rpmStagedDocDir dv = rpmStagingDir dv </> "usr/share/doc" </> (stackProgName ++ "-" ++ rpmPackageVersionStr dv)
    rpmStagedBashCompletionFile dv = rpmStagingDir dv </> "etc/bash_completion.d/stack"
    rpmStagedExeFile dv = rpmStagingDir dv </> "usr/bin/stack"
    rpmStagingDir dv = distroVersionDir dv </> rpmPackageName dv
    rpmPackageFileName dv = rpmPackageName dv <.> rpmExt
    rpmPackageName dv = stackProgName ++ "-" ++ rpmPackageVersionStr dv ++ "-" ++ rpmPackageIterationStr dv ++ ".x86_64"
    rpmPackageIterationStr DistroVersion{..} = "0." ++ dvCodeName
    rpmPackageVersionStr _ = stackVersionStr global
    ubuntuVersions =
        [ ("12.04", "precise")
        , ("14.04", "trusty")
        , ("14.10", "utopic")
        , ("15.04", "vivid")
        , ("15.10", "wily")
        , ("16.04", "xenial")
        , ("16.10", "yakkety") ]
    debianVersions =
        [ ("7", "wheezy")
        , ("8", "jessie") ]
    centosVersions =
        [ ("7", "el7")
        , ("6", "el6") ]
    fedoraVersions =
        [ ("22", "fc22")
        , ("23", "fc23")
        , ("24", "fc24") ]
    ubuntuDistro = "ubuntu"
    debianDistro = "debian"
    centosDistro = "centos"
    fedoraDistro = "fedora"
    anyDistroVersion distro = DistroVersion distro "*" "*"
    zipExt = ".zip"
    tarGzExt = tarExt <.> gzExt
    gzExt = ".gz"
    tarExt = ".tar"
    ascExt = ".asc"
    uploadExt = ".upload"
    debExt = ".deb"
    rpmExt = ".rpm"
-- | Upload file to Github release.
--
-- The target release is located by tag (--github-release-tag, defaulting to
-- @"v" ++ stackVersionStr@); the action fails hard if the tag is missing or
-- the uploaded asset does not come back in state "uploaded".
uploadToGithubRelease :: Global -> FilePath -> Maybe String -> Action ()
uploadToGithubRelease global@Global{..} file mUploadLabel = do
  need [file]
  putNormal $ "Uploading to Github: " ++ file
  GithubRelease{..} <- getGithubRelease
  resp <- liftIO $ callGithubApi global
    [(CI.mk $ S8.pack "Content-Type", defaultMimeLookup (T.pack file))]
    (Just file)
    -- The upload URL is a URI template; expand its {?name,label} part
    -- textually with the url-encoded file name and optional label.
    (replace
      "{?name,label}"
      ("?name=" ++ urlEncodeStr (takeFileName file) ++
       (case mUploadLabel of
         Nothing -> ""
         Just uploadLabel -> "&label=" ++ urlEncodeStr uploadLabel))
      relUploadUrl)
  case eitherDecode resp of
    Left e -> error ("Could not parse Github asset upload response (" ++ e ++ "):\n" ++ L8.unpack resp ++ "\n")
    Right (GithubReleaseAsset{..}) ->
      when (assetState /= "uploaded") $
        error ("Invalid asset state after Github asset upload: " ++ assetState)
  where
    urlEncodeStr = S8.unpack . urlEncode True . S8.pack
    getGithubRelease = do
      releases <- getGithubReleases
      let tag = fromMaybe ("v" ++ stackVersionStr global) gGithubReleaseTag
      return $ fromMaybe
        (error ("Could not find Github release with tag '" ++ tag ++ "'.\n" ++
                "Use --" ++ githubReleaseTagOptName ++ " option to specify a different tag."))
        (find (\r -> relTagName r == tag) releases)
    getGithubReleases :: Action [GithubRelease]
    getGithubReleases = do
      resp <- liftIO $ callGithubApi global
        [] Nothing "https://api.github.com/repos/commercialhaskell/stack/releases"
      case eitherDecode resp of
        Left e -> error ("Could not parse Github releases (" ++ e ++ "):\n" ++ L8.unpack resp ++ "\n")
        Right r -> return r
-- | Make a request to the Github API and return the response.
--
-- GET by default; when a file path is given, its contents are sent as a
-- POST body.  Requires a Github auth token ('gGithubAuthToken').
callGithubApi :: Global -> RequestHeaders -> Maybe FilePath -> String -> IO L8.ByteString
callGithubApi Global{..} headers mpostFile url = do
  req0 <- parseUrl url
  let authToken =
        fromMaybe
          (error $
             "Github auth token required.\n" ++
             "Use " ++ githubAuthTokenEnvVar ++ " environment variable\n" ++
             "or --" ++ githubAuthTokenOptName ++ " option to specify.")
          gGithubAuthToken
      req1 =
        req0
          -- Disable http-conduit's status check: non-2xx responses are
          -- returned to the caller rather than thrown as exceptions.
          { checkStatus = \_ _ _ -> Nothing
          , requestHeaders =
              [ (CI.mk $ S8.pack "Authorization", S8.pack $ "token " ++ authToken)
              , (CI.mk $ S8.pack "User-Agent", S8.pack "commercialhaskell/stack") ] ++
              headers }
  req <- case mpostFile of
    Nothing -> return req1
    Just postFile -> do
      lbs <- L8.readFile postFile
      return $ req1
        { method = S8.pack "POST"
        , requestBody = RequestBodyLBS lbs }
  manager <- newManager tlsManagerSettings
  runResourceT $ do
    res <- http req manager
    responseBody res $$+- CC.sinkLazy
-- | Create a .tar.gz file from files.  The paths should be absolute, and will
-- be made relative to the base directory in the tarball.
-- The archive is built and gzip-compressed entirely in memory.
writeTarGz :: FilePath -> FilePath -> [FilePath] -> Action ()
writeTarGz out baseDir inputFiles = liftIO $ do
    content <- Tar.pack baseDir $ map (dropDirectoryPrefix baseDir) inputFiles
    L8.writeFile out $ GZip.compress $ Tar.write content
-- | Drops a directory prefix from a path.  The prefix automatically has a
-- path separator character appended.  Fails (via 'error') if the path does
-- not begin with the prefix.  Both arguments are normalised with
-- 'toStandard' before comparison.
dropDirectoryPrefix :: FilePath -> FilePath -> FilePath
dropDirectoryPrefix prefix path =
    fromMaybe (error errMsg) (stripPrefix (toStandard prefix ++ "/") (toStandard path))
  where
    errMsg = "dropDirectoryPrefix: cannot drop " ++ show prefix ++ " from " ++ show path
-- | String representation of stack package version.
-- Taken from the 'PackageDescription' that 'main' reads from @stack.cabal@.
stackVersionStr :: Global -> String
stackVersionStr =
    display . pkgVersion . package . gStackPackageDescription
-- | Current operating system, extracted from Cabal's 'buildPlatform'.
platformOS :: OS
platformOS = case buildPlatform of
    Platform _ os -> os
-- | Directory in which to store build and intermediate files.
releaseDir :: FilePath
releaseDir = "_release"
-- | @GITHUB_AUTH_TOKEN@ environment variable name.
githubAuthTokenEnvVar :: String
githubAuthTokenEnvVar = "GITHUB_AUTH_TOKEN"
-- | @--github-auth-token@ command-line option name.
githubAuthTokenOptName :: String
githubAuthTokenOptName = "github-auth-token"
-- | @--github-release-tag@ command-line option name.
githubReleaseTagOptName :: String
githubReleaseTagOptName = "github-release-tag"
-- | @--gpg-key@ command-line option name.
gpgKeyOptName :: String
gpgKeyOptName = "gpg-key"
-- | @--allow-dirty@ command-line option name.
allowDirtyOptName :: String
allowDirtyOptName = "allow-dirty"
-- | @--arch@ command-line option name.
archOptName :: String
archOptName = "arch"
-- | @--binary-variant@ command-line option name.
binaryVariantOptName :: String
binaryVariantOptName = "binary-variant"
-- | @--upload-label@ command-line option name.
uploadLabelOptName :: String
uploadLabelOptName = "upload-label"
-- | @--no-test-haddocks@ command-line option name.
noTestHaddocksOptName :: String
noTestHaddocksOptName = "no-test-haddocks"
-- | @--build-args@ command-line option name.
buildArgsOptName :: String
buildArgsOptName = "build-args"
-- | @--static@ command-line option name.
staticOptName :: String
staticOptName = "static"
-- | Arguments to pass to all 'stack' invocations.
stackArgs :: Global -> [String]
stackArgs Global{..} = ["--install-ghc", "--arch=" ++ display gArch]
-- | Name of the 'stack' program.
stackProgName :: FilePath
stackProgName = "stack"
-- | Options to pass to invocations of gpg
gpgOptions :: String
gpgOptions = "--digest-algo=sha512"
-- | Linux distribution/version combination.
data DistroVersion = DistroVersion
  { dvDistro :: !String -- ^ distribution name, e.g. "ubuntu"
  , dvVersion :: !String -- ^ numeric version, e.g. "16.04"
  , dvCodeName :: !String } -- ^ release code name, e.g. "xenial"
-- | A Github release, as returned by the Github API.
-- Field names correspond to the JSON keys @upload_url@ and @tag_name@.
data GithubRelease = GithubRelease
  { relUploadUrl :: !String
  , relTagName :: !String }
  deriving (Show)
instance FromJSON GithubRelease where
  parseJSON = withObject "GithubRelease" $ \o ->
    GithubRelease
    <$> o .: T.pack "upload_url"
    <*> o .: T.pack "tag_name"
-- | A Github release asset, as returned by the Github API.
-- Only the @state@ key is decoded.
data GithubReleaseAsset = GithubReleaseAsset
  { assetState :: !String }
  deriving (Show)
instance FromJSON GithubReleaseAsset where
  parseJSON = withObject "GithubReleaseAsset" $ \o ->
    GithubReleaseAsset
    <$> o .: T.pack "state"
-- | Global values and options.
-- Populated in 'main' from command-line flags and external commands.
data Global = Global
  { gStackPackageDescription :: !PackageDescription
  , gGpgKey :: !String
  , gAllowDirty :: !Bool
  , gGithubAuthToken :: !(Maybe String)
  , gGithubReleaseTag :: !(Maybe String)
  , gGitRevCount :: !Int -- ^ number of commits (@git rev-list HEAD@)
  , gGitSha :: !String -- ^ output of @git rev-parse HEAD@
  , gProjectRoot :: !FilePath
  , gHomeDir :: !FilePath
  , gArch :: !Arch
  , gBinarySuffix :: !String
  , gUploadLabel :: (Maybe String)
  , gTestHaddocks :: !Bool
  , gBuildArgs :: [String] }
  deriving (Show)
| AndreasPK/stack | etc/scripts/release.hs | bsd-3-clause | 28,081 | 0 | 25 | 7,887 | 6,281 | 3,217 | 3,064 | 594 | 7 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ms-MY">
<title>Image Location and Privacy Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/imagelocationscanner/src/main/javahelp/org/zaproxy/zap/extension/imagelocationscanner/resources/help_ms_MY/helpset_ms_MY.hs | apache-2.0 | 995 | 78 | 66 | 162 | 419 | 212 | 207 | -1 | -1 |
-- The @FamInst@ type: family instance heads
{-# LANGUAGE CPP, GADTs #-}
module FamInst (
FamInstEnvs, tcGetFamInstEnvs,
checkFamInstConsistency, tcExtendLocalFamInstEnv,
tcLookupFamInst,
tcLookupDataFamInst, tcLookupDataFamInst_maybe,
tcInstNewTyCon_maybe, tcTopNormaliseNewTypeTF_maybe,
newFamInst,
-- * Injectivity
makeInjectivityErrors
) where
import HscTypes
import FamInstEnv
import InstEnv( roughMatchTcs )
import Coercion hiding ( substTy )
import TcEvidence
import LoadIface
import TcRnMonad
import SrcLoc
import TyCon
import CoAxiom
import DynFlags
import Module
import Outputable
import UniqFM
import FastString
import Util
import RdrName
import DataCon ( dataConName )
import Maybes
import Type
import TypeRep
import TcMType
import Name
import Panic
import VarSet
import Control.Monad
import Data.Map (Map)
import qualified Data.Map as Map
import Control.Arrow ( first, second )
#include "HsVersions.h"
{-
************************************************************************
* *
Making a FamInst
* *
************************************************************************
-}
-- All type variables in a FamInst must be fresh. This function
-- creates the fresh variables and applies the necessary substitution
-- It is defined here to avoid a dependency from FamInstEnv on the monad
-- code.
newFamInst :: FamFlavor -> CoAxiom Unbranched -> TcRnIf gbl lcl FamInst
-- Freshen the type variables of the FamInst branches
-- Called from the vectoriser monad too, hence the rather general type
-- All type variables in a FamInst must be fresh (see comment above),
-- so we clone the branch binders and apply the resulting substitution
-- to both sides of the single branch.
newFamInst flavor axiom@(CoAxiom { co_ax_tc = fam_tc })
  = do { (subst, tvs') <- freshenTyVarBndrs tvs
       ; return (FamInst { fi_fam = tyConName fam_tc
                         , fi_flavor = flavor
                         , fi_tcs = roughMatchTcs lhs
                           -- rough-match keys (see 'roughMatchTcs')
                         , fi_tvs = tvs'
                         , fi_tys = substTys subst lhs
                         , fi_rhs = substTy subst rhs
                         , fi_axiom = axiom }) }
  where
    CoAxBranch { cab_tvs = tvs
               , cab_lhs = lhs
               , cab_rhs = rhs } = coAxiomSingleBranch axiom
{-
************************************************************************
* *
Optimised overlap checking for family instances
* *
************************************************************************
For any two family instance modules that we import directly or indirectly, we
check whether the instances in the two modules are consistent, *unless* we can
be certain that the instances of the two modules have already been checked for
consistency during the compilation of modules that we import.
Why do we need to check? Consider
module X1 where module X2 where
data T1 data T2
type instance F T1 b = Int type instance F a T2 = Char
f1 :: F T1 a -> Int f2 :: Char -> F a T2
f1 x = x f2 x = x
Now if we import both X1 and X2 we could make (f2 . f1) :: Int -> Char.
Notice that neither instance is an orphan.
How do we know which pairs of modules have already been checked? Any pair of
modules where both modules occur in the `HscTypes.dep_finsts' set (of the
`HscTypes.Dependencies') of one of our directly imported modules must have
already been checked. Everything else, we check now. (So that we can be
certain that the modules in our `HscTypes.dep_finsts' are consistent.)
-}
-- The optimisation of overlap tests is based on determining pairs of modules
-- whose family instances need to be checked for consistency.
--
-- | An unordered pair of modules whose family instances need a mutual
-- consistency check.
data ModulePair = ModulePair Module Module

-- | Canonical (ascending) ordering of the two components, so that
-- logically-equal pairs compare equal regardless of orientation.
canon :: ModulePair -> (Module, Module)
canon (ModulePair m1 m2) = (min m1 m2, max m1 m2)

instance Eq ModulePair where
  mp1 == mp2 = canon mp1 == canon mp2

instance Ord ModulePair where
  compare mp1 mp2 = canon mp1 `compare` canon mp2

instance Outputable ModulePair where
  ppr (ModulePair m1 m2) = angleBrackets (ppr m1 <> comma <+> ppr m2)
-- Sets of module pairs
--
-- | A set of module pairs, represented as a unit-valued 'Map'.
type ModulePairSet = Map ModulePair ()

-- | Build a 'ModulePairSet' from a list of pairs; duplicates collapse.
listToSet :: [ModulePair] -> ModulePairSet
listToSet pairs = Map.fromList [ (p, ()) | p <- pairs ]
-- | Check family instances of each pair of family-instance modules for
-- mutual consistency, skipping pairs that were already checked when one
-- of our direct imports was compiled (see the overview comment above).
checkFamInstConsistency :: [Module] -> [Module] -> TcM ()
checkFamInstConsistency famInstMods directlyImpMods
  = do { dflags <- getDynFlags
       ; (eps, hpt) <- getEpsAndHpt
       ; let { -- Fetch the iface of a given module. Must succeed as
               -- all directly imported modules must already have been loaded.
               modIface mod =
                 case lookupIfaceByModule dflags hpt (eps_PIT eps) mod of
                   Nothing -> panicDoc "FamInst.checkFamInstConsistency"
                                       (ppr mod $$ pprHPT hpt)
                   Just iface -> iface
             ; hmiModule = mi_module . hm_iface
             ; hmiFamInstEnv = extendFamInstEnvList emptyFamInstEnv
                               . md_fam_insts . hm_details
               -- cache the home-package fam-inst envs keyed by module
             ; hpt_fam_insts = mkModuleEnv [ (hmiModule hmi, hmiFamInstEnv hmi)
                                           | hmi <- eltsUFM hpt]
             ; groups = map (dep_finsts . mi_deps . modIface)
                            directlyImpMods
             ; okPairs = listToSet $ concatMap allPairs groups
                 -- instances of okPairs are consistent
             ; criticalPairs = listToSet $ allPairs famInstMods
                 -- all pairs that we need to consider
             ; toCheckPairs = Map.keys $ criticalPairs `Map.difference` okPairs
                 -- the difference gives us the pairs we need to check now
             }
       ; mapM_ (check hpt_fam_insts) toCheckPairs
       }
  where
    -- all unordered pairs drawn from one list of modules
    allPairs [] = []
    allPairs (m:ms) = map (ModulePair m) ms ++ allPairs ms
    -- check one pair: every instance of m1 against m2's environment,
    -- for both overlap conflicts and injectivity conflicts
    check hpt_fam_insts (ModulePair m1 m2)
      = do { env1 <- getFamInsts hpt_fam_insts m1
           ; env2 <- getFamInsts hpt_fam_insts m2
           ; mapM_ (checkForConflicts (emptyFamInstEnv, env2))
                   (famInstEnvElts env1)
           ; mapM_ (checkForInjectivityConflicts (emptyFamInstEnv,env2))
                   (famInstEnvElts env1)
           }
-- | Fetch the family-instance environment of @mod@: from the supplied
-- home-package map when possible, otherwise by loading the module's
-- interface and consulting the EPS.
getFamInsts :: ModuleEnv FamInstEnv -> Module -> TcM FamInstEnv
getFamInsts hpt_fam_insts mod
  | Just env <- lookupModuleEnv hpt_fam_insts mod = return env
  | otherwise = do { _ <- initIfaceTcRn (loadSysInterface doc mod)
                   ; eps <- getEps
                     -- loading the interface populates the EPS map,
                     -- so this lookup is expected to succeed
                   ; return (expectJust "checkFamInstConsistency" $
                             lookupModuleEnv (eps_mod_fam_inst_env eps) mod) }
  where
    doc = ppr mod <+> ptext (sLit "is a family-instance module")
{-
************************************************************************
* *
Lookup
* *
************************************************************************
Look up the instance tycon of a family instance.
The match may be ambiguous (as we know that overlapping instances have
identical right-hand sides under overlapping substitutions - see
'FamInstEnv.lookupFamInstEnvConflicts'). However, the type arguments used
for matching must be equal to or be more specific than those of the family
instance declaration. We pick one of the matches in case of ambiguity; as
the right-hand sides are identical under the match substitution, the choice
does not matter.
Return the instance tycon and its type instance. For example, if we have
tcLookupFamInst 'T' '[Int]' yields (':R42T', 'Int')
then we have a coercion (ie, type instance of family instance coercion)
:Co:R42T Int :: T [Int] ~ :R42T Int
which implies that :R42T was declared as 'data instance T [a]'.
-}
-- | Look up a family instance for the given family 'TyCon' applied to
-- the given argument types.  Yields 'Nothing' unless the tycon is an
-- open family ('isOpenFamilyTyCon') with at least one matching instance;
-- with multiple (overlapping) matches an arbitrary one is returned, which
-- is fine because their right-hand sides coincide under the matching
-- substitution (see the commentary above).
tcLookupFamInst :: FamInstEnvs -> TyCon -> [Type] -> Maybe FamInstMatch
tcLookupFamInst fam_envs tycon tys
  | isOpenFamilyTyCon tycon
  , match : _ <- lookupFamInstEnv fam_envs tycon tys
  = Just match
  | otherwise
  = Nothing
-- | If @co :: T ts ~ rep_ty@ then:
--
-- > instNewTyCon_maybe T ts = Just (rep_ty, co)
--
-- Checks for a newtype, and for being saturated
-- Just like Coercion.instNewTyCon_maybe, but returns a TcCoercion
tcInstNewTyCon_maybe :: TyCon -> [TcType] -> Maybe (TcType, TcCoercion)
-- Delegates to 'instNewTyCon_maybe', wrapping the resulting coercion
-- as a 'TcCoercion'.
tcInstNewTyCon_maybe tc tys =
  case instNewTyCon_maybe tc tys of
    Nothing           -> Nothing
    Just (rep_ty, co) -> Just (rep_ty, TcCoercion co)
-- | Like 'tcLookupDataFamInst_maybe', but when there is no data family
-- to unwrap the original tycon and arguments are handed back unchanged,
-- paired with a representational reflexivity coercion.
tcLookupDataFamInst :: FamInstEnvs -> TyCon -> [TcType]
                    -> (TyCon, [TcType], Coercion)
tcLookupDataFamInst fam_inst_envs tc tc_args =
  case tcLookupDataFamInst_maybe fam_inst_envs tc tc_args of
    Just unwrapped -> unwrapped
    Nothing        -> (tc, tc_args, refl_co)
  where
    refl_co = mkReflCo Representational (mkTyConApp tc tc_args)
tcLookupDataFamInst_maybe :: FamInstEnvs -> TyCon -> [TcType]
                          -> Maybe (TyCon, [TcType], Coercion)
-- ^ Converts a data family type (eg F [a]) to its representation type (eg FList a)
-- and returns a coercion between the two: co :: F [a] ~R FList a
tcLookupDataFamInst_maybe fam_inst_envs tc tc_args
  | isDataFamilyTyCon tc
    -- only data families have representation tycons to unwrap
  , match : _ <- lookupFamInstEnv fam_inst_envs tc tc_args
  , FamInstMatch { fim_instance = rep_fam
                 , fim_tys = rep_args } <- match
  , let ax = famInstAxiom rep_fam
        rep_tc = dataFamInstRepTyCon rep_fam
        co = mkUnbranchedAxInstCo Representational ax rep_args
  = Just (rep_tc, rep_args, co)
  | otherwise
  = Nothing
-- | 'tcTopNormaliseNewTypeTF_maybe' gets rid of top-level newtypes,
-- potentially looking through newtype instances.
--
-- It is only used by the type inference engine (specifically, when
-- soliving 'Coercible' instances), and hence it is careful to unwrap
-- only if the relevant data constructor is in scope. That's why
-- it get a GlobalRdrEnv argument.
--
-- It is careful not to unwrap data/newtype instances if it can't
-- continue unwrapping. Such care is necessary for proper error
-- messages.
--
-- It does not look through type families.
-- It does not normalise arguments to a tycon.
--
-- Always produces a representational coercion.
tcTopNormaliseNewTypeTF_maybe :: FamInstEnvs
                              -> GlobalRdrEnv
                              -> Type
                              -> Maybe (TcCoercion, Type)
tcTopNormaliseNewTypeTF_maybe faminsts rdr_env ty
-- cf. FamInstEnv.topNormaliseType_maybe and Coercion.topNormaliseNewType_maybe
  = fmap (first TcCoercion) $ topNormaliseTypeX_maybe stepper ty
  where
    stepper = unwrap_newtype `composeSteppers` unwrap_newtype_instance
    -- For newtype instances we take a double step or nothing, so that
    -- we don't return the representation type of the newtype instance,
    -- which would lead to terrible error messages
    unwrap_newtype_instance rec_nts tc tys
      | Just (tc', tys', co) <- tcLookupDataFamInst_maybe faminsts tc tys
      = modifyStepResultCo (co `mkTransCo`) $
        unwrap_newtype rec_nts tc' tys'
      | otherwise = NS_Done
    unwrap_newtype rec_nts tc tys
      | data_cons_in_scope tc
      = unwrapNewTypeStepper rec_nts tc tys
      | otherwise
      = NS_Done
    -- Unwrap only when the newtype's data constructors are visible to the
    -- user (see the haddock above: needed for proper error messages).
    data_cons_in_scope :: TyCon -> Bool
    data_cons_in_scope tc
      = isWiredInName (tyConName tc) ||
        (not (isAbstractTyCon tc) && all in_scope data_con_names)
      where
        data_con_names = map dataConName (tyConDataCons tc)
        in_scope dc = not $ null $ lookupGRE_Name rdr_env dc
{-
************************************************************************
* *
Extending the family instance environment
* *
************************************************************************
-}
-- | Add new locally-defined family instances to the global environment,
-- running 'thing_inside' with the extended environment.  Instances that
-- conflict are reported (by 'addLocalFamInst') and dropped.
tcExtendLocalFamInstEnv :: [FamInst] -> TcM a -> TcM a
tcExtendLocalFamInstEnv fam_insts thing_inside
 = do { env <- getGblEnv
      ; (inst_env', fam_insts') <- foldlM addLocalFamInst
                                          (tcg_fam_inst_env env, tcg_fam_insts env)
                                          fam_insts
      ; let env' = env { tcg_fam_insts = fam_insts'
                       , tcg_fam_inst_env = inst_env' }
      ; setGblEnv env' thing_inside
      }
-- Check that the proposed new instance is OK,
-- and then add it to the home inst env
-- This must be lazy in the fam_inst arguments, see Note [Lazy axiom match]
-- in FamInstEnv.hs
addLocalFamInst :: (FamInstEnv,[FamInst])
                -> FamInst
                -> TcM (FamInstEnv, [FamInst])
-- Check that the proposed new instance is OK and, if so, fold it into
-- the accumulated (env, instances) pair; otherwise report the conflict
-- and return the pair unchanged.
addLocalFamInst (home_fie, my_fis) fam_inst
        -- home_fie includes home package and this module
        -- my_fies is just the ones from this module
  = do { traceTc "addLocalFamInst" (ppr fam_inst)
       ; isGHCi <- getIsGHCi
       ; mod <- getModule
       ; traceTc "alfi" (ppr mod $$ ppr isGHCi)
           -- In GHCi, we *override* any identical instances
           -- that are also defined in the interactive context
           -- See Note [Override identical instances in GHCi] in HscTypes
       ; let home_fie'
               | isGHCi = deleteFromFamInstEnv home_fie fam_inst
               | otherwise = home_fie
       -- Load imported instances, so that we report
       -- overlaps correctly
       ; eps <- getEps
       ; let inst_envs = (eps_fam_inst_env eps, home_fie')
             home_fie'' = extendFamInstEnv home_fie fam_inst
             -- NOTE(review): this extends home_fie, not home_fie'.  In the
             -- GHCi case the instance deleted for override therefore stays
             -- in the extended environment — confirm this is intended.
       -- Check for conflicting instance decls and injectivity violations
       ; no_conflict <- checkForConflicts inst_envs fam_inst
       ; injectivity_ok <- checkForInjectivityConflicts inst_envs fam_inst
       ; if no_conflict && injectivity_ok then
            return (home_fie'', fam_inst : my_fis)
         else
            return (home_fie, my_fis) }
{-
************************************************************************
* *
Checking an instance against conflicts with an instance env
* *
************************************************************************
Check whether a single family instance conflicts with those in two instance
environments (one for the EPS and one for the HPT).
-}
-- | Check one family instance against the given environments; report an
-- error for any overlap conflict and return 'True' iff there was none.
checkForConflicts :: FamInstEnvs -> FamInst -> TcM Bool
checkForConflicts inst_envs fam_inst
  = do { let conflicts = lookupFamInstEnvConflicts inst_envs fam_inst
             no_conflicts = null conflicts
       ; traceTc "checkForConflicts" $
         vcat [ ppr (map fim_instance conflicts)
              , ppr fam_inst
              -- , ppr inst_envs
              ]
       ; unless no_conflicts $ conflictInstErr fam_inst conflicts
       ; return no_conflicts }
-- | Check whether a new open type family equation can be added without
-- violating injectivity annotation supplied by the user. Returns True when
-- this is possible and False if adding this equation would violate injectivity
-- annotation.  Any violations are reported at the offending source spans.
checkForInjectivityConflicts :: FamInstEnvs -> FamInst -> TcM Bool
checkForInjectivityConflicts instEnvs famInst
  | isTypeFamilyTyCon tycon
    -- type family is injective in at least one argument
  , Injective inj <- familyTyConInjectivityInfo tycon = do
    { let axiom = coAxiomSingleBranch (fi_axiom famInst)
          conflicts = lookupFamInstEnvInjectivityConflicts inj instEnvs famInst
          -- see Note [Verifying injectivity annotation] in FamInstEnv
          errs = makeInjectivityErrors tycon axiom inj conflicts
    ; mapM_ (\(err, span) -> setSrcSpan span $ addErr err) errs
    ; return (null errs)
    }
    -- if there was no injectivity annotation or tycon does not represent a
    -- type family we report no conflicts
  | otherwise = return True
  where tycon = famInstTyCon famInst
-- | Build a list of injectivity errors together with their source locations.
-- Performs the four injectivity checks (conflicts, unused injective
-- variables, type-family-headed RHS, bare-variable RHS) and collects an
-- error for each one that fails.
makeInjectivityErrors
   :: TyCon        -- ^ Type family tycon for which we generate errors
   -> CoAxBranch   -- ^ Currently checked equation (represented by axiom)
   -> [Bool]       -- ^ Injectivity annotation
   -> [CoAxBranch] -- ^ List of injectivity conflicts
   -> [(SDoc, SrcSpan)]
makeInjectivityErrors tycon axiom inj conflicts
  = ASSERT2( any id inj, text "No injective type variables" )
    let lhs = coAxBranchLHS axiom
        rhs = coAxBranchRHS axiom
        are_conflicts = not $ null conflicts
        unused_inj_tvs = unusedInjTvsInRHS inj lhs rhs
        inj_tvs_unused = not $ isEmptyVarSet unused_inj_tvs
        tf_headed = isTFHeaded rhs
        bare_variables = bareTvInRHSViolated lhs rhs
        wrong_bare_rhs = not $ null bare_variables
        -- shared way of rendering a herald plus the offending equations,
        -- located at the first equation's span
        err_builder herald eqns
          = ( herald $$ vcat (map (pprCoAxBranch tycon) eqns)
            , coAxBranchSpan (head eqns) )
        errorIf p f = if p then [f err_builder axiom] else []
     in errorIf are_conflicts (conflictInjInstErr conflicts )
     ++ errorIf inj_tvs_unused (unusedInjectiveVarsErr unused_inj_tvs)
     ++ errorIf tf_headed tfHeadedErr
     ++ errorIf wrong_bare_rhs (bareVariableInRHSErr bare_variables)
-- | Return a list of type variables that the function is injective in and that
-- do not appear on injective positions in the RHS of a family instance
-- declaration.
unusedInjTvsInRHS :: [Bool] -> [Type] -> Type -> TyVarSet
-- INVARIANT: [Bool] list contains at least one True value
-- See Note [Verifying injectivity annotation]. This function implements fourth
-- check described there.
-- In theory, instead of implementing this whole check in this way, we could
-- attempt to unify equation with itself. We would reject exactly the same
-- equations but this method gives us more precise error messages by returning
-- precise names of variables that are not mentioned in the RHS.
unusedInjTvsInRHS injList lhs rhs =
  injLHSVars `minusVarSet` injRhsVars
  where
    -- set of type and kind variables in which type family is injective
    injLHSVars = tyVarsOfTypes (filterByList injList lhs)
    -- set of type variables appearing in the RHS on an injective position.
    -- For all returned variables we assume their associated kind variables
    -- also appear in the RHS.
    injRhsVars = closeOverKinds $ collectInjVars rhs
    -- Collect all type variables that are either arguments to a type
    -- constructor or to injective type families.
    collectInjVars :: Type -> VarSet
    collectInjVars ty | Just (ty1, ty2) <- splitAppTy_maybe ty
      = collectInjVars ty1 `unionVarSet` collectInjVars ty2
    collectInjVars (TyVarTy v)
      = unitVarSet v
    collectInjVars (TyConApp tc tys)
      | isTypeFamilyTyCon tc = collectInjTFVars tys
                                 (familyTyConInjectivityInfo tc)
      | otherwise = mapUnionVarSet collectInjVars tys
    collectInjVars (LitTy {})
      = emptyVarSet
    collectInjVars (FunTy arg res)
      = collectInjVars arg `unionVarSet` collectInjVars res
    collectInjVars (AppTy fun arg)
      = collectInjVars fun `unionVarSet` collectInjVars arg
    -- no forall types in the RHS of a type family
    collectInjVars (ForAllTy _ _) =
        panic "unusedInjTvsInRHS.collectInjVars"
    -- For a nested type-family application, only its injective argument
    -- positions contribute variables.
    collectInjTFVars :: [Type] -> Injectivity -> VarSet
    collectInjTFVars _ NotInjective
      = emptyVarSet
    collectInjTFVars tys (Injective injList)
      = mapUnionVarSet collectInjVars (filterByList injList tys)
-- | Is the type headed by a saturated type-family application?
-- See Note [Verifying injectivity annotation]; this implements the third
-- check described there.  Looks through type synonyms via 'tcView'.
isTFHeaded :: Type -> Bool
isTFHeaded ty =
  case tcView ty of
    Just ty' -> isTFHeaded ty'
    Nothing  ->
      case ty of
        TyConApp tc args -> isTypeFamilyTyCon tc
                            && tyConArity tc == length args
        _                -> False
-- | When the RHS is a bare type variable, return the LHS patterns that are
-- /not/ bare type variables (the violating ones); otherwise return @[]@.
-- See Note [Verifying injectivity annotation]; this implements the second
-- check described there.
bareTvInRHSViolated :: [Type] -> Type -> [Type]
bareTvInRHSViolated pats rhs
  | isTyVarTy rhs = [ pat | pat <- pats, not (isTyVarTy pat) ]
  | otherwise     = []
-- | Report a \"Conflicting family instance declarations\" error against the
-- first conflicting match.  The caller must supply a non-empty list.
conflictInstErr :: FamInst -> [FamInstMatch] -> TcRn ()
conflictInstErr fam_inst conflictingMatch
  | (FamInstMatch { fim_instance = confInst }) : _ <- conflictingMatch
  = let (err, span) = makeFamInstsErr
                          (text "Conflicting family instance declarations:")
                          [fam_inst, confInst]
    in setSrcSpan span $ addErr err
  | otherwise
  = panic "conflictInstErr"
-- | Type of functions that use error message and a list of axioms to build full
-- error message (with a source location) for injective type families.
type InjErrorBuilder = SDoc -> [CoAxBranch] -> (SDoc, SrcSpan)
-- | Build injectivity error herald common to all injectivity errors.
-- The argument says whether a single equation is being reported.
injectivityErrorHerald :: Bool -> SDoc
injectivityErrorHerald isSingular =
  text "Type family equation" <> s isSingular <+> text "violate" <>
  s (not isSingular) <+> text "injectivity annotation" <>
  if isSingular then dot else colon
-- Above is an ugly hack. We want this: "sentence. herald:" (note the dot and
-- colon). But if herald is empty we want "sentence:" (note the colon). We
-- can't test herald for emptiness so we rely on the fact that herald is empty
-- only when isSingular is False. If herald is non empty it must end with a
-- colon.
  where
    -- pluralising suffix: "s" when plural, nothing when singular
    s False = text "s"
    s True = empty
-- | Build error message for a pair of equations violating an injectivity
-- annotation.  The conflict list is expected to be non-empty (enforced by
-- the caller, 'makeInjectivityErrors').
conflictInjInstErr :: [CoAxBranch] -> InjErrorBuilder -> CoAxBranch
                   -> (SDoc, SrcSpan)
conflictInjInstErr conflictingEqns errorBuilder tyfamEqn
  | confEqn : _ <- conflictingEqns
  = errorBuilder (injectivityErrorHerald False) [confEqn, tyfamEqn]
  | otherwise
  = panic "conflictInjInstErr"
-- | Build error message for equation with injective type variables unused in
-- the RHS, paired with the equation's source span.
unusedInjectiveVarsErr :: TyVarSet -> InjErrorBuilder -> CoAxBranch
                       -> (SDoc, SrcSpan)
unusedInjectiveVarsErr unused_tyvars errorBuilder tyfamEqn
  = errorBuilder (injectivityErrorHerald True $$ msg)
                 [tyfamEqn]
    where
      tvs = varSetElemsKvsFirst unused_tyvars
      has_types = any isTypeVar tvs
      has_kinds = any isKindVar tvs
      doc = sep [ what <+> text "variable" <>
                  plural tvs <+> pprQuotedList tvs
                , text "cannot be inferred from the right-hand side." ]
      what = case (has_types, has_kinds) of
               (True, True) -> text "Type and kind"
               (True, False) -> text "Type"
               (False, True) -> text "Kind"
               -- impossible: we are only called with a non-empty set of
               -- unused injective variables
               (False, False) -> pprPanic "unusedInjectiveVarsErr" $
                                 ppr unused_tyvars
      -- Suggest -fprint-explicit-kinds when kind variables are involved
      -- but not currently printed.
      print_kinds_info = sdocWithDynFlags $ \ dflags ->
        if has_kinds && not (gopt Opt_PrintExplicitKinds dflags)
        then text "(enabling -fprint-explicit-kinds might help)"
        else empty
      -- Was previously bound to a name that shadowed this very function;
      -- renamed to 'msg' to avoid the confusing shadowing.
      msg = doc $$ print_kinds_info $$
            text "In the type family equation:"
-- | Build error message for equation that has a type family call at the top
-- level of RHS (cf. 'isTFHeaded', the third check in
-- Note [Verifying injectivity annotation]).
tfHeadedErr :: InjErrorBuilder -> CoAxBranch
            -> (SDoc, SrcSpan)
tfHeadedErr errorBuilder famInst
  = errorBuilder (injectivityErrorHerald True $$
                  text "RHS of injective type family equation cannot" <+>
                  text "be a type family:") [famInst]
-- | Build error message for equation that has a bare type variable in the RHS
-- but LHS pattern is not a bare type variable (cf. 'bareTvInRHSViolated',
-- the second check in Note [Verifying injectivity annotation]).
bareVariableInRHSErr :: [Type] -> InjErrorBuilder -> CoAxBranch
                     -> (SDoc, SrcSpan)
bareVariableInRHSErr tys errorBuilder famInst
  = errorBuilder (injectivityErrorHerald True $$
                  text "RHS of injective type family equation is a bare" <+>
                  text "type variable" $$
                  text "but these LHS type and kind patterns are not bare" <+>
                  text "variables:" <+> pprQuotedList tys) [famInst]
-- | Render a herald plus a list of family instances (non-empty), located
-- at the source span of the earliest instance.
makeFamInstsErr :: SDoc -> [FamInst] -> (SDoc, SrcSpan)
makeFamInstsErr herald insts
  = ASSERT( not (null insts) )
    ( hang herald
         2 (vcat [ pprCoAxBranchHdr (famInstAxiom fi) 0
                 | fi <- sorted ])
    , srcSpan )
  where
    getSpan = getSrcLoc . famInstAxiom
    sorted = sortWith getSpan insts
    fi1 = head sorted
    srcSpan = coAxBranchSpan (coAxiomSingleBranch (famInstAxiom fi1))
    -- The sortWith just arranges that instances are displayed in order
    -- of source location, which reduces wobbling in error messages,
    -- and is better for users
tcGetFamInstEnvs :: TcM FamInstEnvs
-- | Gets both the external-package inst-env
-- and the home-pkg inst env (includes module being compiled)
tcGetFamInstEnvs
  = do { eps <- getEps; env <- getGblEnv
       ; return (eps_fam_inst_env eps, tcg_fam_inst_env env) }
| ghc-android/ghc | compiler/typecheck/FamInst.hs | bsd-3-clause | 25,943 | 0 | 16 | 7,307 | 4,199 | 2,207 | 1,992 | -1 | -1 |
module FunIn3 where
--Any unused parameter to a definition can be removed.
--In this example: remove the parameter 'x' (or 'z') to 'foo'
-- The parameter 'z@x' is deliberately unused: this module is a fixture for
-- the remove-parameter refactoring test (see the header comment above).
foo z@x y = h + t where (h,t) = head $ zip [1..7] [3..y]
-- Note: 'main' here is a plain Int result, not an IO action.
main :: Int
main = foo 10 20
| kmate/HaRe | old/testing/rmOneParameter/FunIn3.hs | bsd-3-clause | 232 | 0 | 9 | 55 | 71 | 40 | 31 | 4 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for "Ganeti.Errors".
-}
{-
Copyright (C) 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.Errors (testErrors) where
import Test.QuickCheck
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import qualified Ganeti.Errors as Errors
-- Derive Arbitrary instances (via Template Haskell) for the types the
-- property below quantifies over.
$(genArbitrary ''Errors.ErrorCode)
$(genArbitrary ''Errors.GanetiException)
-- | Tests error serialisation.
prop_GenericError_serialisation :: Errors.GanetiException -> Property
prop_GenericError_serialisation = testSerialisation
-- NOTE(review): 'testSuite' (from Test.Ganeti.TestHelper) is applied at the
-- top level, so it splices the listed properties into a test group here.
testSuite "Errors"
          [ 'prop_GenericError_serialisation
          ]
| ribag/ganeti-experiments | test/hs/Test/Ganeti/Errors.hs | gpl-2.0 | 1,316 | 0 | 9 | 207 | 98 | 57 | 41 | 13 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module PackageTests.Freeze.Check
( tests
) where
import PackageTests.PackageTester
import Test.Tasty
import Test.Tasty.HUnit
import qualified Control.Exception.Extensible as E
import Data.List (intercalate, isInfixOf)
import System.Directory (doesFileExist, removeFile)
import System.FilePath ((</>))
import System.IO.Error (isDoesNotExistError)
-- | Directory of the Freeze test package, relative to the test root.
dir :: FilePath
dir = packageTestsDirectory </> "Freeze"
-- | Test cases for @cabal freeze@.  Each case starts from a clean state
-- (no @cabal.config@), runs @cabal freeze@ with the given flags, and
-- inspects the resulting @cabal.config@.
tests :: TestsPaths -> [TestTree]
tests paths =
    [ testCase "runs without error" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir []
          assertFreezeSucceeded result

    , testCase "freezes direct dependencies" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir []
          assertFreezeSucceeded result
          c <- readCabalConfig
          assertBool ("should have frozen base\n" ++ c) $
              " base ==" `isInfixOf` (intercalate " " $ lines $ c)

    , testCase "freezes transitory dependencies" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir []
          assertFreezeSucceeded result
          c <- readCabalConfig
          assertBool ("should have frozen ghc-prim\n" ++ c) $
              " ghc-prim ==" `isInfixOf` (intercalate " " $ lines $ c)

    , testCase "does not freeze packages which are not dependend upon" $ do
          -- XXX Test this against a package installed in the sandbox but
          -- not depended upon.
          removeCabalConfig
          result <- cabal_freeze paths dir []
          assertFreezeSucceeded result
          c <- readCabalConfig
          assertBool ("should not have frozen exceptions\n" ++ c) $ not $
              " exceptions ==" `isInfixOf` (intercalate " " $ lines $ c)

    , testCase "does not include a constraint for the package being frozen" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir []
          assertFreezeSucceeded result
          c <- readCabalConfig
          assertBool ("should not have frozen self\n" ++ c) $ not $
              " my ==" `isInfixOf` (intercalate " " $ lines $ c)

    , testCase "--dry-run does not modify the cabal.config file" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir ["--dry-run"]
          assertFreezeSucceeded result
          c <- doesFileExist $ dir </> "cabal.config"
          assertBool "cabal.config file should not have been created" (not c)

    , testCase "--enable-tests freezes test dependencies" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir ["--enable-tests"]
          assertFreezeSucceeded result
          c <- readCabalConfig
          assertBool ("should have frozen test-framework\n" ++ c) $
              " test-framework ==" `isInfixOf` (intercalate " " $ lines $ c)

    , testCase "--disable-tests does not freeze test dependencies" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir ["--disable-tests"]
          assertFreezeSucceeded result
          c <- readCabalConfig
          assertBool ("should not have frozen test-framework\n" ++ c) $ not $
              " test-framework ==" `isInfixOf` (intercalate " " $ lines $ c)

    -- Fixed: this case previously passed --disable-benchmarks and asserted
    -- criterion was NOT frozen, duplicating the following case and
    -- contradicting its own name.  It now mirrors the --enable-tests case.
    , testCase "--enable-benchmarks freezes benchmark dependencies" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir ["--enable-benchmarks"]
          assertFreezeSucceeded result
          c <- readCabalConfig
          assertBool ("should have frozen criterion\n" ++ c) $
              " criterion ==" `isInfixOf` (intercalate " " $ lines $ c)

    , testCase "--disable-benchmarks does not freeze benchmark dependencies" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir ["--disable-benchmarks"]
          assertFreezeSucceeded result
          c <- readCabalConfig
          assertBool ("should not have frozen criterion\n" ++ c) $ not $
              " criterion ==" `isInfixOf` (intercalate " " $ lines $ c)
    ]
-- | Delete the package's @cabal.config@, treating a missing file as success.
removeCabalConfig :: IO ()
removeCabalConfig =
    removeFile (dir </> "cabal.config") `E.catch` ignoreMissing
  where
    ignoreMissing :: IOError -> IO ()
    ignoreMissing e
      | isDoesNotExistError e = return ()
      | otherwise             = E.throw e
-- | Read the whole @cabal.config@ strictly, so the handle is closed on return.
readCabalConfig :: IO String
readCabalConfig = do
    config <- readFile $ dir </> "cabal.config"
    -- Ensure that the file is closed so that it can be
    -- deleted by the next test on Windows.  'readFile' is lazy, so forcing
    -- the spine with 'length' drives it to EOF, which closes the handle.
    length config `seq` return config
| rimmington/cabal | cabal-install/tests/PackageTests/Freeze/Check.hs | bsd-3-clause | 4,459 | 0 | 14 | 1,262 | 993 | 491 | 502 | 92 | 2 |
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# OPTIONS_GHC -fno-warn-partial-type-signatures #-}
-- Type-level constant: ignores its argument and returns '()'.
type Empty a = ()
foo :: expr a -> expr a -> expr (Empty a)
foo = undefined
newtype Expr a = SPT {run :: String}
-- 'pt1' spells out the result type explicitly ...
pt1 :: forall a ptexpr . ptexpr a -> ptexpr (Empty a)
pt1 a = foo a a
-- ... while 'pt2' uses a partial type signature (wildcard) in its place.
pt2 :: forall a ptexpr . ptexpr a -> ptexpr _
pt2 a = foo a a
main :: IO ()
main = do
  -- This typechecks without any trouble.
  putStrLn $ run $ pt1 @Int @Expr undefined
  -- This should also typecheck, but doesn't since GHC seems to mix up the
  -- order of the type variables.
  putStrLn $ run $ pt2 @Int @Expr undefined
| shlevy/ghc | testsuite/tests/typecheck/should_compile/T13524.hs | bsd-3-clause | 697 | 0 | 9 | 162 | 197 | 104 | 93 | 16 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module ReifyPlusTypeInferenceBugs where
import Language.Haskell.TH
import System.IO
a = 1
-- Empty top-level splice: ends the current declaration group, so the
-- bindings above it become visible to 'reify' in later splices.
$(return [])
-- Reifying 'a' from inside the splice that defines 'b'.
b = $(do VarI _ t _ <- reify 'a
         runIO $ putStrLn ("inside b: " ++ pprint t)
         [| undefined |])
c = $([| True |])
$(return [])
-- Reifying 'c' both from inside a definition ('d') ...
d = $(do VarI _ t _ <- reify 'c
         runIO $ putStrLn ("inside d: " ++ pprint t)
         [| undefined |] )
-- ... and from a free-standing declaration splice.
$(do VarI _ t _ <- reify 'c
     runIO $ putStrLn ("type of c: " ++ pprint t)
     return [] )
e = $([| True |])
$(return [])
f = $(do VarI _ t _ <- reify 'e
         runIO $ putStrLn ("inside f: " ++ pprint t)
         [| undefined |] )
$(do VarI _ t _ <- reify 'e
     runIO $ putStrLn ("type of e: " ++ pprint t)
     return [] )
-- Flush both handles so the compile-time output appears deterministically.
$( runIO $ do hFlush stdout
              hFlush stderr
              return [] )
| sdiehl/ghc | testsuite/tests/th/T2222.hs | bsd-3-clause | 820 | 0 | 13 | 268 | 377 | 181 | 196 | 28 | 1 |
module Package03 where
import Data.Map
import Map
import Data.Set
import Data.IntMap
| urbanslug/ghc | testsuite/tests/package/package03.hs | bsd-3-clause | 85 | 0 | 4 | 11 | 22 | 14 | 8 | 5 | 0 |
import Common
-- Sysfs directory of the Apple SMC keyboard-backlight LED device.
p = "/sys/devices/platform/applesmc.768/leds/smc::kbd_backlight"
-- Current-brightness attribute path.
b = p ++ "/brightness"
-- Maximum-brightness attribute path.
m = p ++ "/max_brightness"
-- Delegate to 'changeBl' (imported from Common) with the two paths.
main = changeBl b m
{-# LANGUAGE CPP #-}
module GHCJS.DOM.RequestAnimationFrameCallback (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.RequestAnimationFrameCallback
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.RequestAnimationFrameCallback
#else
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/RequestAnimationFrameCallback.hs | mit | 400 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Control.Exception
import Test.Hspec
import Test.Kafka.Managed
import qualified Network as N
import qualified System.IO.Temp as Temp
import qualified Kafka
import qualified KafkaSpec
-- | Ask the OS for an ephemeral port by listening on port 0, then close
-- the socket and return the port number that was assigned.
-- NOTE(review): another process could grab the port between closing the
-- socket and Kafka binding it; an inherent race accepted for tests.
getRandomUnusedPort :: IO Int
getRandomUnusedPort =
    bracket (N.listenOn $ N.PortNumber 0) N.sClose $ \socket -> do
        (N.PortNumber num) <- N.socketPort socket
        return (fromIntegral num)
main :: IO ()
main = do
    -- Pick a free port, then run a managed Kafka broker against a
    -- temporary log directory for the duration of the spec.
    kafkaPort <- getRandomUnusedPort
    Temp.withSystemTempDirectory "kafka-logs" $ \logDir ->
        let kafkaConfig = KafkaConfig {
              kafkaLogDirectory = logDir
            , kafkaServerPort = kafkaPort
            }
        in withManagedKafka kafkaConfig $
           Kafka.withConnection "localhost" kafkaPort $
             hspec . KafkaSpec.spec
| abhinav/kafka-client | integration-test/IntegrationTest.hs | mit | 831 | 0 | 15 | 187 | 212 | 115 | 97 | 24 | 1 |
--------------------------------------------------------------------------------
-- | Socket.IO Protocol 1.0
{-# LANGUAGE OverloadedStrings #-}
module Web.SocketIO.Protocol
( demultiplexMessage
, parseFramedMessage
, parsePath
) where
--------------------------------------------------------------------------------
import Web.SocketIO.Types
--------------------------------------------------------------------------------
import Control.Applicative ((<$>), (<*>))
import Data.Aeson
import qualified Data.ByteString as B
import Data.Conduit
import Data.Conduit.Attoparsec (conduitParserEither)
import Data.Attoparsec.ByteString.Lazy
import Data.Attoparsec.ByteString.Char8 (digit, decimal)
import Prelude hiding (take, takeWhile)
--------------------------------------------------------------------------------
-- | Demultiplexing messages: stream raw bytes in, emit each parsed
-- 'Message'.  A parse failure is fatal — the conduit calls 'error'
-- with the parser diagnostics.
demultiplexMessage :: Conduit ByteString IO Message
demultiplexMessage = do
    conduitParserEither framedMessageParser =$= awaitForever go
    where go (Left s) = error $ show s
          -- Each successful parse yields a batch; emit them one by one.
          go (Right (_, p)) = mapM yield p
-- | Run the given parser on the payload of a single frame.  Frames are
-- delimited by the Unicode replacement character U+FFFD: the delimiter,
-- a decimal byte count, the delimiter again, then exactly that many
-- bytes of payload.  A failed inner parse is fatal ('error').
frameParser :: Parser a -> Parser a
frameParser parser = do
  string "�"
  len <- decimal
  string "�"
  x <- take len
  case parseOnly parser x of
    Left e -> error e
    Right r -> return r
--------------------------------------------------------------------------------
-- | Parse a batch of messages: either one or more U+FFFD-framed
-- messages, or (fallback) zero or more unframed messages.
framedMessageParser :: Parser [Message]
framedMessageParser = choice [many1 (frameParser messageParser), many' messageParser]
--------------------------------------------------------------------------------
-- | Parse a (possibly framed) batch of messages; exposed for testing.
-- A parse failure is fatal, matching 'demultiplexMessage'.
parseFramedMessage :: ByteString -> Framed Message
parseFramedMessage = either error Framed . parseOnly framedMessageParser
--------------------------------------------------------------------------------
-- | Parse a single (unframed) socket.io 1.0 message.  The leading digit
-- selects the message type: 0 disconnect, 1 connect, 2 heartbeat,
-- 3 plain message, 4 JSON message, 5 event, 6 ACK (with or without a
-- payload after @+@), 7 error, 8 noop; any other digit is treated as a
-- noop.  Alternative orderings inside 'choice' matter: the variant that
-- consumes more input is tried first.
messageParser :: Parser Message
messageParser = do
  n <- digit
  case n of
    '0' -> choice
        [ idParser >> endpointParser >>= return . MsgDisconnect
        , return $ MsgDisconnect NoEndpoint
        ]
    '1' -> choice
        [ idParser >> endpointParser >>= return . MsgConnect
        , return $ MsgConnect NoEndpoint
        ]
    '2' -> return MsgHeartbeat
    '3' -> Msg <$> idParser
               <*> endpointParser
               <*> dataParser
    '4' -> MsgJSON <$> idParser
                   <*> endpointParser
                   <*> dataParser
    '5' -> MsgEvent <$> idParser
                    <*> endpointParser
                    <*> eventParser
    '6' -> choice
        [ do string ":::"
             d <- decimal
             string "+"
             x <- takeWhile (const True)
             return $ MsgACK (ID d) (if B.null x then NoData else Data x)
        , do string ":::"
             d <- decimal
             return $ MsgACK (ID d) NoData
        ]
    '7' -> string ":" >> MsgError <$> endpointParser <*> dataParser
    '8' -> return MsgNoop
    _ -> return MsgNoop
-- | Parse the message-id field: @:<n>+@ (sender expects an ACK, parsed
-- as 'IDPlus'), @:<n>@ (plain 'ID'), or a bare @:@ ('NoID').  The
-- alternatives are tried in that order so the @+@ form wins when present.
idParser :: Parser ID
idParser = choice
    [ string ":" >> decimal >>= plus >>= return . IDPlus
    , string ":" >> decimal >>= return . ID
    , string ":" >> return NoID
    ]
    where plus n = string "+" >> return n
-- | Parse the endpoint field after a @:@; an empty field yields
-- 'NoEndpoint'.  58 is the ASCII code of ':'.
endpointParser :: Parser Endpoint
endpointParser = do
    string ":"
    option NoEndpoint (takeWhile1 (/= 58) >>= return . Endpoint)
-- | Parse the data field after a @:@; an empty field yields 'NoData'.
-- 58 is the ASCII code of ':'.
dataParser :: Parser Data
dataParser = do
    string ":"
    option NoData (takeWhile1 (/= 58) >>= return . Data)
-- | Parse the event payload after a @:@ by JSON-decoding the rest of
-- the input; a payload that fails to decode yields 'NoEvent'.
eventParser :: Parser Event
eventParser = do
    string ":"
    payload <- takeWhile (const True)
    maybe (return NoEvent) return (decode (serialize payload))
------------------------------------------------------------------------------
-- | Parse the path of an incoming HTTP request; a malformed path
-- degrades to an empty 'WithoutSession'.
parsePath :: ByteString -> Path
parsePath = either (const (WithoutSession "" "")) id . parseOnly pathParser
-- | Parse a socket.io request path of the shape
-- @/namespace/protocol/@ optionally followed by @transport/sessionID@.
pathParser :: Parser Path
pathParser = do
    string "/"
    namespace <- takeTill (== 47) -- 47 is the decimal ASCII code of '/'
    take 1 -- skip the second slash
    protocol <- takeTill (== 47)
    take 1 -- skip the third slash
    option (WithoutSession namespace protocol) $ do
        transport <- transportParser
        string "/"
        sessionID <- takeTill (== 47)
        return $ WithSession namespace protocol transport sessionID
-- | Recognise the transport segment of the path; unknown segments
-- degrade to 'NoTransport' (the last alternative skips up to the next
-- '/', byte 47).
transportParser :: Parser Transport
transportParser = choice
    [ string "websocket" >> return WebSocket
    , string "xhr-polling" >> return XHRPolling
    , string "unknown" >> return NoTransport
    , skipWhile (/= 47) >> return NoTransport
    ] | banacorn/socket.io-haskell | Web/SocketIO/Protocol.hs | mit | 5,359 | 0 | 19 | 1,731 | 1,187 | 588 | 599 | 109 | 11 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK ignore-exports #-}
module Buchhaltung.AQBanking where
import Buchhaltung.Common
import Control.Monad.RWS.Strict
import Data.Maybe
import qualified Data.Text as T
import Formatting ((%))
import qualified Formatting.ShortFormatters as F
import System.Directory
import System.FilePath
import System.Process as P
-- * The Monad Stack and its runner
type AQM = CommonM (AQConnection, AQBankingConf)
-- | Runs an AQBanking action once per configured connection of the
-- selected user, in configuration order, and collects the results.
-- Fails if the user has no @aqBanking@ configuration.
runAQ :: FullOptions () -> AQM a -> ErrorT IO [a]
runAQ options action = fst <$> evalRWST action2 options ()
  where action2 = do
          -- NOTE(review): 'user' is presumably a reader action from
          -- Buchhaltung.Common selecting the current user; its result
          -- shadows the name locally — confirm.
          user <- user
          aqconf <- maybeThrow ("AQBanking not configured for user "%F.sh)
                    ($ user) return $ aqBanking user
          -- Re-run the action once per connection, injecting the
          -- (connection, config) pair into the environment's oEnv.
          forM (connections aqconf) $ \conn ->
            withRWST (\r s -> (r{oEnv = (conn,aqconf)}, s)) action
-- * Direct access to the executables
-- | Execute an external binary: @bin@ is invoked with its
-- config-selecting base arguments followed by the caller's extra
-- arguments, via the supplied runner (call vs. read).
runProc ::
  (FilePath -> [String] -> IO a)
  -> [String] -> ([FilePath], FilePath)
  -> AQM a
runProc run extraArgs (baseArgs, bin) =
  liftIO (run bin (baseArgs ++ extraArgs))
-- | Invoke @aqhbci-tool4@ with the given arguments; the configuration
-- directory is selected via its @-C@ flag.
callAqhbci :: AAQM ()
callAqhbci args = runProc callProcess args
              =<< askExec aqhbciToolExecutable "aqhbci-tool4" "-C"
-- | Shared driver for @aqbanking-cli@: appends the context-file
-- arguments (for commands that need one, see 'addContext') and locates
-- the executable; the configuration directory is selected via @-D@.
runAqbanking'
  :: (FilePath -> [String] -> IO b) -> AAQM b
runAqbanking' prc args = do
  args' <- addContext args
  runProc prc args'
    =<< askExec aqBankingExecutable "aqbanking-cli" "-D"
-- | Run @aqbanking-cli@ for its side effects only.
callAqbanking :: AAQM ()
callAqbanking = runAqbanking' callProcess
-- | Run @aqbanking-cli@ and capture its standard output.
readAqbanking :: AAQM String
readAqbanking = runAqbanking' $ readProcess'
-- | An AQBanking action parameterised over CLI arguments.
type AAQM a = [String] -> AQM a
-- * Higher Level of Abstraction
-- | Obtain the transaction listing from @aqbanking-cli@.
aqbankingListtrans :: Bool
                      -- ^ request new transactions first
                   -> AQM T.Text
aqbankingListtrans doRequest = do
  when doRequest $ callAqbanking
    ["request", "--transactions", "--ignoreUnsupported"]
  fmap T.pack (readAqbanking ["listtrans"])
-- | Interactive first-time setup of an AQBanking PinTan user: creates
-- the configuration directory and drives @aqhbci-tool4@ through user
-- creation, system-id retrieval and account discovery.  Fails if the
-- config directory already exists or the connection type is not PinTan.
aqbankingSetup :: AQM ()
aqbankingSetup = do
  path <- askConfigPath
  conn <- readConn return
  -- Refuse to overwrite an existing installation.
  exists <- liftIO $ doesPathExist path
  when exists $ throwFormat
    ("Path '"%F.s%"' already exists. Cannot install into existing path.")
    ($ path)
  typ <- readConn $ return . aqType
  when (typ /= PinTan) $ throwError $ mconcat
    ["modes other than PinTan have to be setup manually. Refer to the "
    ,"AQBanking manual. Use the '-C' to point to the configured "
    ,"'configDir'."]
  liftIO $ createDirectoryIfMissing True path
  -- Order matters: the user must exist before getsysid/getaccounts.
  callAqhbci [ "adduser", "-t", "pintan", "--context=1"
             , "-b", aqBlz conn
             , "-u", aqUser conn
             , "-s", aqUrl conn
             , "-N", aqName conn
             , "--hbciversion=" <> toArg (aqHbciv conn)]
  callAqhbci [ "getsysid" ]
  callAqhbci [ "getaccounts" ]
  callAqhbci [ "listaccounts" ]
-- * Utils
-- | Append the context-file arguments (@-c <configPath>.context@) to an
-- @aqbanking-cli@ command line when the command requires a context file.
addContext :: AAQM [FilePath]
addContext [] = return []
addContext args@(cmd:_) = do
  needsCtx <- withContext cmd
  extra <- if needsCtx
             then (\p -> ["-c", p <.> "context"]) <$> askConfigPath
             else return []
  return (args ++ extra)
-- | Does the given @aqbanking-cli@ command need the context file?
-- Unknown commands are an error rather than a silent default.
withContext cmd = case cmd of
  "listbal"   -> return True
  "listtrans" -> return True
  "request"   -> return True
  "listaccs"  -> return False
  unknown     -> throwFormat
    ("'withContext' not defined for command '"%F.s%"'.")
    ($ unknown)
-- | Per-connection configuration directory:
-- @configDir/<blz>-<user>@, sanitised with 'makeValid'.
askConfigPath :: AQM FilePath
askConfigPath = do
  conn <- readConn return
  makeValid . (</> aqBlz conn <> "-" <> aqUser conn)
    <$> readConf (absolute . configDir)
-- | Feed the active connection (first half of @oEnv@) to a continuation.
readConn :: (AQConnection -> AQM a) -> AQM a
readConn k = reader (fst . oEnv) >>= k
-- | Feed the AQBanking configuration (second half of @oEnv@) to a continuation.
readConf :: (AQBankingConf -> AQM a) -> AQM a
readConf k = reader (snd . oEnv) >>= k
-- | Determine the executable to run together with the two arguments
-- that select its configuration directory.
askExec
  :: (AQBankingConf -> Maybe FilePath)
  -> FilePath -- ^ default executable name
  -> String -- ^ flag introducing the config path
  -> AQM ([FilePath], FilePath)
  -- ^ config-selecting args and executable path
askExec get def arg = do
  path <- askConfigPath
  readConf $ \conf ->
    return ([arg, path], fromMaybe def (get conf))
| johannesgerer/buchhaltung | src/Buchhaltung/AQBanking.hs | mit | 4,207 | 0 | 17 | 1,065 | 1,203 | 626 | 577 | 104 | 2 |
import Assignments
import Submissions
import Data.Time
-- | Course configuration: three identical deadlines, the expected
-- submission file names, and the numeric limits 5, 10 and 7 (semantics
-- defined by 'Configuration' in the Assignments module).
config = Configuration stamp stamp stamp
           ["Assignment.hs", "Homework.hs", "Exercises.hs"]
           5 10 7
  where stamp = read "2016-01-26 23:49:46.861565 UTC" :: UTCTime
-- | Homework assignment number 3 of the year 2015.
assign = Assignment 2015 Homework 3
-- | Example submission: student "Marin" handing in two of the expected files.
sub = Submission "Marin" assign ["Exercises.hs", "Homework.hs"]
-- | Placeholder entry point; just prints "da".
main :: IO ()
main = putStrLn "da"
| cromulen/puh-project | src/Main.hs | mit | 436 | 10 | 7 | 66 | 129 | 69 | 60 | 14 | 1 |
module Core
(CDevice(..),
CSequencer(..),
CComponent(..),
CInst(..),
CArgType(..),
CFormatAtom(..),
SInst(..),
SArgType(..)) where
-- | A complete device description: the ROM name, its sequencer, the
-- named components, and the named sequencer outputs and inputs (an
-- input name paired with its source).
data CDevice
    = CDevice {
        cDeviceRomName :: String,
        cDeviceSequencer :: CSequencer,
        cDeviceComponents :: [(String,CComponent)],
        cDeviceSeqOutputs :: [String],
        cDeviceSeqInputs :: [(String,String)]}
    deriving (Show)
-- | The sequencer: word/address/instruction geometry (all sizes are in
-- bits — presumably; confirm against the assembler), input/output
-- counts and widths, and its instruction set keyed by mnemonic.
data CSequencer
    = CSequencer {
        cSequencerName :: String,
        cSequencerWordSize :: Int,
        cSequencerAddressSize :: Int,
        cSequencerInputs :: Int,
        cSequencerInputsSize :: Int,
        cSequencerOutputs :: Int,
        cSequencerOutputsSize :: Int,
        cSequencerInstructionSize :: Int,
        cSequencerCommandSize :: Int,
        cSequencerComponentCommandSize :: Int,
        cSequencerInstructions :: [(String,CInst)]}
    deriving (Show)
-- | An attached component: command/argument widths, its named outputs,
-- and its instruction set keyed by mnemonic.
data CComponent
    = CComponent {
        cComponentName :: String,
        cComponentCommandSize :: Int,
        cComponentArgumentSize :: Int,
        cComponentOutputs :: [String],
        cComponentInstructions :: [(String,CInst)]}
    deriving (Show)
-- | One instruction definition: mnemonic, opcode, typed argument list,
-- and the output format template.
data CInst
    = CInst {
        cInstName :: String,
        cInstOpCode :: Int,
        cInstArguments :: [(String,CArgType)],
        cInstFormat :: [CFormatAtom]}
    deriving (Show)
-- | The kind of an instruction argument.
data CArgType
    = CImmediate
    | CLabel
    | CComponentCommand
    | CComponentInput
    deriving (Show)
-- | An atom of an instruction's format template: literal text or a
-- reference to a named argument.
data CFormatAtom
    = CLiteral {
        cLiteralValue :: String}
    | CReference {
        cReferenceValue :: String}
    deriving (Show)
-- | One parsed assembly source instruction: its label, resolved
-- address, mnemonic, operands, plus the original source line and line
-- number for diagnostics.
data SInst
    = SInst {
        sInstLabel :: String,
        sInstAddress :: Int,
        sInstInstName :: String,
        sInstOperands :: [SArgType],
        sInstLine :: String,
        sInstLineNumber :: Int}
    deriving (Show)
-- | A source-level operand: an immediate value, a label reference, or a
-- @component.item@ pair.
data SArgType
    = SImmediate {
        sArgImmediateValue :: String}
    | SLabel {
        sArgLabelValue :: String}
    | SComponentItemPair {
        sArgComponentCommandComponent :: String,
        sArgComponentCommandItem :: String}
    deriving (Show)
| horia141/bachelor-thesis | dev/SeqAsm/Core.hs | mit | 2,120 | 0 | 10 | 631 | 491 | 316 | 175 | 76 | 0 |
module FantasyRace where
import Data.Char
import StringHelpers
import System.Random
import Probability()
-- | Flavour prefix applied to a species (e.g. a "Dark Elf").
data RacialModifier = Dark | Deep | Brutal | Weird | High | Sea | Sky | Star | Cold | Fire | Construct | Insectile | Chaos | Corrupted | Augmented | Titanic | Crystal | Elder | Exiled | Mountain | Swamp | Ice | Dream | Desert | Magma | Urban | Arctic | Dusk | Savage | Barbarian | Neo | Arena | Mystic
  deriving (Eq, Show, Read, Enum, Bounded)
-- | Base species; the commented-out alternatives below are not yet enabled.
data Species = Halfling | Elf | Human | Dwarf
-- | Orc | Gnome | Fae | Drakeling | Aberration | Giant | Outsider | Minotaur | Centaur | Imp | Dryad | Goblin | Sylph | Sprite | Pixie | Lycanthrope | Changeling | Vampire | Zombie | Golem | Satyr | Naiad | Cyclops
  deriving (Eq, Show, Read, Enum, Bounded)
-- | A race is a species plus a modifier.
data Race = Race { species :: Species, modifier :: RacialModifier }
  deriving (Eq, Show, Read)
-- | Render a race as a lower-cased, human-readable phrase, e.g. "dark elf".
-- NOTE(review): 'Data.Char.toLower' is @Char -> Char@; this call applies
-- @toLower@ to a 'String', so it presumably resolves to a String-level
-- @toLower@ exported by 'StringHelpers' — confirm; otherwise this needs
-- @map toLower@.
humanizedRace :: Race -> String
humanizedRace race = toLower (show (modifier race) ++ " " ++ show (species race))
-- | Generate a uniformly random race (random modifier and species).
-- NOTE(review): relies on 'Random' instances for 'RacialModifier' and
-- 'Species' being in scope — presumably provided via the 'Probability'
-- import; confirm.
genRace :: IO Race
genRace = do
  racialModifier <- randomIO :: IO RacialModifier
  sp <- randomIO :: IO Species
  return Race { species = sp, modifier = racialModifier }
| jweissman/heroes | src/FantasyRace.hs | mit | 1,248 | 0 | 11 | 336 | 333 | 193 | 140 | 18 | 1 |
module Main where
import Input.Parsers
import Watcher.Parametros
import Watcher.Watch
import Watcher.FileWatcher
import Watcher.Arguments
import Help.Printer
-- | Entry point: parse the command-line arguments into watcher
-- parameters; on failure print the error followed by a two-column
-- usage screen, otherwise start watching with the parsed filters,
-- directory, actions and polling delay.
main :: IO()
main = do
    params <- fmap (parseParameters emptyParams) getArguments
    case params of
        -- Malformed arguments: report, then show help (35/100 layout).
        Left msg -> putStrLn msg >>
                    putStrLn (printHelp (TwoColumns (35,100)) fileWatcher)
        Right p -> watch (filters p) (directory p) (actions p) (delay p) | Miguel-Fontes/hs-file-watcher | app/main.hs | mit | 448 | 0 | 16 | 99 | 155 | 79 | 76 | 14 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module HsToCoq.ConvertHaskell.Literals (
convertInteger, convertFastString, convertFractional
) where
import Prelude hiding (Num)
import Control.Monad.IO.Class
import HsToCoq.Util.GHC.FastString
import HsToCoq.Coq.Gallina
import HsToCoq.Coq.Gallina.Util
import BasicTypes
import Data.Ratio (numerator, denominator)
-- | Turn a non-negative 'Integer' literal into a Coq numeral; negative
-- values are rejected with an error naming the literal's context.
convertInteger :: String -> Integer -> Either String Num
convertInteger what int
  | int < 0   = Left ("negative " ++ what)
  | otherwise = Right (fromInteger int)
-- | Convert a GHC 'FastString' literal into a Gallina string term.
convertFastString :: FastString -> Term
convertFastString = HsString . fsToText
-- | Convert a Haskell fractional literal into the Gallina term
-- @fromRational (Q.Qmake numerator denominator)@.
convertFractional :: MonadIO f => FractionalLit -> f Term
convertFractional (FL _ _ value) =
  pure (App1 fromRat ratio)
  where
    fromRat = Var "fromRational"
    ratio   = App2 (Var "Q.Qmake")
                   (Num (fromInteger (numerator value)))
                   (Num (fromInteger (denominator value)))
| antalsz/hs-to-coq | src/lib/HsToCoq/ConvertHaskell/Literals.hs | mit | 915 | 0 | 16 | 189 | 270 | 143 | 127 | 21 | 1 |
-- | Re-exports 'Data.Default' under the antemodulum umbrella namespace.
module Antemodulum.Default (
  module Export
) where
--------------------------------------------------------------------------------
import Data.Default as Export
| docmunch/antemodulum | src/Antemodulum/Default.hs | mit | 166 | 0 | 4 | 16 | 20 | 14 | 6 | 3 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.