code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
import Test.HUnit
import qualified Data.Map as M
import Control.Monad(liftM)
import Wy.Parser
import Wy.Types
import Wy.Interpr
-- | Run the full suite: parameter-adjustment tests and pattern-match tests.
main = runTestTT $ TestList [adjustTests, patternMatchTests]
--
-- Adjustment of parameters on function application for optional and var args
-- Groups the optional-parameter and slurpy-parameter adjustment suites.
adjustTests = TestList [adjustOptTests, adjustSlurpyTests]

-- NOTE(review): calls below appear to be
--   adjust <null value> <list ctor> <param names> <actual args> <count>
-- where a trailing "?" marks an optional and "~" a slurpy parameter; the
-- final Int presumably counts the optional/slurpy slots actually consumed —
-- confirm against the definition of 'adjust' in Wy.Interpr.
testAdjustEmpty = TestCase $ assertEqual
  "Should be empty with no params" (Just []) $ adjust WyNull WyList [] [] 0
testAdjustFixed = TestCase $ assertEqual
  "Should leave fixed params unchanged" (Just [WyInt 1, WyInt 2]) $ adjust WyNull WyList ["a", "b"] [WyInt 1, WyInt 2] 0
testAdjustFixedErr = TestCase $ assertEqual
  "Should fail for insufficent fixed values" Nothing $ adjust WyNull WyList ["a", "b"] [WyInt 1] 0
testAdjustFailTooManyParams = TestCase $ assertEqual
  "Should fail with too many parameters" Nothing $
  adjust WyNull WyList ["a", "b"] [WyInt 0, WyInt 1, WyInt 2] (-1)
testAdjustFixedErr2 = TestCase $ assertEqual
  "Should fail for too many fixed values" Nothing $ adjust WyNull WyList ["a", "b"] [WyInt 1, WyInt 2, WyInt 3] 1
testAdjustOneOpt = TestCase $ assertEqual
  "Should attribute one optional" (Just [WyInt 1, WyInt 2]) $ adjust WyNull WyList ["a", "b?"] [WyInt 1, WyInt 2] 1
testAdjustOneOptMissing = TestCase $ assertEqual
  "Should nullify one missing optional" (Just [WyInt 1, WyNull]) $ adjust WyNull WyList ["a", "b?"] [WyInt 1] 0
testAdjustTwoOpt = TestCase $ assertEqual
  "Should attribute two optionals" (Just [WyInt 0, WyInt 1, WyInt 2]) $
  adjust WyNull WyList ["a", "b?", "c?"] [WyInt 0, WyInt 1, WyInt 2] 2
testAdjustTwoOptOneMiss = TestCase $ assertEqual
  "Should attribute one optional, nullify one missing" (Just [WyInt 0, WyInt 1, WyNull]) $
  adjust WyNull WyList ["a", "b?", "c?"] [WyInt 0, WyInt 1] 1
testAdjustTwoOptTwoMiss = TestCase $ assertEqual
  "Should nullify two missing optionals" (Just [WyInt 0, WyNull, WyNull]) $
  adjust WyNull WyList ["a", "b?", "c?"] [WyInt 0] 0
testAdjustInter = TestCase $ assertEqual
  "Should nullify an intermediate optional" (Just [WyInt 0, WyNull, WyInt 1]) $
  adjust WyNull WyList ["a", "b?", "c"] [WyInt 0, WyInt 1] 0
-- NOTE(review): four params but only three expected values below — this test
-- looks wrong as written (and is not included in adjustOptTests); confirm.
testAdjustTwoInterEnd = TestCase $ assertEqual
  "Should nullify two intermediate optionals" (Just [WyInt 0, WyNull, WyInt 1]) $
  adjust WyNull WyList ["a", "b?", "c?", "d"] [WyInt 0, WyInt 1] 0
testAdjustInterEndMissing = TestCase $ assertEqual
  "Should nullify intermediate and ending optionals" (Just [WyInt 0, WyInt 1, WyInt 2, WyNull]) $
  adjust WyNull WyList ["a", "b?", "c", "d?"] [WyInt 0, WyInt 1, WyInt 2] 1
-- NOTE(review): testAdjustFixedErr2, testAdjustTwoInterEnd and
-- testAdjustInterEndMissing are defined above but never listed here, so they
-- silently never run. testAdjustTwoInterEnd's expectation looks malformed;
-- fix it and add all three, or delete them.
adjustOptTests = TestList [testAdjustEmpty, testAdjustFixed, testAdjustFixedErr, testAdjustOneOpt,
  testAdjustOneOptMissing, testAdjustTwoOpt, testAdjustTwoOptOneMiss, testAdjustTwoOptTwoMiss,
  testAdjustInter, testAdjustFailTooManyParams]
-- Slurpy ("~") parameters soak up a variable number of arguments into a list.
testAdjustEmptySlurpy = TestCase $ assertEqual
  "Should nullify missing slurpy" (Just [WyInt 0, WyNull]) $
  adjust WyNull WyList ["a", "b~"] [WyInt 0] 0
testAdjustEndingSlurpy = TestCase $ assertEqual
  "Should fill ending slurpy" (Just [WyInt 0, WyList [WyInt 1, WyInt 2]]) $
  adjust WyNull WyList ["a", "b~"] [WyInt 0, WyInt 1, WyInt 2] 2
testAdjustMiddleSlurpy = TestCase $ assertEqual
  "Should fill middle slurpy" (Just [WyInt 0, WyList [WyInt 1, WyInt 2], WyInt 3]) $
  adjust WyNull WyList ["a", "b~", "c"] [WyInt 0, WyInt 1, WyInt 2, WyInt 3] 2
testAdjustEmptyMiddleSlurpy = TestCase $ assertEqual
  "Should insert empty middle slurpy" (Just [WyInt 0, WyList [], WyInt 3]) $
  adjust WyNull WyList ["a", "b~", "c"] [WyInt 0, WyInt 3] 0
-- NOTE(review): the label says "Should fail" but the expected value is
-- @Just [WyInt 0]@ (a success); either the message or the expectation is
-- wrong — confirm the intended 'adjust' behavior before changing either.
testAdjustFailFixedMissSlurpy = TestCase $ assertEqual
  "Should fail with slurpy but missing fixed params" (Just [WyInt 0]) $
  adjust WyNull WyList ["a", "b~", "c"] [WyInt 0] (-1)
adjustSlurpyTests = TestList [testAdjustEmptySlurpy, testAdjustEndingSlurpy, testAdjustMiddleSlurpy,
  testAdjustEmptyMiddleSlurpy, testAdjustFailFixedMissSlurpy]
--
-- Pattern matching
-- Pattern matching: backquote-prefixed identifiers (e.g. "`a") in the pattern
-- appear to act as capture variables bound in the resulting environment map;
-- a trailing "?" presumably marks an optional slot — confirm in Wy.Interpr.
patternMatchTests = TestList [testEmptyFn, testParamFn, testTwoVarExpr, testMissingOptionalPatt]
testEmptyFn = TestCase $ assertEqual
  "Should match empty application" (Just M.empty) $
  patternMatch (WyApplic (WyId "foo" NoPos) [] NoPos) (WyApplic (WyId "foo" NoPos) [] NoPos) (Just M.empty)
testParamFn = TestCase $ assertEqual
  "Should match parametered application" (Just [("a", WyString "bar"), ("b", WyInt 3)]) $ liftM M.toList $
  patternMatch (WyApplic (WyId "foo" NoPos) [WyId "`a" NoPos, WyId "`b" NoPos] NoPos)
               (WyApplic (WyId "foo" NoPos) [WyString "bar", WyInt 3] NoPos) (Just M.empty)
testTwoVarExpr = TestCase $ assertEqual
  "Should match expression with two variables" (Just [("a", WyInt 2), ("b", WyInt 3)]) $ liftM M.toList $
  patternMatch (WyStmt [(WyId "`a" NoPos) , (WyId "+" NoPos), (WyId "`b" NoPos)])
               (WyStmt [(WyInt 2) , (WyId "+" NoPos), (WyInt 3)]) (Just M.empty)
testMissingOptionalPatt = TestCase $ assertEqual
  "Should match application with missing optional" (Just [("a", WyString "bar"), ("b", (WyId "null" NoPos))]) $ liftM M.toList $
  patternMatch (WyApplic (WyId "foo" NoPos) [WyId "`a" NoPos, WyId "`b?" NoPos] NoPos)
               (WyApplic (WyId "foo" NoPos) [WyString "bar"] NoPos) (Just M.empty)
|
matthieu/witty
|
haskell/test/hunit.hs
|
apache-2.0
| 5,199
| 0
| 14
| 843
| 1,851
| 968
| 883
| 77
| 1
|
module ESE20181213 where
import Control.Monad
-- | A log is simply an ordered list of messages.
type Log = [String]

-- | A value annotated with the log produced while computing it
-- (a minimal writer monad over @[String]@).
newtype Logger a = Logger { run :: (a, Log) }

instance (Show a) => Show (Logger a) where
  show (Logger a) = show a

instance (Eq a) => Eq (Logger a) where
  -- Defining (/=) alone suffices: (==) falls back to the class default.
  Logger (x, y) /= Logger (a, b) = (x /= a) || (y /= b)

-- | Map a pure function over the carried value, leaving the log intact.
logmapp :: (a -> b) -> Logger a -> Logger b
logmapp f logger =
  let (a, l) = run logger
  in Logger (f a, l)

instance Functor Logger where
  fmap = logmapp

-- | Apply a logged function to a logged value, concatenating both logs
-- (function's log first). The original body was truncated mid-let and did
-- not compile; this is the standard writer-style definition.
logbind :: Logger (a -> b) -> Logger a -> Logger b
logbind lf lv =
  let (f, fLog) = run lf
      (v, vLog) = run lv
  in Logger (f v, fLog ++ vLog)

instance Applicative Logger where
  pure a = Logger (a, [])
  (<*>) = logbind  -- original read "(<*>) =. logbind", a syntax error
|
riccardotommasini/plp16
|
haskell/actual/ESE20181213.hs
|
apache-2.0
| 701
| 13
| 11
| 232
| 350
| 181
| 169
| -1
| -1
|
-- Copyright 2012-2014 Samplecount S.L.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE TemplateHaskell #-}
{-|
Description: Build flags record for building @C@ language projects
The `BuildFlags` record is an abstraction for various toolchain flags for
building executables and libraries from source files in a @C@-based language.
It's intended to be toolchain-independent, but currently there's a
bias towards binutils\/gcc/clang toolchains.
-}
module Development.Shake.Language.C.BuildFlags (
-- * Source Language
Language(..)
-- * Build flags
, BuildFlags
-- Poor man's documentation for TH generated functions.
, systemIncludes -- | System include directories, referenced by @#include \<...\>@ in code and usually passed to the compiler with the @-isystem@ flag.
, userIncludes -- | User include directories, referenced by @#include "..."@ in code and usually passed to the compiler with the @-I@ flag.
, defines -- | Preprocessor defines, a list of pairs of names with or without a value.
, preprocessorFlags -- | Other preprocessor flags.
, compilerFlags -- | Compiler flags, either generic ones or for a specific source 'Language'.
, libraryPath -- | Linker search path for libraries.
, libraries -- | List of libraries to link against. Note that you should use the library name without the @lib@ prefix and without extension.
, linkerFlags -- | Flags passed to the linker.
, localLibraries -- | Locally built static libraries to be linked against. See also the corresponding section in the <https://github.com/samplecount/shake-language-c/blob/master/docs/Manual.md#locally-built-libraries manual>.
, archiverFlags -- | Flags passed to the object archiver.
-- ** Utilities for toolchain writers
, defineFlags
, compilerFlagsFor
-- ** Working with config files
, fromConfig
-- * Utilities
, (>>>=)
, append
, prepend
) where
import Control.Category ((>>>))
import Control.Monad
import Data.Char (isSpace)
import Data.Default.Class (Default(..))
import Data.List
import Data.List.Split
import Data.Maybe
import Data.Semigroup
import Development.Shake.Language.C.Language (Language(..))
import Development.Shake.Language.C.Label
import Development.Shake.Language.C.Util
{-| Record type for abstracting various toolchain command line flags.
`BuildFlags` is an instance of `Default`, you can create a default record with
`def`. `BuildFlags` is also an instance `Monoid`, you can create an empty record with
`mempty` and append flags with `mappend`. `def` and `mempty` are synonyms:
>>> (def :: BuildFlags) == (mempty :: BuildFlags)
True
Record accessors are `Data.Label.Mono.Lens`es from the
<https://hackage.haskell.org/package/fclabels fclabels> package, which
makes accessing and modifying record fields a bit more convenient.
@fclabels@ was chosen over <https://hackage.haskell.org/package/lens lens>
because it has far fewer dependencies, which is convenient when installing
the Shake build system in a per-project cabal sandbox. We might switch to
@lens@ when it gets included in the Haskell platform.
There are two convenience functions for working with `BuildFlags` record fields
containing lists of flags, `append` and `prepend`. Since most combinators in
this library expect a function @BuildFlags -> BuildFlags@, the following is a
common idiom:
@
buildFlags . append `systemIncludes` ["path"]
@
Note that when modifying the same record field, order of function composition
matters and you might want to use the arrow combinator '>>>' for appending in
source statement order:
>>> :{
get systemIncludes
$ append systemIncludes ["path1"] . append systemIncludes ["path2"]
$ mempty
:}
["path2","path1"]
>>> :{
get systemIncludes
$ append systemIncludes ["path1"] >>> append systemIncludes ["path2"]
$ mempty
:}
["path1","path2"]
See "Development.Shake.Language.C.Rules" for how to use 'BuildFlags' in build
product rules.
-}
-- | Toolchain flag collection; see the module header for usage. The
-- underscore-prefixed fields feed 'mkLabel', which derives the exported
-- fclabels accessors of the same names without the underscore.
data BuildFlags = BuildFlags {
    _systemIncludes :: [FilePath]                     -- ^ @-isystem@ directories
  , _userIncludes :: [FilePath]                       -- ^ @-I@ directories
  , _defines :: [(String, Maybe String)]              -- ^ preprocessor defines, with optional value
  , _preprocessorFlags :: [String]                    -- ^ other preprocessor flags
  , _compilerFlags :: [(Maybe Language, [String])]    -- ^ flags, optionally restricted to one language
  , _libraryPath :: [FilePath]                        -- ^ linker library search path
  , _libraries :: [String]                            -- ^ library names, no @lib@ prefix or extension
  , _linkerFlags :: [String]                          -- ^ flags passed to the linker
  -- This is needed for linking against local libraries built by shake (the linker `needs' its inputs).
  , _localLibraries :: [FilePath]
  , _archiverFlags :: [String]                        -- ^ flags passed to the object archiver
  } deriving (Eq, Show)

-- Template Haskell: derive fclabels lenses for every field above.
mkLabel ''BuildFlags
-- | 'BuildFlags' with every field empty; backs both 'def' and 'mempty'.
-- Positional application — every one of the ten fields is a list:
-- systemIncludes, userIncludes, defines, preprocessorFlags, compilerFlags,
-- libraryPath, libraries, linkerFlags, localLibraries, archiverFlags.
defaultBuildFlags :: BuildFlags
defaultBuildFlags = BuildFlags [] [] [] [] [] [] [] [] [] []
-- | The default record is all-empty, so @def == mempty@ (see module doctest).
instance Default BuildFlags where
    def = defaultBuildFlags

-- | Field-wise combination: for each field, @a@'s flags end up after @b@'s
-- (per the 'append' semantics shown in the module-header doctests).
instance Semigroup BuildFlags where
  a <> b =
      append systemIncludes (get systemIncludes a)
    . append userIncludes (get userIncludes a)
    . append defines (get defines a)
    . append preprocessorFlags (get preprocessorFlags a)
    . append compilerFlags (get compilerFlags a)
    . append libraryPath (get libraryPath a)
    . append libraries (get libraries a)
    . append linkerFlags (get linkerFlags a)
    . append localLibraries (get localLibraries a)
    . append archiverFlags (get archiverFlags a)
    $ b

-- | 'mempty' coincides with 'def'; 'mappend' delegates to '(<>)'.
instance Monoid BuildFlags where
    mempty = defaultBuildFlags
    mappend = (<>)
-- | Construct preprocessor flags from the 'defines' field of 'BuildFlags'.
defineFlags :: BuildFlags -> [String]
defineFlags = concatMapFlag "-D"
. map (\(a, b) -> maybe a (\b' -> a++"="++b') b)
. get defines
-- | Return a list of compiler flags for a specific source language.
-- | Return a list of compiler flags for a specific source language.
--
-- For @Nothing@ only the language-agnostic groups (tagged @Nothing@) are
-- returned; for @Just l@ both the agnostic groups and those tagged exactly
-- @l@ are returned, in their original order.
compilerFlagsFor :: Maybe Language -> BuildFlags -> [String]
compilerFlagsFor lang = concat
                      . maybe (map snd . filter (isNothing.fst))
                              (mapMaybe . f) lang
                      . get compilerFlags
  where f _ (Nothing, x) = Just x           -- untagged flags always apply
        f l (Just l', x) | l == l' = Just x -- tagged flags only for their language
                         | otherwise = Nothing
-- | Construct a 'BuildFlags' modifier function from a config file.
--
-- See also "Development.Shake.Language.C.Config".
-- | Construct a 'BuildFlags' modifier function from a config file.
--
-- Each field is looked up under the @BuildFlags.@ key prefix; a missing key
-- contributes nothing. Language-specific compiler flags come from the
-- @.c@ / @.cxx@ suffixed keys.
--
-- See also "Development.Shake.Language.C.Config".
fromConfig :: (Functor m, Monad m) => (String -> m (Maybe String)) -> m (BuildFlags -> BuildFlags)
fromConfig getConfig = do
  let parseConfig parser = fmap (maybe [] parser) . getConfig . ("BuildFlags."++)
  config_systemIncludes <- parseConfig paths "systemIncludes"
  config_userIncludes <- parseConfig paths "userIncludes"
  config_defines <- parseConfig defines' "defines"
  config_preprocessorFlags <- parseConfig flags "preprocessorFlags"
  config_compilerFlags <- parseConfig ((:[]) . ((,)Nothing) . flags) "compilerFlags"
  config_compilerFlags_c <- parseConfig ((:[]) . ((,)(Just C)) . flags) "compilerFlags.c"
  config_compilerFlags_cxx <- parseConfig ((:[]) . ((,)(Just Cpp)) . flags) "compilerFlags.cxx"
  config_libraryPath <- parseConfig paths "libraryPath"
  config_libraries <- parseConfig flags "libraries"
  config_linkerFlags <- parseConfig flags "linkerFlags"
  config_localLibraries <- parseConfig paths "localLibraries"
  config_archiverFlags <- parseConfig flags "archiverFlags"
  -- Compose one append per field into a single BuildFlags -> BuildFlags.
  return $  append systemIncludes config_systemIncludes
          . append userIncludes config_userIncludes
          . append defines config_defines
          . append preprocessorFlags config_preprocessorFlags
          . append compilerFlags (config_compilerFlags ++ config_compilerFlags_c ++ config_compilerFlags_cxx)
          . append libraryPath config_libraryPath
          . append libraries config_libraries
          . append linkerFlags config_linkerFlags
          . append localLibraries config_localLibraries
          . append archiverFlags config_archiverFlags
  where
    -- Values are split into words after stripping leading whitespace;
    -- words' comes from ...C.Util and presumably honors quoting — confirm.
    flags = words' . dropWhile isSpace
    paths = words' . dropWhile isSpace
    -- "NAME" or "NAME=VALUE"; extra '='s are folded back into the value.
    define [] = error "Empty preprocessor definition"
    define [k] = (k, Nothing)
    define [k,v] = (k, Just v)
    define (k:vs) = (k, Just (intercalate "=" vs))
    defines' = map (define . splitOn "=") . flags
-- | Utility function for composing functions in a monad.
-- | Utility function for composing functions in a monad: the effects run
-- left to right, and the produced functions are also composed left to right
-- (first the @a -> b@, then the @b -> c@).
(>>>=) :: Monad m => m (a -> b) -> m (b -> c) -> m (a -> c)
mf >>>= mg = do
  f <- mf
  g <- mg
  return (g . f)
|
samplecount/shake-language-c
|
src/Development/Shake/Language/C/BuildFlags.hs
|
apache-2.0
| 9,083
| 0
| 18
| 1,964
| 1,492
| 817
| 675
| 121
| 4
|
-- |
-- Module : Graphics.Rendering.Hieroglyph.Cairo
-- Copyright : (c) Renaissance Computing Institute 2009
-- License : BSD3
--
--
-- [@Author@] Jeff Heard
--
-- [@Copyright@] © 2008 Renaissance Computing Institute
--
-- [@License@] A LICENSE file should be included as part of this distribution
--
-- [@Version@] 0.5
--
module Graphics.Rendering.Hieroglyph.Cairo where
import qualified Graphics.Rendering.Hieroglyph.Cache as Cache
import qualified Data.Set as Set
import qualified Graphics.UI.Gtk.Cairo as Gtk
import Graphics.UI.Gtk.Pango.Context
import Graphics.UI.Gtk.Pango.Layout
import Data.Map (Map)
import qualified Data.Map as M
import System.Mem.Weak
import Control.Concurrent
import Control.Monad.Trans (liftIO)
import Graphics.Rendering.Hieroglyph.Primitives
import Graphics.Rendering.Hieroglyph.Visual
import Graphics.UI.Gtk.Gdk.Pixbuf
import qualified Graphics.UI.Gtk.Cairo as Cairo
import qualified Graphics.Rendering.Cairo as Cairo
import Control.Monad
import Control.Monad.IfElse
import Data.Foldable (foldlM)
import Data.List (sort)
import Data.Colour
import Data.Colour.SRGB
import Data.Colour.Names (black)
import qualified Text.PrettyPrint as Pretty
-- | Shared cache mapping Hieroglyph primitives to their loaded GTK pixbufs.
type ImageCache = MVar (Cache.Cache Primitive Pixbuf)

-- Translate Hieroglyph enumeration values into their Cairo equivalents.
toCairoAntialias aa = case aa of
  AntialiasDefault  -> Cairo.AntialiasDefault
  AntialiasNone     -> Cairo.AntialiasNone
  AntialiasGray     -> Cairo.AntialiasGray
  AntialiasSubpixel -> Cairo.AntialiasSubpixel

toCairoFillRule rule = case rule of
  FillRuleWinding -> Cairo.FillRuleWinding
  FillRuleEvenOdd -> Cairo.FillRuleEvenOdd

toCairoLineCap cap = case cap of
  LineCapButt   -> Cairo.LineCapButt
  LineCapRound  -> Cairo.LineCapRound
  LineCapSquare -> Cairo.LineCapSquare

toCairoLineJoin join = case join of
  LineJoinMiter -> Cairo.LineJoinMiter
  LineJoinRound -> Cairo.LineJoinRound
  LineJoinBevel -> Cairo.LineJoinBevel

toCairoOperator op = case op of
  OperatorClear    -> Cairo.OperatorClear
  OperatorSource   -> Cairo.OperatorSource
  OperatorOver     -> Cairo.OperatorOver
  OperatorIn       -> Cairo.OperatorIn
  OperatorOut      -> Cairo.OperatorOut
  OperatorAtop     -> Cairo.OperatorAtop
  OperatorDest     -> Cairo.OperatorDest
  OperatorXor      -> Cairo.OperatorXor
  OperatorAdd      -> Cairo.OperatorAdd
  OperatorSaturate -> Cairo.OperatorSaturate
-- | Split an 'AlphaColour' into un-premultiplied @(r, g, b, alpha)@
-- components, compositing over black before undoing the alpha scaling.
colourToTuple :: AlphaColour Double -> (Double,Double,Double,Double)
colourToTuple colour =
  let a = alphaChannel colour
      flattened = (1 / a) `darken` (colour `Data.Colour.over` black)
      RGB red green blue = toSRGB flattened
  in (red, green, blue, a)
-- | Fill, stroke and/or clip according to the attribute state. The path
-- described by @action@ is replayed once per enabled fill/stroke operation,
-- since Cairo's fill and stroke consume the current path.
fillStrokeAndClip state action = do
  let (fr,fg,fb,fa) = colourToTuple . afillRGBA $ state
      (sr,sg,sb,sa) = colourToTuple . astrokeRGBA $ state
  when (afilled state) $ Cairo.setSourceRGBA fr fg fb fa >> action >> Cairo.fill
  when (aoutlined state) $ Cairo.setSourceRGBA sr sg sb sa >> action >> Cairo.stroke
  -- NOTE(review): clip does not replay @action@; it clips to whatever path
  -- is current at this point — confirm this is intentional.
  when (aclipped state) $ Cairo.clip
-- | Emit the Cairo path command corresponding to one curve segment.
renderCurveSegs seg = case seg of
  Line (Point x0 y0)     -> Cairo.lineTo x0 y0
  EndPoint (Point x0 y0) -> Cairo.moveTo x0 y0
  Spline (Point x0 y0) (Point x1 y1) (Point x2 y2) -> Cairo.curveTo x0 y0 x1 y1 x2 y2
-- | @renderPrimitive state prim@ draws a single primitive.
-- | @renderPrimitive state prim@ draws a single primitive. Each equation
-- first applies the attribute delta from the previous state, draws, then
-- returns the primitive's own attribute state for the next iteration.
renderPrimitive :: PangoContext -> ImageCache -> Attributes -> Primitive -> Cairo.Render Attributes
-- Arc: a negative sweep direction selects arcNegative.
renderPrimitive _ _ s0 (Arc (Point cx cy) radius angle0 angle1 isnegative state _) = do
  applyAttributeDelta s0 state
  fillStrokeAndClip state $
    if isnegative then Cairo.arcNegative cx cy radius angle0 angle1 else Cairo.arc cx cy radius angle0 angle1
  return state
-- Dots: one full circle per point, radius taken from the line width.
renderPrimitive _ _ s0 (Dots ats attrs sig) = do
  applyAttributeDelta s0 attrs
  fillStrokeAndClip attrs $ do
    forM_ ats $ \(Point ox oy) -> do
      Cairo.moveTo ox oy
      Cairo.arc ox oy (alinewidth attrs) 0 (2*pi)
  return attrs
-- Path: move to the origin, replay segments, optionally close back to origin.
renderPrimitive _ _ s0 (Path (Point ox oy) segs isclosed state _) = do
  applyAttributeDelta s0 state
  fillStrokeAndClip state $ do
    Cairo.moveTo ox oy
    forM_ segs $ renderCurveSegs
    when isclosed (Cairo.lineTo ox oy)
  return state
-- Image at a point: painted at its natural pixel size.
renderPrimitive _ images s0 i@(Image filename (Left (Point ox oy)) _ state _) = do
  applyAttributeDelta s0 state
  pbuf <- loadImage images i
  w <- liftIO $ pixbufGetWidth pbuf
  h <- liftIO $ pixbufGetHeight pbuf
  Cairo.save
  Cairo.setSourcePixbuf pbuf ox oy
  Cairo.rectangle ox oy (fromIntegral w) (fromIntegral h)
  Cairo.fill
  Cairo.restore
  return state
-- Image in a rectangle: painted into the given bounds (loadImage scales it).
renderPrimitive _ images s0 i@(Image filename (Right (Rect ox oy w h)) _ state _) = do
  applyAttributeDelta s0 state
  pbuf <- loadImage images i
  Cairo.save
  Cairo.setSourcePixbuf pbuf ox oy
  Cairo.rectangle ox oy w h
  Cairo.fill
  Cairo.restore
  return state
-- Hidden primitives draw nothing and do not change the attribute state.
renderPrimitive _ _ s0 (Hidden _ _) = return s0
renderPrimitive _ _ s0 (Rectangle (Point ox oy) w h state _) = do
  applyAttributeDelta s0 state
  fillStrokeAndClip state $ Cairo.rectangle ox oy w h
  return state
-- Text: lay out the markup with Pango, then show the layout at the origin.
renderPrimitive context _ s0 txt@(Text _ (Point ox oy) _ _ _ _ _ _ _ _ _) = do
  layout <- liftIO $ layoutEmpty context >>= \layout -> do
    layoutSetMarkup layout . Pretty.render . str $ txt
    layoutSetAlignment layout . align $ txt
    layoutSetJustify layout . justify $ txt
    layoutSetWidth layout . wrapwidth $ txt
    layoutSetWrap layout . wrapmode $ txt
    layoutSetIndent layout . indent $ txt
    return layout
  applyAttributeDelta s0 (attribs txt)
  fillStrokeAndClip (attribs txt) $ do
    Cairo.moveTo ox oy
    Cairo.showLayout layout
  return (attribs txt)
-- Union: children are rendered with fill/stroke/clip disabled so that the
-- union's own attributes drive the combined fill/stroke.
renderPrimitive context images s0 (Union prims state _) = do
  let unfoc prim = prim{ attribs = (attribs prim){afilled=False, aoutlined=False, aclipped=False } }
  applyAttributeDelta s0 state
  fillStrokeAndClip state $ forM_ prims (renderPrimitive context images state . unfoc)
  return state
-- | Render every primitive of a 'Visual' in sorted (z-)order, threading the
-- attribute state from one primitive to the next.
--
-- Fixes two issues in the previous version: the primitive list was sorted
-- twice (once in @vis@ and again at the fold), and @attribs . head@ crashed
-- on an empty drawing — an empty drawing is now a no-op.
render context images d =
    case sort (primitives d) of
      [] -> return ()
      vis@(v0 : _) -> do
        let attrs0 = attribs v0
        loadStateIntoCairo attrs0
        _ <- foldlM (renderPrimitive context images) attrs0 vis
        return ()
-- | Emit Cairo state changes only for attributes that differ between the
-- previous state @a@ and the new state @b@; returns @b@.
applyAttributeDelta a b = do
  -- (%=>) is a Hieroglyph combinator; it presumably applies the comparison
  -- to the accessor's projection of both states — confirm its definition.
  let different f = ((f %=> (/=)) a b)
      whendifferent f = when (different f)
  whendifferent afillrule . Cairo.setFillRule . toCairoFillRule . afillrule $ b
  whendifferent adash . maybe (return ()) (uncurry Cairo.setDash) . adash $ b
  whendifferent aantialias . Cairo.setAntialias . toCairoAntialias . aantialias $ b
  whendifferent alinewidth . Cairo.setLineWidth . alinewidth $ b
  whendifferent alinecap . Cairo.setLineCap . toCairoLineCap . alinecap $ b
  whendifferent alinejoin . Cairo.setLineJoin . toCairoLineJoin . alinejoin $ b
  whendifferent amiterlimit . Cairo.setMiterLimit . amiterlimit $ b
  whendifferent atolerance . Cairo.setTolerance . atolerance $ b
  whendifferent aoperator . Cairo.setOperator . toCairoOperator . aoperator $ b
  -- The transform components are reapplied together if any one changed.
  when (different ascalex || different ascaley || different arotation || different atranslatex || different atranslatey) $ do
    Cairo.translate (atranslatex b) (atranslatey b)
    Cairo.scale (ascalex b) (ascaley b)
    Cairo.rotate (arotation b)
  return b
-- | Load the Cairo state with a 'RenderState' Drawing.
-- | Load the Cairo state with a 'RenderState' Drawing.
-- Unconditionally pushes every attribute into Cairo (used once before the
-- incremental 'applyAttributeDelta' takes over).
-- NOTE(review): unlike applyAttributeDelta, this never calls setLineCap —
-- confirm whether the line cap should be initialized here too.
loadStateIntoCairo :: Attributes -> Cairo.Render ()
loadStateIntoCairo s = do
  Cairo.setFillRule . toCairoFillRule . afillrule $ s
  awhen (adash s) $ \(a,b) -> Cairo.setDash a b
  Cairo.setAntialias . toCairoAntialias . aantialias $ s
  Cairo.setLineJoin . toCairoLineJoin . alinejoin $ s
  Cairo.setLineWidth . alinewidth $ s
  Cairo.setMiterLimit . amiterlimit $ s
  Cairo.setTolerance . atolerance $ s
  Cairo.setOperator . toCairoOperator . aoperator $ s
  Cairo.translate (atranslatex s) (atranslatey s)
  Cairo.scale (ascalex s) (ascaley s)
  Cairo.rotate (arotation s)
-- | @renderFrameToSurface surface frame@ renders a frame to a particular surface
-- | @renderFrameToSurface surface frame@ renders a frame to a particular surface
renderToSurfaceWithImageCache :: Visual t => PangoContext -> ImageCache -> Cairo.Surface -> t -> IO ()
renderToSurfaceWithImageCache context images surf frame = Cairo.renderWith surf (render context images frame)

-- | Convenience wrapper that allocates a fresh image cache per call.
-- NOTE(review): the cache parameters (1024, 33) are repeated in every
-- wrapper below; consider naming them.
renderToSurface :: Visual t => PangoContext -> Cairo.Surface -> t -> IO ()
renderToSurface c s o = do { i <- newMVar (Cache.empty 1024 33) ;renderToSurfaceWithImageCache c i s o }

-- | @renderframeToPNGWithImageCache filename xres yres frame@ renders a frame to an image file
renderToPNGWithImageCache :: Visual t => PangoContext -> ImageCache -> FilePath -> Int -> Int -> t -> IO ()
renderToPNGWithImageCache c images filename xres yres frame = Cairo.withImageSurface Cairo.FormatARGB32 xres yres $ \s -> renderToSurfaceWithImageCache c images s frame >> Cairo.surfaceWriteToPNG s filename

-- | PNG wrapper with a fresh Pango context and image cache.
renderToPNG f w h o = do { c <- Gtk.cairoCreateContext Nothing ; i <- newMVar (Cache.empty 1024 33) ; renderToPNGWithImageCache c i f w h o }

-- | @renderToPDFWithImageCache filename width height frame@ renders a frame to a PDF file. width and height are in points.
renderToPDFWithImageCache :: Visual t => PangoContext -> ImageCache -> FilePath -> Double -> Double -> t -> IO ()
renderToPDFWithImageCache c images filename width height frame = Cairo.withPDFSurface filename width height $ \s -> renderToSurfaceWithImageCache c images s frame

-- | PDF wrapper with a fresh Pango context and image cache.
renderToPDF f w h o = do { c <- Gtk.cairoCreateContext Nothing ; i <- newMVar (Cache.empty 1024 33) ; renderToPDFWithImageCache c i f w h o }

-- | @renderToPostscriptWithImageCache filename width height frame@ renders a frame to a Postscript file. width and height are in points.
renderToPostscriptWithImageCache :: Visual t => PangoContext -> ImageCache -> FilePath -> Double -> Double -> t -> IO ()
renderToPostscriptWithImageCache c images filename width height frame = Cairo.withPSSurface filename width height $ \s -> renderToSurfaceWithImageCache c images s frame

-- | Postscript wrapper with a fresh Pango context and image cache.
renderToPostscript f w h o = do { c <- Gtk.cairoCreateContext Nothing ; i <- newMVar (Cache.empty 1024 33) ; renderToPostscriptWithImageCache c i f w h o }

-- | @renderToSVGWithImageCache filename width height frame@ renders a frame to a SVG file. width and height are in points.
renderToSVGWithImageCache :: Visual t => PangoContext -> ImageCache -> FilePath -> Double -> Double -> t -> IO ()
renderToSVGWithImageCache c images filename width height frame = Cairo.withSVGSurface filename width height $ \s -> renderToSurfaceWithImageCache c images s frame

-- | SVG wrapper with a fresh Pango context and image cache.
renderToSVG f w h o = do { c <- Gtk.cairoCreateContext Nothing ; i <- newMVar (Cache.empty 1024 33) ; renderToSVGWithImageCache c i f w h o }
-- | @loadImage dictRef image@ pulls an image out of the cache's hat.
-- | @loadImage dictRef image@ pulls an image out of the cache's hat:
-- on a hit the cached pixbuf is reused, on a miss it is loaded from disk
-- and stored for next time. The whole lookup/update runs inside
-- 'modifyMVar' so concurrent renders see a consistent cache.
loadImage :: ImageCache -> Primitive -> Cairo.Render (Pixbuf)
-- Rect placement: the pixbuf is loaded pre-scaled to the rect's size.
loadImage dictRef im@(Image filename (Right (Rect x y w h)) aspect _ _) = do
    liftIO $ modifyMVar dictRef $ \dict ->
      if im `Cache.member` dict
        then do let (cache', value) = Cache.get im dict
                pbuf <- case value of
                  Just pb -> return pb
                  Nothing -> pixbufNewFromFileAtScale filename (round w) (round h) aspect
                return (cache', pbuf)
        else do pbuf <- pixbufNewFromFileAtScale filename (round w) (round h) aspect
                return ((Cache.put im pbuf dict), pbuf)
-- Point placement: the pixbuf is loaded at its natural size.
loadImage dictRef im@(Image filename (Left (Point x y)) _ _ _) = do
    liftIO $ modifyMVar dictRef $ \dict ->
      if im `Cache.member` dict
        then do let (cache', value) = Cache.get im dict
                pbuf <- case value of
                  Just pb -> return pb
                  Nothing -> pixbufNewFromFile filename
                -- BUG FIX: this clause previously returned the stale @dict@,
                -- discarding the cache state updated by 'Cache.get'
                -- (presumably recency bookkeeping), unlike the clause above.
                return (cache', pbuf)
        else do pbuf <- pixbufNewFromFile filename
                return ((Cache.put im pbuf dict), pbuf)
|
JeffHeard/Hieroglyph
|
Graphics/Rendering/Hieroglyph/Cairo.hs
|
bsd-2-clause
| 12,020
| 0
| 18
| 2,496
| 3,732
| 1,840
| 1,892
| 197
| 5
|
{-# LANGUAGE BangPatterns, CPP, MagicHash, RankNTypes, UnboxedTuples #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-}
#endif
-- Module: Data.Text.Lazy.Builder.Int
-- Copyright: (c) 2013 Bryan O'Sullivan
-- (c) 2011 MailRank, Inc.
-- License: BSD3
-- Maintainer: Bryan O'Sullivan <bos@serpentine.com>
-- Stability: experimental
-- Portability: portable
--
-- Efficiently write an integral value to a 'Builder'.
module Data.Text.Lazy.Builder.Int
(
decimal
, hexadecimal
) where
import Data.Int (Int8, Int16, Int32, Int64)
import Data.Monoid (mempty)
import qualified Data.ByteString.Unsafe as B
import Data.Text.Internal.Builder.Functions ((<>), i2d)
import Data.Text.Internal.Builder
import Data.Text.Internal.Builder.Int.Digits (digits)
import Data.Text.Array
import Data.Word (Word, Word8, Word16, Word32, Word64)
import GHC.Base (quotInt, remInt)
import GHC.Num (quotRemInteger)
import GHC.Types (Int(..))
import Control.Monad.ST
#ifdef __GLASGOW_HASKELL__
# if defined(INTEGER_GMP)
import GHC.Integer.GMP.Internals
# elif defined(INTEGER_SIMPLE)
import GHC.Integer
# else
# error "You need to use either GMP or integer-simple."
# endif
#endif
#if defined(INTEGER_GMP) || defined(INTEGER_SIMPLE)
# define PAIR(a,b) (# a,b #)
#else
# define PAIR(a,b) (a,b)
#endif
decimal :: Integral a => a -> Builder
-- Rewrite rules redirect the known fixed-width and Integer types to their
-- fast paths; they can fire because 'decimal' itself is NOINLINE.
{-# RULES "decimal/Int8" decimal = boundedDecimal :: Int8 -> Builder #-}
{-# RULES "decimal/Int" decimal = boundedDecimal :: Int -> Builder #-}
{-# RULES "decimal/Int16" decimal = boundedDecimal :: Int16 -> Builder #-}
{-# RULES "decimal/Int32" decimal = boundedDecimal :: Int32 -> Builder #-}
{-# RULES "decimal/Int64" decimal = boundedDecimal :: Int64 -> Builder #-}
{-# RULES "decimal/Word" decimal = positive :: Word -> Builder #-}
{-# RULES "decimal/Word8" decimal = positive :: Word8 -> Builder #-}
{-# RULES "decimal/Word16" decimal = positive :: Word16 -> Builder #-}
{-# RULES "decimal/Word32" decimal = positive :: Word32 -> Builder #-}
{-# RULES "decimal/Word64" decimal = positive :: Word64 -> Builder #-}
{-# RULES "decimal/Integer" decimal = integer 10 :: Integer -> Builder #-}
-- Generic fallback: the predicate (<= -128) conservatively treats any value
-- at or below Int8's minBound as "might be this type's minBound", routing it
-- through the overflow-safe branch of decimal' — TODO confirm rationale
-- against upstream text.
decimal i = decimal' (<= -128) i
{-# NOINLINE decimal #-}
-- | Decimal rendering for 'Bounded' integral types: only the exact
-- @minBound@ value needs the overflow-safe path, since @negate minBound@
-- overflows in two's-complement fixed-width types.
boundedDecimal :: (Integral a, Bounded a) => a -> Builder
{-# SPECIALIZE boundedDecimal :: Int -> Builder #-}
{-# SPECIALIZE boundedDecimal :: Int8 -> Builder #-}
{-# SPECIALIZE boundedDecimal :: Int16 -> Builder #-}
{-# SPECIALIZE boundedDecimal :: Int32 -> Builder #-}
{-# SPECIALIZE boundedDecimal :: Int64 -> Builder #-}
boundedDecimal i = decimal' (== minBound) i
-- | Shared negative-number handling. @p i@ answers "might @i@ be minBound?".
-- When it might be, negating would overflow, so the last digit is peeled off
-- with quotRem first; the (now representable) negated quotient is rendered,
-- followed by the final digit.
decimal' :: (Integral a) => (a -> Bool) -> a -> Builder
{-# INLINE decimal' #-}
decimal' p i
  | i < 0 = if p i
            then let (q, r) = i `quotRem` 10
                     qq = -q
                     !n = countDigits qq
                 in writeN (n + 2) $ \marr off -> do
                      unsafeWrite marr off minus
                      posDecimal marr (off+1) n qq
                      unsafeWrite marr (off+n+1) (i2w (-r))
            else let j = -i
                     !n = countDigits j
                 in writeN (n + 1) $ \marr off ->
                      unsafeWrite marr off minus >> posDecimal marr (off+1) n j
  | otherwise = positive i
-- | Render a non-negative value: single digits are written directly,
-- longer numbers allocate exactly 'countDigits' slots for 'posDecimal'.
positive :: (Integral a) => a -> Builder
{-# SPECIALIZE positive :: Int -> Builder #-}
{-# SPECIALIZE positive :: Int8 -> Builder #-}
{-# SPECIALIZE positive :: Int16 -> Builder #-}
{-# SPECIALIZE positive :: Int32 -> Builder #-}
{-# SPECIALIZE positive :: Int64 -> Builder #-}
{-# SPECIALIZE positive :: Word -> Builder #-}
{-# SPECIALIZE positive :: Word8 -> Builder #-}
{-# SPECIALIZE positive :: Word16 -> Builder #-}
{-# SPECIALIZE positive :: Word32 -> Builder #-}
{-# SPECIALIZE positive :: Word64 -> Builder #-}
positive i
  | i < 10 = writeN 1 $ \marr off -> unsafeWrite marr off (i2w i)
  | otherwise = let !n = countDigits i
                in writeN n $ \marr off -> posDecimal marr off n i
-- | Write @ds@ decimal digits of @v0@ into @marr@, ending at index
-- @off0 + ds - 1@ and working backwards, consuming two digits per step via
-- the packed "00".."99" byte table 'digits'.
posDecimal :: (Integral a) =>
              forall s. MArray s -> Int -> Int -> a -> ST s ()
{-# INLINE posDecimal #-}
posDecimal marr off0 ds v0 = go (off0 + ds - 1) v0
  where go off v
          | v >= 100 = do
              let (q, r) = v `quotRem` 100
              write2 off r
              go (off - 2) q
          | v < 10 = unsafeWrite marr off (i2w v)
          | otherwise = write2 off v
        -- write2 writes the two digits of 0 <= i0 < 100 at off-1 and off.
        write2 off i0 = do
          let i = fromIntegral i0; j = i + i
          unsafeWrite marr off $ get (j + 1)
          unsafeWrite marr (off - 1) $ get j
        get = fromIntegral . B.unsafeIndex digits
-- Code units for '-' (45) and '0' (48); the builder buffer is Word16-based.
minus, zero :: Word16
{-# INLINE minus #-}
{-# INLINE zero #-}
minus = 45
zero = 48

-- | Convert a single digit value (0-9) to its code unit.
i2w :: (Integral a) => a -> Word16
{-# INLINE i2w #-}
i2w v = zero + fromIntegral v
-- | Base-10 digit count, computed on Word64 with a branch tree instead of a
-- division loop; values >= 10^12 recurse after stripping twelve digits.
-- Kept byte-identical: the threshold tree is performance-tuned.
-- NOTE(review): the fromIntegral to Word64 wraps negative inputs; callers
-- here pass non-negative magnitudes only — confirm before reusing elsewhere.
countDigits :: (Integral a) => a -> Int
{-# INLINE countDigits #-}
countDigits v0 = go 1 (fromIntegral v0 :: Word64)
  where go !k v
          | v < 10 = k
          | v < 100 = k + 1
          | v < 1000 = k + 2
          | v < 1000000000000 =
              k + if v < 100000000
                  then if v < 1000000
                       then if v < 10000
                            then 3
                            else 4 + fin v 100000
                       else 6 + fin v 10000000
                  else if v < 10000000000
                       then 8 + fin v 1000000000
                       else 10 + fin v 100000000000
          | otherwise = go (k + 12) (v `quot` 1000000000000)
        -- fin v n adds the extra digit when v has crossed threshold n.
        fin v n = if v >= n then 1 else 0
-- | Render a non-negative value in lowercase hexadecimal. Negative values
-- are rejected with 'error' ('hexErrMsg') rather than rendered in
-- two's complement.
hexadecimal :: Integral a => a -> Builder
{-# SPECIALIZE hexadecimal :: Int -> Builder #-}
{-# SPECIALIZE hexadecimal :: Int8 -> Builder #-}
{-# SPECIALIZE hexadecimal :: Int16 -> Builder #-}
{-# SPECIALIZE hexadecimal :: Int32 -> Builder #-}
{-# SPECIALIZE hexadecimal :: Int64 -> Builder #-}
{-# SPECIALIZE hexadecimal :: Word -> Builder #-}
{-# SPECIALIZE hexadecimal :: Word8 -> Builder #-}
{-# SPECIALIZE hexadecimal :: Word16 -> Builder #-}
{-# SPECIALIZE hexadecimal :: Word32 -> Builder #-}
{-# SPECIALIZE hexadecimal :: Word64 -> Builder #-}
{-# RULES "hexadecimal/Integer"
    hexadecimal = hexInteger :: Integer -> Builder #-}
hexadecimal i
  | i < 0 = error hexErrMsg
  | otherwise = go i
  where
    -- Digits are produced most-significant first by recursing on the quotient.
    go n | n < 16 = hexDigit n
         | otherwise = go (n `quot` 16) <> hexDigit (n `rem` 16)
-- NOINLINE until simplifier phase 0 so the Integer rewrite rule can fire.
{-# NOINLINE[0] hexadecimal #-}
-- | Render a non-negative 'Integer' in hexadecimal via the chunked
-- 'integer' path; negative input is a programming error.
hexInteger :: Integer -> Builder
hexInteger i =
  if i < 0
    then error hexErrMsg
    else integer 16 i

-- | Error text shared by the hexadecimal renderers.
hexErrMsg :: String
hexErrMsg = "Data.Text.Lazy.Builder.Int.hexadecimal: applied to negative number"
-- | Render one hex digit (0-15): numeric digits go through 'i2d', letters
-- @a@-@f@ use the ASCII offset 87 (= 'a' - 10). The @$!@ keeps the
-- character strict before it enters the builder.
hexDigit :: Integral a => a -> Builder
hexDigit d =
  if d <= 9
    then singleton $! i2d (fromIntegral d)
    else singleton $! toEnum (fromIntegral d + 87)
{-# INLINE hexDigit #-}

-- | Render an 'Int' in decimal.
int :: Int -> Builder
int = decimal
{-# INLINE int #-}
-- | Strict pair of (largest power of the base fitting in an Int, its
-- digit count); used by 'integer' to split huge numbers into chunks.
data T = T !Integer !Int
-- | Render an 'Integer' in the given base (10 or 16 only).  Small
-- integers (GMP @S#@) are dispatched to the fast fixed-width paths;
-- large ones are recursively split into Int-sized blocks via repeated
-- division by maxInt-sized powers of the base, then each block is
-- rendered with leading zeros ('pblock') except the most significant.
integer :: Int -> Integer -> Builder
#ifdef INTEGER_GMP
integer 10 (S# i#) = decimal (I# i#)
integer 16 (S# i#) = hexadecimal (I# i#)
#else
integer 10 i = decimal i
integer 16 i = hexadecimal i
#endif
integer base i
    | i < 0 = singleton '-' <> go (-i)
    | otherwise = go i
  where
    go n | n < maxInt = int (fromInteger n)
         | otherwise = putH (splitf (maxInt * maxInt) n)
    -- splitf/splith/splitb: divide-and-conquer split of n into a list of
    -- blocks, each < maxInt^2, most significant first.
    splitf p n
      | p > n = [n]
      | otherwise = splith p (splitf (p*p) n)
    splith p (n:ns) = case n `quotRemInteger` p of
                        PAIR(q,r) | q > 0 -> q : r : splitb p ns
                                  | otherwise -> r : splitb p ns
    splith _ _ = error "splith: the impossible happened."
    splitb p (n:ns) = case n `quotRemInteger` p of
                        PAIR(q,r) -> q : r : splitb p ns
    splitb _ _ = []
    -- Largest power of the base that still fits in an Int, with its
    -- digit count, computed once per base.
    T maxInt10 maxDigits10 =
        until ((>mi) . (*10) . fstT) (\(T n d) -> T (n*10) (d+1)) (T 10 1)
      where mi = fromIntegral (maxBound :: Int)
    T maxInt16 maxDigits16 =
        until ((>mi) . (*16) . fstT) (\(T n d) -> T (n*16) (d+1)) (T 16 1)
      where mi = fromIntegral (maxBound :: Int)
    fstT (T a _) = a
    maxInt | base == 10 = maxInt10
           | otherwise = maxInt16
    maxDigits | base == 10 = maxDigits10
              | otherwise = maxDigits16
    -- putH renders the head block without zero padding; putB pads every
    -- subsequent block to maxDigits so interior zeros are preserved.
    putH (n:ns) = case n `quotRemInteger` maxInt of
                    PAIR(x,y)
                        | q > 0 -> int q <> pblock r <> putB ns
                        | otherwise -> int r <> putB ns
        where q = fromInteger x
              r = fromInteger y
    putH _ = error "putH: the impossible happened"
    putB (n:ns) = case n `quotRemInteger` maxInt of
                    PAIR(x,y) -> pblock q <> pblock r <> putB ns
        where q = fromInteger x
              r = fromInteger y
    putB _ = mempty
    pblock = loop maxDigits
      where
        loop !d !n
            | d == 1 = hexDigit n
            | otherwise = loop (d-1) q <> hexDigit r
            where q = n `quotInt` base
                  r = n `remInt` base
|
ekmett/text
|
Data/Text/Lazy/Builder/Int.hs
|
bsd-2-clause
| 9,034
| 0
| 17
| 2,764
| 2,461
| 1,286
| 1,175
| 197
| 6
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Pattern-matching bindings (HsBinds and MonoBinds)
Handles @HsBinds@; those at the top level require different handling,
in that the @Rec@/@NonRec@/etc structure is thrown away (whereas at
lower levels it is preserved with @let@/@letrec@s).
-}
{-# LANGUAGE CPP #-}
module Language.Haskell.Liquid.Desugar710.DsBinds ( dsTopLHsBinds, dsLHsBinds, decomposeRuleLhs, dsSpec,
dsHsWrapper, dsTcEvBinds, dsEvBinds
) where
-- #include "HsVersions.h"
import {-# SOURCE #-} Language.Haskell.Liquid.Desugar710.DsExpr( dsLExpr )
import {-# SOURCE #-} Language.Haskell.Liquid.Desugar710.Match( matchWrapper )
import Prelude hiding (error)
import DsMonad
import Language.Haskell.Liquid.Desugar710.DsGRHSs
import Language.Haskell.Liquid.Desugar710.DsUtils
import HsSyn -- lots of things
import CoreSyn -- lots of things
import Literal ( Literal(MachStr) )
import CoreSubst
import OccurAnal ( occurAnalyseExpr )
import MkCore
import CoreUtils
import CoreArity ( etaExpand )
import CoreUnfold
import CoreFVs
import UniqSupply
import Digraph
import Module
import PrelNames
import TysPrim ( mkProxyPrimTy )
import TyCon ( tyConDataCons_maybe
, tyConName, isPromotedTyCon, isPromotedDataCon )
import TcEvidence
import TcType
import Type
import Coercion hiding (substCo)
import TysWiredIn ( eqBoxDataCon, coercibleDataCon, mkListTy
, mkBoxedTupleTy, stringTy, tupleCon )
import Id
import MkId(proxyHashId)
import Class
import DataCon ( dataConWorkId, dataConTyCon )
import Name
import MkId ( seqId )
import IdInfo ( IdDetails(..) )
import Var
import VarSet
import Rules
import VarEnv
import Outputable
import SrcLoc
import Maybes
import OrdList
import Bag
import BasicTypes hiding ( TopLevel )
import DynFlags
import FastString
import ErrUtils( MsgDoc )
import ListSetOps( getNth )
import Util
import Control.Monad( when )
import MonadUtils
import Control.Monad(liftM)
import Fingerprint(Fingerprint(..), fingerprintString)
{-
************************************************************************
* *
\subsection[dsMonoBinds]{Desugaring a @MonoBinds@}
* *
************************************************************************
-}
-- | Desugar a bag of top-level bindings.  At the top level the
-- Rec\/NonRec structure is discarded, so the flat OrdList of pairs
-- produced by the shared worker is returned directly.
dsTopLHsBinds :: LHsBinds Id -> DsM (OrdList (Id,CoreExpr))
dsTopLHsBinds = ds_lhs_binds
-- | Desugar a bag of (non-top-level) bindings to a plain list of pairs.
dsLHsBinds :: LHsBinds Id -> DsM [(Id,CoreExpr)]
dsLHsBinds binds = liftM fromOL (ds_lhs_binds binds)
------------------------
-- | Shared worker: desugar every binding in the bag and append the
-- resulting OrdLists together.
ds_lhs_binds :: LHsBinds Id -> DsM (OrdList (Id,CoreExpr))
ds_lhs_binds binds = liftM (foldBag appOL id nilOL) (mapBagM dsLHsBind binds)
-- | Desugar one located binding, installing its source span for any
-- diagnostics emitted while desugaring.
dsLHsBind :: LHsBind Id -> DsM (OrdList (Id,CoreExpr))
dsLHsBind (L loc bind) = putSrcSpanDs loc (dsHsBind bind)
-- | Desugar a single Haskell binding to Core (Id, rhs) pairs.
--   * VarBind: dictionary bindings produced by the typechecker.
--   * FunBind: ordinary function bindings.
--   * PatBind: pattern bindings, via selector bindings.
--   * AbsBinds: type\/evidence abstraction; the one-export case is
--     handled specially, the general case builds a tuple.
--     See Note [Desugaring AbsBinds].
dsHsBind :: HsBind Id -> DsM (OrdList (Id,CoreExpr))
dsHsBind (VarBind { var_id = var, var_rhs = expr, var_inline = inline_regardless })
  = do  { dflags <- getDynFlags
        ; core_expr <- dsLExpr expr
               -- Dictionary bindings are always VarBinds,
               -- so we only need do this here
        ; let var' | inline_regardless = var `setIdUnfolding` mkCompulsoryUnfolding core_expr
                   | otherwise = var
        ; return (unitOL (makeCorePair dflags var' False 0 core_expr)) }
dsHsBind (FunBind { fun_id = L _ fun, fun_matches = matches
                  , fun_co_fn = co_fn, fun_tick = tick
                  , fun_infix = inf })
 = do   { dflags <- getDynFlags
        ; (args, body) <- matchWrapper (FunRhs (idName fun) inf) matches
        ; let body' = mkOptTickBox tick body
        ; rhs <- dsHsWrapper co_fn (mkLams args body')
        ; {- pprTrace "dsHsBind" (ppr fun <+> ppr (idInlinePragma fun)) $ -}
           return (unitOL (makeCorePair dflags fun False 0 rhs)) }
dsHsBind (PatBind { pat_lhs = pat, pat_rhs = grhss, pat_rhs_ty = ty
                  , pat_ticks = (rhs_tick, var_ticks) })
  = do  { body_expr <- dsGuarded grhss ty
        ; let body' = mkOptTickBox rhs_tick body_expr
        ; sel_binds <- mkSelectorBinds var_ticks pat body'
          -- We silently ignore inline pragmas; no makeCorePair
          -- Not so cool, but really doesn't matter
        ; return (toOL sel_binds) }
        -- A common case: one exported variable
        -- Non-recursive bindings come through this way
        -- So do self-recursive bindings, and recursive bindings
        -- that have been chopped up with type signatures
dsHsBind (AbsBinds { abs_tvs = tyvars, abs_ev_vars = dicts
                   , abs_exports = [export]
                   , abs_ev_binds = ev_binds, abs_binds = binds })
  | ABE { abe_wrap = wrap, abe_poly = global
        , abe_mono = local, abe_prags = prags } <- export
  = do  { dflags <- getDynFlags
        ; bind_prs <- ds_lhs_binds binds
        ; let core_bind = Rec (fromOL bind_prs)
        ; ds_binds <- dsTcEvBinds ev_binds
        ; rhs <- dsHsWrapper wrap $  -- Usually the identity
                 mkLams tyvars $ mkLams dicts $
                 mkCoreLets ds_binds $
                 Let core_bind $
                 Var local
        ; (spec_binds, rules) <- dsSpecs rhs prags
        ; let global' = addIdSpecialisations global rules
              main_bind = makeCorePair dflags global' (isDefaultMethod prags)
                                       (dictArity dicts) rhs
        ; return (main_bind `consOL` spec_binds) }
dsHsBind (AbsBinds { abs_tvs = tyvars, abs_ev_vars = dicts
                   , abs_exports = exports, abs_ev_binds = ev_binds
                   , abs_binds = binds })
         -- See Note [Desugaring AbsBinds]
  = do  { dflags <- getDynFlags
        ; bind_prs <- ds_lhs_binds binds
        ; let core_bind = Rec [ makeCorePair dflags (add_inline lcl_id) False 0 rhs
                              | (lcl_id, rhs) <- fromOL bind_prs ]
                -- Monomorphic recursion possible, hence Rec
              locals = map abe_mono exports
              tup_expr = mkBigCoreVarTup locals
              tup_ty = exprType tup_expr
        ; ds_binds <- dsTcEvBinds ev_binds
        ; let poly_tup_rhs = mkLams tyvars $ mkLams dicts $
                             mkCoreLets ds_binds $
                             Let core_bind $
                             tup_expr
        ; poly_tup_id <- newSysLocalDs (exprType poly_tup_rhs)
        ; let mk_bind (ABE { abe_wrap = wrap, abe_poly = global
                           , abe_mono = local, abe_prags = spec_prags })
                = do { tup_id <- newSysLocalDs tup_ty
                     ; rhs <- dsHsWrapper wrap $
                              mkLams tyvars $ mkLams dicts $
                              mkTupleSelector locals local tup_id $
                              mkVarApps (Var poly_tup_id) (tyvars ++ dicts)
                     ; let rhs_for_spec = Let (NonRec poly_tup_id poly_tup_rhs) rhs
                     ; (spec_binds, rules) <- dsSpecs rhs_for_spec spec_prags
                     ; let global' = (global `setInlinePragma` defaultInlinePragma)
                                     `addIdSpecialisations` rules
                           -- Kill the INLINE pragma because it applies to
                           -- the user written (local) function. The global
                           -- Id is just the selector. Hmm.
                     ; return ((global', rhs) `consOL` spec_binds) }
        ; export_binds_s <- mapM mk_bind exports
        ; return ((poly_tup_id, poly_tup_rhs) `consOL`
                  concatOL export_binds_s) }
  where
    inline_env :: IdEnv Id -- Maps a monomorphic local Id to one with
                           -- the inline pragma from the source
                           -- The type checker put the inline pragma
                           -- on the *global* Id, so we need to transfer it
    inline_env = mkVarEnv [ (lcl_id, setInlinePragma lcl_id prag)
                          | ABE { abe_mono = lcl_id, abe_poly = gbl_id } <- exports
                          , let prag = idInlinePragma gbl_id ]
    add_inline :: Id -> Id -- tran
    add_inline lcl_id = lookupVarEnv inline_env lcl_id `orElse` lcl_id
dsHsBind (PatSynBind{}) = panic "dsHsBind: PatSynBind"
------------------------
-- | Pair a global Id with its Core rhs, fixing up the Id's unfolding and
-- inline pragma: default methods get a compulsory unfolding, DFuns get a
-- DFunUnfolding, and INLINE\/INLINABLE pragmas are honoured (with
-- eta-expansion for INLINE; see Note [Eta-expanding INLINE things]).
makeCorePair :: DynFlags -> Id -> Bool -> Arity -> CoreExpr -> (Id, CoreExpr)
makeCorePair dflags gbl_id is_default_method dict_arity rhs
  | is_default_method -- Default methods are *always* inlined
  = (gbl_id `setIdUnfolding` mkCompulsoryUnfolding rhs, rhs)
  | DFunId _ is_newtype <- idDetails gbl_id
  = (mk_dfun_w_stuff is_newtype, rhs)
  | otherwise
  = case inlinePragmaSpec inline_prag of
          EmptyInlineSpec -> (gbl_id, rhs)
          NoInline -> (gbl_id, rhs)
          Inlinable -> (gbl_id `setIdUnfolding` inlinable_unf, rhs)
          Inline -> inline_pair
  where
    inline_prag = idInlinePragma gbl_id
    inlinable_unf = mkInlinableUnfolding dflags rhs
    inline_pair
       | Just arity <- inlinePragmaSat inline_prag
        -- Add an Unfolding for an INLINE (but not for NOINLINE)
        -- And eta-expand the RHS; see Note [Eta-expanding INLINE things]
       , let real_arity = dict_arity + arity
        -- NB: The arity in the InlineRule takes account of the dictionaries
       = ( gbl_id `setIdUnfolding` mkInlineUnfolding (Just real_arity) rhs
         , etaExpand real_arity rhs)
       | otherwise
       = pprTrace "makeCorePair: arity missing" (ppr gbl_id) $
         (gbl_id `setIdUnfolding` mkInlineUnfolding Nothing rhs, rhs)
                -- See Note [ClassOp/DFun selection] in TcInstDcls
                -- See Note [Single-method classes] in TcInstDcls
    mk_dfun_w_stuff is_newtype
       | is_newtype
       = gbl_id `setIdUnfolding` mkInlineUnfolding (Just 0) rhs
                `setInlinePragma` alwaysInlinePragma { inl_sat = Just 0 }
       | otherwise
       = gbl_id `setIdUnfolding` mkDFunUnfolding dfun_bndrs dfun_constr dfun_args
                `setInlinePragma` dfunInlinePragma
    (dfun_bndrs, dfun_body) = collectBinders (simpleOptExpr rhs)
    (dfun_con, dfun_args, _) = collectArgsTicks (const True) dfun_body
    dfun_constr | Var id <- dfun_con
                , DataConWorkId con <- idDetails id
                = con
                | otherwise = pprPanic "makeCorePair: dfun" (ppr rhs)
-- | Arity contributed by a list of evidence binders.
dictArity :: [Var] -> Arity
-- Don't count coercion variables in arity
dictArity = count isId
{-
[Desugaring AbsBinds]
~~~~~~~~~~~~~~~~~~~~~
In the general AbsBinds case we desugar the binding to this:
tup a (d:Num a) = let fm = ...gm...
gm = ...fm...
in (fm,gm)
f a d = case tup a d of { (fm,gm) -> fm }
   g a d = case tup a d of { (fm,gm) -> gm }
Note [Rules and inlining]
~~~~~~~~~~~~~~~~~~~~~~~~~
Common special case: no type or dictionary abstraction
This is a bit less trivial than you might suppose
The naive way would be to desugar to something like
f_lcl = ...f_lcl... -- The "binds" from AbsBinds
M.f = f_lcl -- Generated from "exports"
But we don't want that, because if M.f isn't exported,
it'll be inlined unconditionally at every call site (its rhs is
trivial). That would be ok unless it has RULES, which would
thereby be completely lost. Bad, bad, bad.
Instead we want to generate
M.f = ...f_lcl...
f_lcl = M.f
Now all is cool. The RULES are attached to M.f (by SimplCore),
and f_lcl is rapidly inlined away.
This does not happen in the same way to polymorphic binds,
because they desugar to
M.f = /\a. let f_lcl = ...f_lcl... in f_lcl
Although I'm a bit worried about whether full laziness might
float the f_lcl binding out and then inline M.f at its call site
Note [Specialising in no-dict case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Even if there are no tyvars or dicts, we may have specialisation pragmas.
Class methods can generate
AbsBinds [] [] [( ... spec-prag]
{ AbsBinds [tvs] [dicts] ...blah }
So the overloading is in the nested AbsBinds. A good example is in GHC.Float:
class (Real a, Fractional a) => RealFrac a where
round :: (Integral b) => a -> b
instance RealFrac Float where
{-# SPECIALIZE round :: Float -> Int #-}
The top-level AbsBinds for $cround has no tyvars or dicts (because the
instance does not). But the method is locally overloaded!
Note [Abstracting over tyvars only]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When abstracting over type variable only (not dictionaries), we don't really need to
built a tuple and select from it, as we do in the general case. Instead we can take
AbsBinds [a,b] [ ([a,b], fg, fl, _),
([b], gg, gl, _) ]
{ fl = e1
gl = e2
h = e3 }
and desugar it to
fg = /\ab. let B in e1
gg = /\b. let a = () in let B in S(e2)
h = /\ab. let B in e3
where B is the *non-recursive* binding
fl = fg a b
gl = gg b
h = h a b -- See (b); note shadowing!
Notice (a) g has a different number of type variables to f, so we must
use the mkArbitraryType thing to fill in the gaps.
We use a type-let to do that.
(b) The local variable h isn't in the exports, and rather than
clone a fresh copy we simply replace h by (h a b), where
the two h's have different types! Shadowing happens here,
which looks confusing but works fine.
(c) The result is *still* quadratic-sized if there are a lot of
small bindings. So if there are more than some small
number (10), we filter the binding set B by the free
variables of the particular RHS. Tiresome.
Why got to this trouble? It's a common case, and it removes the
quadratic-sized tuple desugaring. Less clutter, hopefully faster
compilation, especially in a case where there are a *lot* of
bindings.
Note [Eta-expanding INLINE things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
foo :: Eq a => a -> a
{-# INLINE foo #-}
foo x = ...
If (foo d) ever gets floated out as a common sub-expression (which can
happen as a result of method sharing), there's a danger that we never
get to do the inlining, which is a Terribly Bad thing given that the
user said "inline"!
To avoid this we pre-emptively eta-expand the definition, so that foo
has the arity with which it is declared in the source code. In this
example it has arity 2 (one for the Eq and one for x). Doing this
should mean that (foo d) is a PAP and we don't share it.
Note [Nested arities]
~~~~~~~~~~~~~~~~~~~~~
For reasons that are not entirely clear, method bindings come out looking like
this:
AbsBinds [] [] [$cfromT <= [] fromT]
$cfromT [InlPrag=INLINE] :: T Bool -> Bool
{ AbsBinds [] [] [fromT <= [] fromT_1]
fromT :: T Bool -> Bool
{ fromT_1 ((TBool b)) = not b } } }
Note the nested AbsBind. The arity for the InlineRule on $cfromT should be
gotten from the binding for fromT_1.
It might be better to have just one level of AbsBinds, but that requires more
thought!
Note [Implementing SPECIALISE pragmas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Example:
f :: (Eq a, Ix b) => a -> b -> Bool
{-# SPECIALISE f :: (Ix p, Ix q) => Int -> (p,q) -> Bool #-}
f = <poly_rhs>
From this the typechecker generates
AbsBinds [ab] [d1,d2] [([ab], f, f_mono, prags)] binds
SpecPrag (wrap_fn :: forall a b. (Eq a, Ix b) => XXX
-> forall p q. (Ix p, Ix q) => XXX[ Int/a, (p,q)/b ])
Note that wrap_fn can transform *any* function with the right type prefix
forall ab. (Eq a, Ix b) => XXX
regardless of XXX. It's sort of polymorphic in XXX. This is
useful: we use the same wrapper to transform each of the class ops, as
well as the dict.
From these we generate:
Rule: forall p, q, (dp:Ix p), (dq:Ix q).
f Int (p,q) dInt ($dfInPair dp dq) = f_spec p q dp dq
Spec bind: f_spec = wrap_fn <poly_rhs>
Note that
* The LHS of the rule may mention dictionary *expressions* (eg
$dfIxPair dp dq), and that is essential because the dp, dq are
needed on the RHS.
* The RHS of f_spec, <poly_rhs> has a *copy* of 'binds', so that it
can fully specialise it.
-}
------------------------
-- | Desugar the SPECIALISE pragmas attached to a binding, producing the
-- bindings for the specialised Ids together with the RULEs that redirect
-- calls to them.  See Note [Implementing SPECIALISE pragmas].
dsSpecs :: CoreExpr     -- Its rhs
        -> TcSpecPrags
        -> DsM ( OrdList (Id,CoreExpr)  -- Binding for specialised Ids
               , [CoreRule] )           -- Rules for the Global Ids
dsSpecs _ IsDefaultMethod = return (nilOL, [])
dsSpecs poly_rhs (SpecPrags sps)
  = do { (bind_lists, rules) <- liftM unzip (mapMaybeM (dsSpec (Just poly_rhs)) sps)
       ; return (concatOL bind_lists, rules) }
-- | Desugar one SPECIALISE pragma into a (specialised-Id binding, RULE)
-- pair.  Returns Nothing (with a warning) for class-method selectors and
-- NOINLINE functions, where specialisation would be useless.
-- See Note [Activation pragmas for SPECIALISE].
dsSpec :: Maybe CoreExpr        -- Just rhs => RULE is for a local binding
                                -- Nothing => RULE is for an imported Id
                                -- rhs is in the Id's unfolding
       -> Located TcSpecPrag
       -> DsM (Maybe (OrdList (Id,CoreExpr), CoreRule))
dsSpec mb_poly_rhs (L loc (SpecPrag poly_id spec_co spec_inl))
  | isJust (isClassOpId_maybe poly_id)
  = putSrcSpanDs loc $
    do { warnDs (ptext (sLit "Ignoring useless SPECIALISE pragma for class method selector")
                 <+> quotes (ppr poly_id))
       ; return Nothing } -- There is no point in trying to specialise a class op
                          -- Moreover, classops don't (currently) have an inl_sat arity set
                          -- (it would be Just 0) and that in turn makes makeCorePair bleat
  | no_act_spec && isNeverActive rule_act
  = putSrcSpanDs loc $
    do { warnDs (ptext (sLit "Ignoring useless SPECIALISE pragma for NOINLINE function:")
                 <+> quotes (ppr poly_id))
       ; return Nothing } -- Function is NOINLINE, and the specialiation inherits that
                          -- See Note [Activation pragmas for SPECIALISE]
  | otherwise
  = putSrcSpanDs loc $
    do { uniq <- newUnique
       ; let poly_name = idName poly_id
             spec_occ = mkSpecOcc (getOccName poly_name)
             spec_name = mkInternalName uniq spec_occ (getSrcSpan poly_name)
       ; (bndrs, ds_lhs) <- liftM collectBinders
                                  (dsHsWrapper spec_co (Var poly_id))
       ; let spec_ty = mkPiTypes bndrs (exprType ds_lhs)
       ; -- pprTrace "dsRule" (vcat [ ptext (sLit "Id:") <+> ppr poly_id
         -- , ptext (sLit "spec_co:") <+> ppr spec_co
         -- , ptext (sLit "ds_rhs:") <+> ppr ds_lhs ]) $
         case decomposeRuleLhs bndrs ds_lhs of {
           Left msg -> do { warnDs msg; return Nothing } ;
           Right (rule_bndrs, _fn, args) -> do
       { dflags <- getDynFlags
       ; let fn_unf = realIdUnfolding poly_id
             unf_fvs = stableUnfoldingVars fn_unf `orElse` emptyVarSet
             in_scope = mkInScopeSet (unf_fvs `unionVarSet` exprsFreeVars args)
             spec_unf = specUnfolding dflags (mkEmptySubst in_scope) bndrs args fn_unf
             spec_id = mkLocalId spec_name spec_ty
                            `setInlinePragma` inl_prag
                            `setIdUnfolding` spec_unf
             rule = mkRule False {- Not auto -} is_local_id
                        (mkFastString ("SPEC " ++ showPpr dflags poly_name))
                        rule_act poly_name
                        rule_bndrs args
                        (mkVarApps (Var spec_id) bndrs)
       ; spec_rhs <- dsHsWrapper spec_co poly_rhs
       ; when (isInlinePragma id_inl && wopt Opt_WarnPointlessPragmas dflags)
              (warnDs (specOnInline poly_name))
       ; return (Just (unitOL (spec_id, spec_rhs), rule))
            -- NB: do *not* use makeCorePair on (spec_id,spec_rhs), because
            -- makeCorePair overwrites the unfolding, which we have
            -- just created using specUnfolding
       } } }
  where
    is_local_id = isJust mb_poly_rhs
    poly_rhs | Just rhs <- mb_poly_rhs
             = rhs -- Local Id; this is its rhs
             | Just unfolding <- maybeUnfoldingTemplate (realIdUnfolding poly_id)
             = unfolding -- Imported Id; this is its unfolding
                         -- Use realIdUnfolding so we get the unfolding
                         -- even when it is a loop breaker.
                         -- We want to specialise recursive functions!
             | otherwise = pprPanic "dsImpSpecs" (ppr poly_id)
                         -- The type checker has checked that it *has* an unfolding
    id_inl = idInlinePragma poly_id
    -- See Note [Activation pragmas for SPECIALISE]
    inl_prag | not (isDefaultInlinePragma spec_inl) = spec_inl
             | not is_local_id -- See Note [Specialising imported functions]
                               -- in OccurAnal
             , isStrongLoopBreaker (idOccInfo poly_id) = neverInlinePragma
             | otherwise = id_inl
      -- Get the INLINE pragma from SPECIALISE declaration, or,
      -- failing that, from the original Id
    spec_prag_act = inlinePragmaActivation spec_inl
    -- See Note [Activation pragmas for SPECIALISE]
    -- no_act_spec is True if the user didn't write an explicit
    -- phase specification in the SPECIALISE pragma
    no_act_spec = case inlinePragmaSpec spec_inl of
                    NoInline -> isNeverActive spec_prag_act
                    _ -> isAlwaysActive spec_prag_act
    rule_act | no_act_spec = inlinePragmaActivation id_inl -- Inherit
             | otherwise = spec_prag_act -- Specified by user
-- | Warning text for a SPECIALISE pragma on an INLINE function: the
-- function will usually be inlined before the rule can fire.
specOnInline :: Name -> MsgDoc
specOnInline fn
  = ptext (sLit "SPECIALISE pragma on INLINE function probably won't fire:")
    <+> quotes (ppr fn)
{-
Note [Activation pragmas for SPECIALISE]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
From a user SPECIALISE pragma for f, we generate
a) A top-level binding spec_fn = rhs
b) A RULE f dOrd = spec_fn
We need two pragma-like things:
* spec_fn's inline pragma: inherited from f's inline pragma (ignoring
     activation on SPEC), unless overridden by SPEC INLINE
* Activation of RULE: from SPECIALISE pragma (if activation given)
otherwise from f's inline pragma
This is not obvious (see Trac #5237)!
Examples Rule activation Inline prag on spec'd fn
---------------------------------------------------------------------
SPEC [n] f :: ty [n] Always, or NOINLINE [n]
copy f's prag
NOINLINE f
SPEC [n] f :: ty [n] NOINLINE
copy f's prag
NOINLINE [k] f
SPEC [n] f :: ty [n] NOINLINE [k]
copy f's prag
INLINE [k] f
SPEC [n] f :: ty [n] INLINE [k]
copy f's prag
SPEC INLINE [n] f :: ty [n] INLINE [n]
(ignore INLINE prag on f,
same activation for rule and spec'd fn)
NOINLINE [k] f
SPEC f :: ty [n] INLINE [k]
************************************************************************
* *
\subsection{Adding inline pragmas}
* *
************************************************************************
-}
-- | Take apart the LHS of a RULE into (binders, head function, args).
-- Drops redundant dictionary lets (drop_dicts), simplifies the LHS, and
-- may add extra dictionary binders.  Fails with a diagnostic if the LHS
-- is not of the expected shape or mentions unbound forall'd variables.
decomposeRuleLhs :: [Var] -> CoreExpr -> Either SDoc ([Var], Id, [CoreExpr])
-- (decomposeRuleLhs bndrs lhs) takes apart the LHS of a RULE,
-- The 'bndrs' are the quantified binders of the rules, but decomposeRuleLhs
-- may add some extra dictionary binders (see Note [Free dictionaries])
--
-- Returns Nothing if the LHS isn't of the expected shape
-- Note [Decomposing the left-hand side of a RULE]
decomposeRuleLhs orig_bndrs orig_lhs
  | not (null unbound) -- Check for things unbound on LHS
                       -- See Note [Unused spec binders]
  = Left (vcat (map dead_msg unbound))
  | Var fn_var <- fun
  , not (fn_var `elemVarSet` orig_bndr_set)
  = -- pprTrace "decmposeRuleLhs" (vcat [ ptext (sLit "orig_bndrs:") <+> ppr orig_bndrs
    -- , ptext (sLit "orig_lhs:") <+> ppr orig_lhs
    -- , ptext (sLit "lhs1:") <+> ppr lhs1
    -- , ptext (sLit "bndrs1:") <+> ppr bndrs1
    -- , ptext (sLit "fn_var:") <+> ppr fn_var
    -- , ptext (sLit "args:") <+> ppr args]) $
    Right (bndrs1, fn_var, args)
  | Case scrut bndr ty [(DEFAULT, _, body)] <- fun
  , isDeadBinder bndr -- Note [Matching seqId]
  , let args' = [Type (idType bndr), Type ty, scrut, body]
  = Right (bndrs1, seqId, args' ++ args)
  | otherwise
  = Left bad_shape_msg
  where
    lhs1 = drop_dicts orig_lhs
    lhs2 = simpleOptExpr lhs1 -- See Note [Simplify rule LHS]
    (fun,args) = collectArgs lhs2
    lhs_fvs = exprFreeVars lhs2
    unbound = filterOut (`elemVarSet` lhs_fvs) orig_bndrs
    bndrs1 = orig_bndrs ++ extra_dict_bndrs
    orig_bndr_set = mkVarSet orig_bndrs
        -- Add extra dict binders: Note [Free dictionaries]
    extra_dict_bndrs = [ mkLocalId (localiseName (idName d)) (idType d)
                       | d <- varSetElems (lhs_fvs `delVarSetList` orig_bndrs)
                       , isDictId d ]
    bad_shape_msg = hang (ptext (sLit "RULE left-hand side too complicated to desugar"))
                       2 (vcat [ text "Optimised lhs:" <+> ppr lhs2
                               , text "Orig lhs:" <+> ppr orig_lhs])
    dead_msg bndr = hang (sep [ ptext (sLit "Forall'd") <+> pp_bndr bndr
                              , ptext (sLit "is not bound in RULE lhs")])
                       2 (vcat [ text "Orig bndrs:" <+> ppr orig_bndrs
                               , text "Orig lhs:" <+> ppr orig_lhs
                               , text "optimised lhs:" <+> ppr lhs2 ])
    pp_bndr bndr
     | isTyVar bndr = ptext (sLit "type variable") <+> quotes (ppr bndr)
     | Just pred <- evVarPred_maybe bndr = ptext (sLit "constraint") <+> quotes (ppr pred)
     | otherwise = ptext (sLit "variable") <+> quotes (ppr bndr)
    -- Peel off leading dictionary lets and re-wrap only those whose rhs
    -- can supply a still-needed binder.
    -- See Note [Drop dictionary bindings on rule LHS].
    drop_dicts :: CoreExpr -> CoreExpr
    drop_dicts e
        = wrap_lets needed bnds body
      where
        needed = orig_bndr_set `minusVarSet` exprFreeVars body
        (bnds, body) = split_lets (occurAnalyseExpr e)
            -- The occurAnalyseExpr drops dead bindings which is
            -- crucial to ensure that every binding is used later;
            -- which in turn makes wrap_lets work right
    split_lets :: CoreExpr -> ([(DictId,CoreExpr)], CoreExpr)
    split_lets e
      | Let (NonRec d r) body <- e
      , isDictId d
      , (bs, body') <- split_lets body
      = ((d,r):bs, body')
      | otherwise
      = ([], e)
    wrap_lets :: VarSet -> [(DictId,CoreExpr)] -> CoreExpr -> CoreExpr
    wrap_lets _ [] body = body
    wrap_lets needed ((d, r) : bs) body
      | rhs_fvs `intersectsVarSet` needed = Let (NonRec d r) (wrap_lets needed' bs body)
      | otherwise = wrap_lets needed bs body
      where
        rhs_fvs = exprFreeVars r
        needed' = (needed `minusVarSet` rhs_fvs) `extendVarSet` d
{-
Note [Decomposing the left-hand side of a RULE]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are several things going on here.
* drop_dicts: see Note [Drop dictionary bindings on rule LHS]
* simpleOptExpr: see Note [Simplify rule LHS]
* extra_dict_bndrs: see Note [Free dictionaries]
Note [Drop dictionary bindings on rule LHS]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
drop_dicts drops dictionary bindings on the LHS where possible.
E.g. let d:Eq [Int] = $fEqList $fEqInt in f d
--> f d
Reasoning here is that there is only one d:Eq [Int], and so we can
quantify over it. That makes 'd' free in the LHS, but that is later
picked up by extra_dict_bndrs (Note [Dead spec binders]).
NB 1: We can only drop the binding if the RHS doesn't bind
one of the orig_bndrs, which we assume occur on RHS.
Example
f :: (Eq a) => b -> a -> a
{-# SPECIALISE f :: Eq a => b -> [a] -> [a] #-}
Here we want to end up with
RULE forall d:Eq a. f ($dfEqList d) = f_spec d
Of course, the ($dfEqlist d) in the pattern makes it less likely
to match, but there is no other way to get d:Eq a
NB 2: We do drop_dicts *before* simpleOptExpr, so that we expect all
the evidence bindings to be wrapped around the outside of the
LHS. (After simplOptExpr they'll usually have been inlined.)
dsHsWrapper does dependency analysis, so that civilised ones
will be simple NonRec bindings. We don't handle recursive
dictionaries!
NB3: In the common case of a non-overloaded, but perhaps-polymorphic
specialisation, we don't need to bind *any* dictionaries for use
in the RHS. For example (Trac #8331)
{-# SPECIALIZE INLINE useAbstractMonad :: ReaderST s Int #-}
useAbstractMonad :: MonadAbstractIOST m => m Int
Here, deriving (MonadAbstractIOST (ReaderST s)) is a lot of code
but the RHS uses no dictionaries, so we want to end up with
RULE forall s (d :: MonadBstractIOST (ReaderT s)).
useAbstractMonad (ReaderT s) d = $suseAbstractMonad s
Trac #8848 is a good example of where there are some interesting
dictionary bindings to discard.
The drop_dicts algorithm is based on these observations:
* Given (let d = rhs in e) where d is a DictId,
matching 'e' will bind e's free variables.
* So we want to keep the binding if one of the needed variables (for
which we need a binding) is in fv(rhs) but not already in fv(e).
* The "needed variables" are simply the orig_bndrs. Consider
f :: (Eq a, Show b) => a -> b -> String
... SPECIALISE f :: (Show b) => Int -> b -> String ...
Then orig_bndrs includes the *quantified* dictionaries of the type
namely (dsb::Show b), but not the one for Eq Int
So we work inside out, applying the above criterion at each step.
Note [Simplify rule LHS]
~~~~~~~~~~~~~~~~~~~~~~~~
simplOptExpr occurrence-analyses and simplifies the LHS:
(a) Inline any remaining dictionary bindings (which hopefully
occur just once)
(b) Substitute trivial lets so that they don't get in the way
Note that we substitute the function too; we might
have this as a LHS: let f71 = M.f Int in f71
(c) Do eta reduction. To see why, consider the fold/build rule,
which without simplification looked like:
fold k z (build (/\a. g a)) ==> ...
This doesn't match unless you do eta reduction on the build argument.
Similarly for a LHS like
augment g (build h)
we do not want to get
augment (\a. g a) (build h)
otherwise we don't match when given an argument like
augment (\a. h a a) (build h)
Note [Matching seqId]
~~~~~~~~~~~~~~~~~~~
The desugarer turns (seq e r) into (case e of _ -> r), via a special-case hack
and this code turns it back into an application of seq!
See Note [Rules for seq] in MkId for the details.
Note [Unused spec binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f :: a -> a
... SPECIALISE f :: Eq a => a -> a ...
It's true that this *is* a more specialised type, but the rule
we get is something like this:
f_spec d = f
RULE: f = f_spec d
Note that the rule is bogus, because it mentions a 'd' that is
not bound on the LHS! But it's a silly specialisation anyway, because
the constraint is unused. We could bind 'd' to (error "unused")
but it seems better to reject the program because it's almost certainly
a mistake. That's what the isDeadBinder call detects.
Note [Free dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~
When the LHS of a specialisation rule, (/\as\ds. f es) has a free dict,
which is presumably in scope at the function definition site, we can quantify
over it too. *Any* dict with that type will do.
So for example when you have
f :: Eq a => a -> a
f = <rhs>
... SPECIALISE f :: Int -> Int ...
Then we get the SpecPrag
SpecPrag (f Int dInt)
And from that we want the rule
RULE forall dInt. f Int dInt = f_spec
f_spec = let f = <rhs> in f Int dInt
But be careful! That dInt might be GHC.Base.$fOrdInt, which is an External
Name, and you can't bind them in a lambda or forall without getting things
confused. Likewise it might have an InlineRule or something, which would be
utterly bogus. So we really make a fresh Id, with the same unique and type
as the old one, but with an Internal name and no IdInfo.
************************************************************************
* *
Desugaring evidence
* *
************************************************************************
-}
-- | Apply an 'HsWrapper' (the typechecker's record of type\/evidence
-- applications, abstractions, lets, casts and compositions) to an
-- already-desugared Core expression.
dsHsWrapper :: HsWrapper -> CoreExpr -> DsM CoreExpr
dsHsWrapper WpHole            e = return e
dsHsWrapper (WpTyApp ty)      e = return (App e (Type ty))
dsHsWrapper (WpEvApp tm)      e = liftM (App e) (dsEvTerm tm)
dsHsWrapper (WpEvLam ev)      e = return (Lam ev e)
dsHsWrapper (WpTyLam tv)      e = return (Lam tv e)
dsHsWrapper (WpLet ev_binds)  e = do { bs <- dsTcEvBinds ev_binds
                                     ; return (mkCoreLets bs e) }
dsHsWrapper (WpCompose c1 c2) e = dsHsWrapper c2 e >>= dsHsWrapper c1
dsHsWrapper (WpFun c1 c2 t1 _) e
  = do { x  <- newSysLocalDs t1
       ; e1 <- dsHsWrapper c1 (Var x)
       ; e2 <- dsHsWrapper c2 (e `mkCoreAppDs` e1)
       ; return (Lam x e2) }
dsHsWrapper (WpCast co) e = -- ASSERT(tcCoercionRole co == Representational)
                            dsTcCoercion co (mkCast e)
--------------------------------------
-- | Desugar typechecker evidence bindings.  The mutable 'TcEvBinds'
-- form must have been eliminated by the zonker before we get here.
dsTcEvBinds :: TcEvBinds -> DsM [CoreBind]
dsTcEvBinds (EvBinds bs)   = dsEvBinds bs
dsTcEvBinds (TcEvBinds {}) = panic "dsEvBinds" -- Zonker has got rid of this
-- | Desugar a bag of evidence bindings, grouping them into
-- strongly-connected components so mutually-dependent evidence becomes
-- a Rec binding and everything else a NonRec.
dsEvBinds :: Bag EvBind -> DsM [CoreBind]
dsEvBinds bs = mapM dsSCC (sccEvBinds bs)
  where
    dsSCC (AcyclicSCC (EvBind v r)) = liftM (NonRec v) (dsEvTerm r)
    dsSCC (CyclicSCC binds)         = liftM Rec (mapM dsPair binds)
    dsPair (EvBind v r)             = liftM ((,) v) (dsEvTerm r)
-- | Dependency-analyse evidence bindings: each binding becomes a graph
-- node whose out-edges are the evidence variables its term mentions.
sccEvBinds :: Bag EvBind -> [SCC EvBind]
sccEvBinds bs = stronglyConnCompFromEdgedVertices (foldrBag ((:) . toNode) [] bs)
  where
    toNode :: EvBind -> (EvBind, EvVar, [EvVar])
    toNode b@(EvBind var term) = (b, var, varSetElems (evVarsOfTerm term))
---------------------------------------
dsEvTerm :: EvTerm -> DsM CoreExpr
dsEvTerm (EvId v) = return (Var v)
dsEvTerm (EvCast tm co)
= do { tm' <- dsEvTerm tm
; dsTcCoercion co $ mkCast tm' }
-- 'v' is always a lifted evidence variable so it is
-- unnecessary to call varToCoreExpr v here.
dsEvTerm (EvDFunApp df tys tms) = do { tms' <- mapM dsEvTerm tms
; return (Var df `mkTyApps` tys `mkApps` tms') }
dsEvTerm (EvCoercion (TcCoVarCo v)) = return (Var v) -- See Note [Simple coercions]
dsEvTerm (EvCoercion co) = dsTcCoercion co mkEqBox
dsEvTerm (EvTupleSel v n)
= do { tm' <- dsEvTerm v
; let scrut_ty = exprType tm'
(tc, tys) = splitTyConApp scrut_ty
Just [dc] = tyConDataCons_maybe tc
xs = mkTemplateLocals tys
the_x = getNth xs n
; -- ASSERT( isTupleTyCon tc )
return $
Case tm' (mkWildValBinder scrut_ty) (idType the_x) [(DataAlt dc, xs, Var the_x)] }
dsEvTerm (EvTupleMk tms)
= do { tms' <- mapM dsEvTerm tms
; let tys = map exprType tms'
; return $ Var (dataConWorkId dc) `mkTyApps` tys `mkApps` tms' }
where
dc = tupleCon ConstraintTuple (length tms)
dsEvTerm (EvSuperClass d n)
= do { d' <- dsEvTerm d
; let (cls, tys) = getClassPredTys (exprType d')
sc_sel_id = classSCSelId cls n -- Zero-indexed
; return $ Var sc_sel_id `mkTyApps` tys `App` d' }
where
dsEvTerm (EvDelayedError ty msg) = return $ Var errorId `mkTyApps` [ty] `mkApps` [litMsg]
where
errorId = rUNTIME_ERROR_ID
litMsg = Lit (MachStr (fastStringToByteString msg))
dsEvTerm (EvLit l) =
case l of
EvNum n -> mkIntegerExpr n
EvStr s -> mkStringExprFS s
dsEvTerm (EvCallStack cs) = dsEvCallStack cs
dsEvTerm (EvTypeable ev) = dsEvTypeable ev
-- | Desugar 'Typeable' evidence into a Core expression that builds the
-- dictionary: first construct a TypeRep for the type, then package it
-- as a (newtype) 'Typeable' dictionary via a coercion.
dsEvTypeable :: EvTypeable -> DsM CoreExpr
dsEvTypeable ev =
  do tyCl      <- dsLookupTyCon typeableClassName
     typeRepTc <- dsLookupTyCon typeRepTyConName
     let tyRepType = mkTyConApp typeRepTc []
     (ty, rep) <-
       case ev of
         -- Saturated type-constructor application: build the rep from
         -- the tycon's fingerprint plus the reps of its kind arguments.
         EvTypeableTyCon tc ks ->
           do ctr     <- dsLookupGlobalId mkPolyTyConAppName
              mkTyCon <- dsLookupGlobalId mkTyConName
              dflags  <- getDynFlags
              let mkRep cRep kReps tReps =
                    mkApps (Var ctr) [ cRep, mkListExpr tyRepType kReps
                                     , mkListExpr tyRepType tReps ]
              let kindRep k =
                    case splitTyConApp_maybe k of
                      Nothing -> panic "dsEvTypeable: not a kind constructor"
                      Just (kc,ks) ->
                        do kcRep <- tyConRep dflags mkTyCon kc
                           reps  <- mapM kindRep ks
                           return (mkRep kcRep [] reps)
              tcRep <- tyConRep dflags mkTyCon tc
              kReps <- mapM kindRep ks
              return ( mkTyConApp tc ks
                     , mkRep tcRep kReps []
                     )

         -- Type application: combine the reps of the two halves.
         EvTypeableTyApp t1 t2 ->
           do e1  <- getRep tyCl t1
              e2  <- getRep tyCl t2
              ctr <- dsLookupGlobalId mkAppTyName
              return ( mkAppTy (snd t1) (snd t2)
                     , mkApps (Var ctr) [ e1, e2 ]
                     )

         -- Type-level literal (Nat or Symbol): rep from its show form.
         EvTypeableTyLit ty ->
           do str <- case (isNumLitTy ty, isStrLitTy ty) of
                       (Just n, _) -> return (show n)
                       (_, Just n) -> return (show n)
                       _           -> panic "dsEvTypeable: malformed TyLit evidence"
              ctr <- dsLookupGlobalId typeLitTypeRepName
              tag <- mkStringExpr str
              return (ty, mkApps (Var ctr) [ tag ])

     -- TyRep -> Typeable t
     -- see also: Note [Memoising typeOf]
     repName <- newSysLocalDs tyRepType
     let proxyT = mkProxyPrimTy (typeKind ty) ty
         method = bindNonRec repName rep
                $ mkLams [mkWildValBinder proxyT] (Var repName)

     -- package up the method as `Typeable` dictionary
     return $ mkCast method $ mkSymCo $ getTypeableCo tyCl ty

  where
  -- co: method -> Typeable k t
  getTypeableCo tc t =
    case instNewTyCon_maybe tc [typeKind t, t] of
      Just (_,co) -> co
      _           -> panic "Class `Typeable` is not a `newtype`."

  -- Typeable t -> TyRep
  getRep tc (ev,t) =
    do typeableExpr <- dsEvTerm ev
       let co     = getTypeableCo tc t
           method = mkCast typeableExpr co
           proxy  = mkTyApps (Var proxyHashId) [typeKind t, t]
       return (mkApps method [proxy])

  -- This part could be cached
  tyConRep dflags mkTyCon tc =
    do pkgStr  <- mkStringExprFS pkg_fs
       modStr  <- mkStringExprFS modl_fs
       nameStr <- mkStringExprFS name_fs
       return (mkApps (Var mkTyCon) [ int64 high, int64 low
                                    , pkgStr, modStr, nameStr
                                    ])
    where
    tycon_name = tyConName tc
    modl       = nameModule tycon_name
    pkg        = modulePackageKey modl
    modl_fs    = moduleNameFS (moduleName modl)
    pkg_fs     = packageKeyFS pkg
    name_fs    = occNameFS (nameOccName tycon_name)
    -- Promoted tycons/datacons get a distinguishing prefix so their
    -- fingerprints differ from the unpromoted names.
    hash_name_fs
      | isPromotedTyCon tc   = appendFS (mkFastString "$k") name_fs
      | isPromotedDataCon tc = appendFS (mkFastString "$c") name_fs
      | otherwise            = name_fs
    hashThis = unwords $ map unpackFS [pkg_fs, modl_fs, hash_name_fs]
    Fingerprint high low = fingerprintString hashThis
    -- Literal constructor for the 64-bit fingerprint halves, chosen by
    -- the target word size.
    int64
      | wORD_SIZE dflags == 4 = mkWord64LitWord64
      | otherwise             = mkWordLit dflags . fromIntegral
{- Note [Memoising typeOf]
~~~~~~~~~~~~~~~~~~~~~~~~~~
See #3245, #9203
IMPORTANT: we don't want to recalculate the TypeRep once per call with
the proxy argument. This is what went wrong in #3245 and #9203. So we
help GHC by manually keeping the 'rep' *outside* the lambda.
-}
-- | Desugar 'CallStack' evidence into a Core expression that builds
-- (or extends) the runtime call-stack value.
dsEvCallStack :: EvCallStack -> DsM CoreExpr
-- See Note [Overview of implicit CallStacks] in TcEvidence.hs
dsEvCallStack cs = do
  df            <- getDynFlags
  m             <- getModule
  srcLocDataCon <- dsLookupDataCon srcLocDataConName
  let srcLocTyCon = dataConTyCon srcLocDataCon
  let srcLocTy    = mkTyConTy srcLocTyCon
  -- Build a SrcLoc value: package, module, file, and span coordinates.
  let mkSrcLoc l =
        liftM (mkCoreConApps srcLocDataCon)
              (sequence [ mkStringExprFS (packageKeyFS $ modulePackageKey m)
                        , mkStringExprFS (moduleNameFS $ moduleName m)
                        , mkStringExprFS (srcSpanFile l)
                        , return $ mkIntExprInt df (srcSpanStartLine l)
                        , return $ mkIntExprInt df (srcSpanStartCol l)
                        , return $ mkIntExprInt df (srcSpanEndLine l)
                        , return $ mkIntExprInt df (srcSpanEndCol l)
                        ])
  let callSiteTy = mkBoxedTupleTy [stringTy, srcLocTy]
  matchId <- newSysLocalDs $ mkListTy callSiteTy
  callStackDataCon <- dsLookupDataCon callStackDataConName
  let callStackTyCon = dataConTyCon callStackDataCon
  let callStackTy    = mkTyConTy callStackTyCon
  let emptyCS        = mkCoreConApps callStackDataCon [mkNilExpr callSiteTy]
  -- Cons a new (name, loc) call site onto an existing stack by casing
  -- on the CallStack constructor.
  let pushCS name loc rest =
        mkWildCase rest callStackTy callStackTy
                   [( DataAlt callStackDataCon
                    , [matchId]
                    , mkCoreConApps callStackDataCon
                                    [mkConsExpr callSiteTy
                                                (mkCoreTup [name, loc])
                                                (Var matchId)]
                    )]
  let mkPush name loc tm = do
        nameExpr <- mkStringExprFS name
        locExpr  <- mkSrcLoc loc
        case tm of
          EvCallStack EvCsEmpty -> return (pushCS nameExpr locExpr emptyCS)
          _ -> do tmExpr <- dsEvTerm tm
                  -- at this point tmExpr :: IP sym CallStack
                  -- but we need the actual CallStack to pass to pushCS,
                  -- so we use unwrapIP to strip the dictionary wrapper
                  -- See Note [Overview of implicit CallStacks]
                  let ip_co = unwrapIP (exprType tmExpr)
                  return (pushCS nameExpr locExpr (mkCastDs tmExpr ip_co))
  case cs of
    EvCsTop name loc tm      -> mkPush name loc tm
    EvCsPushCall name loc tm -> mkPush (occNameFS $ getOccName name) loc tm
    EvCsEmpty                -> panic "Cannot have an empty CallStack"
---------------------------------------

dsTcCoercion :: TcCoercion -> (Coercion -> CoreExpr) -> DsM CoreExpr
-- This is the crucial function that moves
-- from TcCoercions to Coercions; see Note [TcCoercions] in Coercion
-- e.g.  dsTcCoercion (trans g1 g2) k
--       = case g1 of EqBox g1# ->
--         case g2 of EqBox g2# ->
--         k (trans g1# g2#)
-- thing_inside will get a coercion at the role requested
dsTcCoercion co thing_inside
  = do { us <- newUniqueSupply
       ; let eqvs_covs :: [(EqVar,CoVar)]
             -- Pair each boxed coercion variable with a fresh unboxed one.
             eqvs_covs = zipWith mk_co_var (varSetElems (coVarsOfTcCo co))
                                           (uniqsFromSupply us)
             subst = mkCvSubst emptyInScopeSet [(eqv, mkCoVarCo cov) | (eqv, cov) <- eqvs_covs]
             result_expr = thing_inside (ds_tc_coercion subst co)
             result_ty   = exprType result_expr
       ; return (foldr (wrap_in_case result_ty) result_expr eqvs_covs) }
  where
    -- Fresh unboxed coercion variable mirroring a boxed equality variable.
    mk_co_var :: Id -> Unique -> (Id, Id)
    mk_co_var eqv uniq = (eqv, mkUserLocal occ uniq ty loc)
      where
        eq_nm = idName eqv
        occ   = nameOccName eq_nm
        loc   = nameSrcSpan eq_nm
        ty    = mkCoercionType (getEqPredRole (evVarPred eqv)) ty1 ty2
        (ty1, ty2) = getEqPredTys (evVarPred eqv)

    -- Unbox one equality variable with a single-alternative case,
    -- choosing the box constructor by the equality's role.
    wrap_in_case result_ty (eqv, cov) body
      = case getEqPredRole (evVarPred eqv) of
          Nominal          -> Case (Var eqv) eqv result_ty [(DataAlt eqBoxDataCon, [cov], body)]
          Representational -> Case (Var eqv) eqv result_ty [(DataAlt coercibleDataCon, [cov], body)]
          Phantom          -> panic "wrap_in_case/phantom"
ds_tc_coercion :: CvSubst -> TcCoercion -> Coercion
-- If the incoming TcCoercion is of type (a ~ b)   (resp.  Coercible a b)
-- the result is of type (a ~# b) (resp. a ~# b)
-- The VarEnv maps EqVars of type (a ~ b) to Coercions of type (a ~# b) (resp. and so on)
-- No need for InScope set etc because the
ds_tc_coercion subst tc_co
  = go tc_co
  where
    go (TcRefl r ty)            = Refl r (Coercion.substTy subst ty)
    go (TcTyConAppCo r tc cos)  = mkTyConAppCo r tc (map go cos)
    go (TcAppCo co1 co2)        = mkAppCo (go co1) (go co2)
    go (TcForAllCo tv co)       = mkForAllCo tv' (ds_tc_coercion subst' co)
                                where
                                  (subst', tv') = Coercion.substTyVarBndr subst tv
    go (TcAxiomInstCo ax ind cos)
                                = AxiomInstCo ax ind (map go cos)
    go (TcPhantomCo ty1 ty2)    = UnivCo (fsLit "ds_tc_coercion") Phantom ty1 ty2
    go (TcSymCo co)             = mkSymCo (go co)
    go (TcTransCo co1 co2)      = mkTransCo (go co1) (go co2)
    go (TcNthCo n co)           = mkNthCo n (go co)
    go (TcLRCo lr co)           = mkLRCo lr (go co)
    go (TcSubCo co)             = mkSubCo (go co)
    -- Local evidence bindings extend the substitution for the body.
    go (TcLetCo bs co)          = ds_tc_coercion (ds_co_binds bs) co
    go (TcCastCo co1 co2)       = mkCoCast (go co1) (go co2)
    go (TcCoVarCo v)            = ds_ev_id subst v
    go (TcAxiomRuleCo co ts cs) = AxiomRuleCo co (map (Coercion.substTy subst) ts) (map go cs)
    go (TcCoercion co)          = co

    -- Fold evidence bindings (in dependency order) into the substitution.
    ds_co_binds :: TcEvBinds -> CvSubst
    ds_co_binds (EvBinds bs)      = foldl ds_scc subst (sccEvBinds bs)
    ds_co_binds eb@(TcEvBinds {}) = pprPanic "ds_co_binds" (ppr eb)

    ds_scc :: CvSubst -> SCC EvBind -> CvSubst
    ds_scc subst (AcyclicSCC (EvBind v ev_term))
      = extendCvSubstAndInScope subst v (ds_co_term subst ev_term)
    ds_scc _ (CyclicSCC other) = pprPanic "ds_scc:cyclic" (ppr other $$ ppr tc_co)

    -- Only a restricted form of evidence terms can appear here.
    ds_co_term :: CvSubst -> EvTerm -> Coercion
    ds_co_term subst (EvCoercion tc_co) = ds_tc_coercion subst tc_co
    ds_co_term subst (EvId v)           = ds_ev_id subst v
    ds_co_term subst (EvCast tm co)     = mkCoCast (ds_co_term subst tm) (ds_tc_coercion subst co)
    ds_co_term _ other = pprPanic "ds_co_term" (ppr other $$ ppr tc_co)

    ds_ev_id :: CvSubst -> EqVar -> Coercion
    ds_ev_id subst v
      | Just co <- Coercion.lookupCoVar subst v = co
      | otherwise = pprPanic "ds_tc_coercion" (ppr v $$ ppr tc_co)
{-
Note [Simple coercions]
~~~~~~~~~~~~~~~~~~~~~~~
We have a special case for coercions that are simple variables.
Suppose cv :: a ~ b is in scope
Lacking the special case, if we see
f a b cv
we'd desugar to
f a b (case cv of EqBox (cv# :: a ~# b) -> EqBox cv#)
which is a bit stupid. The special case does the obvious thing.
This turns out to be important when desugaring the LHS of a RULE
(see Trac #7837). Suppose we have
normalise :: (a ~ Scalar a) => a -> a
normalise_Double :: Double -> Double
{-# RULES "normalise" normalise = normalise_Double #-}
Then the RULE we want looks like
forall a, (cv:a~Scalar a).
normalise a cv = normalise_Double
But without the special case we generate the redundant box/unbox,
which simpleOpt (currently) doesn't remove. So the rule never matches.
Maybe simpleOpt should be smarter. But it seems like a good plan
to simply never generate the redundant box/unbox in the first place.
-}
|
ssaavedra/liquidhaskell
|
src/Language/Haskell/Liquid/Desugar710/DsBinds.hs
|
bsd-3-clause
| 49,258
| 0
| 24
| 15,515
| 8,484
| 4,342
| 4,142
| 553
| 21
|
module Web.Zenfolio.RPC (
zfRemote,
zfRemoteSsl,
zfAgentHeaders,
zfTokenHeader
) where
import Network.JsonRpc (MethodName, Remote, remote)
import qualified Network.HTTP as H (Header(..), HeaderName(..))
import Network.URI (URI, parseURI)
import Web.Zenfolio.Types (AuthToken)
-- | User-agent string identifying this client library to the API.
zfUserAgent :: String
zfUserAgent = "hs-zenfolio/0.1"
-- | Headers sent with every request: the standard @User-Agent@ plus
-- Zenfolio's custom @X-Zenfolio-User-Agent@ variant, both carrying
-- 'zfUserAgent'.
zfAgentHeaders :: [H.Header]
zfAgentHeaders =
    [ H.Header hdrName zfUserAgent
    | hdrName <- [H.HdrUserAgent, H.HdrCustom "X-Zenfolio-User-Agent"]
    ]
-- | Build the custom @X-Zenfolio-Token@ header carrying an auth token.
zfTokenHeader :: AuthToken -> H.Header
zfTokenHeader = H.Header (H.HdrCustom "X-Zenfolio-Token")
-- | Plain-HTTP endpoint of the Zenfolio 1.2 API.  The literal is known
-- to parse; 'error' is unreachable unless the constant is edited.
zfBaseUri :: URI
zfBaseUri =
    case parseURI base of
      Just u  -> u
      Nothing -> error ("Invalid base URI: " ++ base)
  where
    base = "http://www.zenfolio.com/api/1.2/zfapi.asmx"
-- | HTTPS endpoint of the Zenfolio 1.2 API.  The literal is known to
-- parse; 'error' is unreachable unless the constant is edited.
zfSecureBaseUri :: URI
zfSecureBaseUri =
    case parseURI secureBase of
      Just u  -> u
      Nothing -> error ("Invalid secure base URI: " ++ secureBase)
  where
    secureBase = "https://www.zenfolio.com/api/1.2/zfapi.asmx"
-- | Invoke a JSON-RPC method over plain HTTP with the agent headers.
zfRemote :: Remote a => MethodName -> a
zfRemote = remote zfBaseUri zfAgentHeaders
-- | Invoke a JSON-RPC method over HTTPS with the agent headers.
zfRemoteSsl :: Remote a => MethodName -> a
zfRemoteSsl = remote zfSecureBaseUri zfAgentHeaders
|
md5/hs-zenfolio
|
Web/Zenfolio/RPC.hs
|
bsd-3-clause
| 1,171
| 0
| 9
| 197
| 308
| 173
| 135
| 26
| 1
|
-- Copyright (c)2012, IlyaPortnov
--
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- * Redistributions in binary form must reproduce the above
-- copyright notice, this list of conditions and the following
-- disclaimer in the documentation and/or other materials provided
-- with the distribution.
--
-- * Neither the name of IlyaPortnov nor the names of other
-- contributors may be used to endorse or promote products derived
-- from this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{-# LANGUAGE ForeignFunctionInterface #-}
module Network.SSH.Client.SimpleSSH.GCrypt
( gcryptFix )
where
-- | Call the package's C shim @gcrypt_fix@.
-- NOTE(review): the C side is not visible here; presumably it performs
-- libgcrypt initialisation needed before using the SSH library --
-- TODO confirm against the package's C sources.
foreign import ccall "gcrypt_fix" gcryptFix :: IO ()
|
jprider63/simplessh-modified
|
src/Network/SSH/Client/SimpleSSH/GCrypt.hs
|
bsd-3-clause
| 1,774
| 0
| 7
| 329
| 62
| 50
| 12
| 4
| 0
|
module React.Flux.Mui
( module X
, muiThemeWrapper_
) where
import Protolude
import React.Flux
import React.Flux.Mui.AppBar as X
import React.Flux.Mui.AutoComplete as X
import React.Flux.Mui.Avatar as X
import React.Flux.Mui.Badge as X
import React.Flux.Mui.BottomNavigation as X
import React.Flux.Mui.BottomNavigation.BottomNavigationItem as X
import React.Flux.Mui.Card as X
import React.Flux.Mui.Card.CardActions as X
import React.Flux.Mui.Card.CardExpandable as X
import React.Flux.Mui.Card.CardHeader as X
import React.Flux.Mui.Card.CardMedia as X
import React.Flux.Mui.Card.CardText as X
import React.Flux.Mui.Card.CardTitle as X
import React.Flux.Mui.Checkbox as X
import React.Flux.Mui.Chip as X
import React.Flux.Mui.CircularProgress as X
import React.Flux.Mui.DatePicker as X
import React.Flux.Mui.Dialog as X
import React.Flux.Mui.Divider as X
import React.Flux.Mui.Drawer as X
import React.Flux.Mui.DropDownMenu as X
import React.Flux.Mui.FlatButton as X
import React.Flux.Mui.FloatingActionButton as X
import React.Flux.Mui.FontIcon as X
import React.Flux.Mui.GridList as X
import React.Flux.Mui.GridList.GridTile as X
import React.Flux.Mui.IconButton as X
import React.Flux.Mui.IconMenu as X
import React.Flux.Mui.LinearProgress as X
import React.Flux.Mui.List as X
import React.Flux.Mui.List.ListItem as X
import React.Flux.Mui.Menu as X
import React.Flux.Mui.MenuItem as X
import React.Flux.Mui.Paper as X
import React.Flux.Mui.Popover as X
import React.Flux.Mui.Popover.PopoverAnimationVertical as X
import React.Flux.Mui.RadioButton as X
import React.Flux.Mui.RadioButton.RadioButtonGroup as X
import React.Flux.Mui.RaisedButton as X
import React.Flux.Mui.RefreshIndicator as X
import React.Flux.Mui.SelectField as X
import React.Flux.Mui.Slider as X
import React.Flux.Mui.Snackbar as X
import React.Flux.Mui.Stepper as X
import React.Flux.Mui.Stepper.Step as X
import React.Flux.Mui.Stepper.StepButton as X
import React.Flux.Mui.Stepper.StepContent as X
import React.Flux.Mui.Stepper.StepLabel as X
import React.Flux.Mui.Styles.MuiThemeProvider as X
import React.Flux.Mui.Subheader as X
import React.Flux.Mui.SvgIcon as X
import React.Flux.Mui.Table as X
import React.Flux.Mui.Table.TableBody as X
import React.Flux.Mui.Table.TableFooter as X
import React.Flux.Mui.Table.TableHeader as X
import React.Flux.Mui.Table.TableHeaderColumn as X
import React.Flux.Mui.Table.TableRow as X
import React.Flux.Mui.Table.TableRowColumn as X
import React.Flux.Mui.Tabs as X
import React.Flux.Mui.Tabs.Tab as X
import React.Flux.Mui.TextField as X
import React.Flux.Mui.TimePicker as X
import React.Flux.Mui.Toggle as X
import React.Flux.Mui.Toolbar as X
import React.Flux.Mui.Toolbar.ToolbarGroup as X
import React.Flux.Mui.Toolbar.ToolbarSeparator as X
import React.Flux.Mui.Toolbar.ToolbarTitle as X
import React.Flux.Mui.Types as X
import React.Flux.Mui.Util as X
-- | Render children inside a @div@ wrapped by a 'muiThemeProvider_'
-- configured with 'defMuiThemeProvider' and no extra properties.
muiThemeWrapper_ ::
     ReactElementM eventHandler () -> ReactElementM eventHandler ()
muiThemeWrapper_ children =
  muiThemeProvider_ defMuiThemeProvider mempty (div_ children)
|
pbogdan/react-flux-mui
|
react-flux-mui/src/React/Flux/Mui.hs
|
bsd-3-clause
| 3,048
| 0
| 7
| 322
| 714
| 545
| 169
| 77
| 1
|
-- |Partial binding to CoreFoundation, as required for `System.Midi`.
-- At the moment only CFString is supported.
-- In the future this module should grow into a separate entity.
{-# LANGUAGE ForeignFunctionInterface, EmptyDataDecls #-}
module System.MacOSX.CoreFoundation
( newCFString
, releaseCFString
, peekCFString
, withCFString
, osStatusString
, osStatusError
, UInt8
, UInt16
, UInt32
, UInt64
, SInt8
, SInt16
, SInt32
, SInt64
, OSErr
, OSStatus
, UniChar
, CFIndex
, ItemCount
, ByteCount
, CFDataRef
, CFStringRef
, CFAllocatorRef
) where
import Control.Exception (bracket)
import Control.Monad
import Data.Bits
import Data.Int
import Data.Word
import Foreign
import Foreign.C
import Foreign.Marshal
-- Fixed-width integer aliases matching the CoreFoundation C type names.
type UInt8  = Word8
type UInt16 = Word16
type UInt32 = Word32
type UInt64 = Word64
type SInt8  = Int8
type SInt16 = Int16
type SInt32 = Int32
type SInt64 = Int64

type OSErr    = SInt16
type OSStatus = SInt32

-- NOTE(review): CoreFoundation's UniChar is a 16-bit UTF-16 code unit,
-- while Haskell's 'Char' marshals through 'Storable' with a 32-bit
-- element size; verify the element stride used by 'peekElemOff' matches
-- what the C side stores -- TODO confirm.
type UniChar = Char

type CFIndex   = SInt32
type ItemCount = UInt32
type ByteCount = UInt32

-- Opaque CoreFoundation object types, only ever used behind 'Ptr'.
data CFData
data CFString
data CFAllocator

type CFDataRef      = Ptr CFData
type CFStringRef    = Ptr CFString
type CFAllocatorRef = Ptr CFAllocator

-- The default allocator is represented by NULL in the C API.
kCFAllocatorDefault = nullPtr
----- error "handling" :) -----

-- | Render an 'OSStatus' code for inclusion in error messages.
osStatusString :: OSStatus -> String
osStatusString = ("OSStatus = " ++) . show
-- | Fail in 'IO' with a message describing the given 'OSStatus' code.
osStatusError :: OSStatus -> IO a
osStatusError = fail . osStatusString
----- Base -----

-- CFRelease: drop one retain on any CF object.
foreign import ccall unsafe "CFBase.h CFRelease"
  c_CFRelease :: Ptr a -> IO ()

----- CFStrings -----

-- Length of a CFString in UTF-16 code units.
foreign import ccall unsafe "CFString.h CFStringGetLength"
  c_CFStringGetLength :: CFStringRef -> IO CFIndex

-- Direct pointer to the string's internal buffer, or NULL if the
-- representation does not permit it.
foreign import ccall unsafe "CFString.h CFStringGetCharactersPtr"
  c_CFStringGetCharactersPtr :: CFStringRef -> IO (Ptr UniChar)

-- Per-character access (fallback path when no direct buffer exists).
foreign import ccall unsafe "CFString.h CFStringGetCharacterAtIndex"
  c_CFStringGetCharacterAtIndex :: CFStringRef -> CFIndex -> IO UniChar

-- Create a CFString by copying a buffer of characters.
foreign import ccall unsafe "CFString.h CFStringCreateWithCharacters"
  c_CFStringCreateWithCharacters :: CFAllocatorRef -> Ptr UniChar -> CFIndex -> IO CFStringRef
-- | Manually releasing a CFString (one CFRelease).
releaseCFString :: CFStringRef -> IO ()
releaseCFString = c_CFRelease
-- | Peeks a CFString.
--
-- Uses the direct-buffer fast path when 'c_CFStringGetCharactersPtr'
-- succeeds, otherwise falls back to per-character access.
-- NOTE(review): both paths read 'UniChar' (= 'Char' here) elements;
-- CoreFoundation stores 16-bit UTF-16 code units, so the element
-- stride and non-BMP characters may not be handled correctly --
-- TODO confirm (see the note on 'UniChar').
peekCFString :: CFStringRef -> IO String
peekCFString cfstring = do
  n <- c_CFStringGetLength cfstring
  p <- c_CFStringGetCharactersPtr cfstring
  if p /= nullPtr
    then forM [0..n-1] $ \i -> peekElemOff p (fromIntegral i)
    else forM [0..n-1] $ \i -> c_CFStringGetCharacterAtIndex cfstring i
-- | Creates a new CFString. You have to release it manually
-- (see 'releaseCFString').
--
-- Fix: the original allocated a scratch array with 'allocaArray' but
-- never copied the Haskell string into it, so the CFString was built
-- from uninitialised memory.  'withArrayLen' marshals the characters
-- into the buffer before CoreFoundation copies them out.
newCFString :: String -> IO CFStringRef
newCFString string =
  withArrayLen string $ \n p ->
    c_CFStringCreateWithCharacters kCFAllocatorDefault p (fromIntegral n)
-- | Safe passing of a CFString to the OS (releases it afterwards).
--
-- Fix: the original released the CFString only on the success path, so
-- an exception thrown by the action leaked the string.  'bracket'
-- guarantees the release runs in all cases.
withCFString :: String -> (CFStringRef -> IO a) -> IO a
withCFString string = bracket (newCFString string) releaseCFString
|
hanshoglund/hamid
|
src/System/MacOSX/CoreFoundation.hs
|
bsd-3-clause
| 3,169
| 0
| 12
| 681
| 688
| 378
| 310
| -1
| -1
|
{-------------------------------------------------------------------------------
A mapper/reducer for Hadoop streams for building BoW models from
a CONLL-parsed file.
(c) 2013 Jan Snajder
-------------------------------------------------------------------------------}
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative
import Control.Monad
import Data.Char
import Data.Function (on)
import Data.List
import Data.Map (Map)
import qualified Data.Map as M
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Set (Set)
import qualified Data.Set as S
import Data.Counts (Counts)
import qualified Data.Counts as C
import qualified IdMap as IM
import Prelude hiding (Word)
import System.Console.ParseArgs
import System.Environment
import System.IO
import System.Random (randomRIO)
-- A corpus is a list of sentences, each a list of words.  'Word' is
-- 'Text' (shadowing the Prelude name, hidden in the import list).
type Word = Text
type Sentence = [Word]
type Corpus = [Sentence]

type Index = Int
type WordCounts = C.Counts Text

-- Maps context words to their integer column indices.
type ContextIndex = IM.IdMap Index Word
{-
mrMap
:: Set Word
-> ContextIndex
-> Int
-> (Token -> [String])
-> (Token -> [String])
-> Sentence
-> [(Word,Index)]
mrMap ts ci n ft fc =
filter ((`S.member` ts) . fst) . concatMap pairs . windows n . sentenceTokens
where pairs (x,xs) = [(T.pack $ t, c2) | t <- ft x, c <- xs,
c1 <- fc c, Just c2 <- [IM.lookup' ci $ T.pack c1]]
-}
-- Targets found in a sentence: normalize each word with @ft@ and keep
-- those that belong to the target set.
sentenceTargets :: (Word -> Word) -> Set Word -> Sentence -> Set Word
sentenceTargets ft ts sentence =
  S.fromList [ w | w <- map ft sentence, w `S.member` ts ]
-- Randomly subsample the targets found in one sentence, according to
-- the per-word sampling proportions.
sampleSentenceTargets
  :: SamplingProportions
  -> (Word -> Word)
  -> Set Word
  -> Sentence
  -> IO (Set Word)
sampleSentenceTargets sp ft ts s =
  sampleTargets sp (sentenceTargets ft ts s)
{-
mrMap2
:: Set Word
-> SamplingProportions
-> ContextIndex
-> Int
-> (Token -> [String])
-> (Token -> [String])
-> Sentence
-> IO (Counts (Word, Index))
mrMap2 ts sp ci n ft fc s = do
ts' <- sampleSentenceTargets sp ft ts s
return . C.fromList . filter ((`S.member` ts') . fst) .
concatMap pairs . windows n $ sentenceTokens s
where pairs (x,xs) = [(T.pack $ t, c2) | t <- ft x, c <- xs,
c1 <- fc c, Just c2 <- [IM.lookup' ci $ T.pack c1]]
-}
-- Map step: for one sentence, produce counts of (target, context-id)
-- co-occurrence pairs within a +-n word window, keeping only targets
-- that survive random subsampling.
mrMap3
  :: Set Word             -- target words
  -> SamplingProportions  -- per-target sampling probabilities
  -> ContextIndex         -- context word -> column index
  -> Int                  -- window size (each side)
  -> (Word -> Word)       -- target normalization (e.g. lowercasing)
  -> (Word -> Word)       -- context normalization
  -> Sentence
  -> IO (Counts (Word, Index))
mrMap3 ts sp ci n ft fc s = do
  ts' <- sampleSentenceTargets sp ft ts s
  return . C.fromList . filter ((`S.member` ts') . fst) .
    concatMap pairs $ windows n s
  -- Pair the (normalized) center word with every indexed context word
  -- in its window; contexts absent from the index are dropped.
  where pairs (x, xs) = [(ft x, c2) | c <- xs,
                         Just c2 <- [IM.lookup' ci $ fc c]]
-- extracts +-n sublists: each element paired with up to k neighbours
-- on each side (clipped at the list boundaries).
windows :: Int -> [a] -> [(a, [a])]
windows k ws =
  [ (w, takeFromTo (i - k) (i - 1) ws ++ takeFromTo (i + 1) (i + k) ws)
  | (w, i) <- zip ws [0 ..] ]
-- | The sublist at indices @i..j@ (inclusive); negative bounds are
-- clamped to the start of the list and an empty range yields @[]@.
--
-- Fix: the original's @j < 0@ guard rewrote to @takeFromTo i 0@, which
-- wrongly returned the first element for @i == 0, j < 0@ (an empty
-- range).  Clamping only @i@ suffices: when @j < i@, @take (j-i+1)@ is
-- a take of a non-positive count, i.e. @[]@.
takeFromTo :: Int -> Int -> [a] -> [a]
takeFromTo i j
  | i < 0     = takeFromTo 0 j
  | otherwise = take (j - i + 1) . drop i
-- Reduce step: aggregate raw (target, context) pairs into one sparse
-- count vector [(context-id, frequency)] per target.
mrReduce :: [(Word, Index)] -> [(Word, [(Index, Int)])]
mrReduce xs = vs
  where cs = C.counts $ C.fromList xs
        ys = map (\((t,c),f) -> (t,(c,f))) cs
        -- Group rows by target.  Relies on C.counts listing equal
        -- targets adjacently (e.g. sorted by key) -- TODO confirm
        -- against Data.Counts.
        vs = [ (t,map snd x) | x@((t,_):_) <- groupBy ((==) `on` fst) ys]
-- | A strict monadic left fold: each intermediate accumulator is
-- forced to WHNF before the next step, preventing thunk build-up.
foldM' :: (Monad m) => (a -> b -> m a) -> a -> [b] -> m a
foldM' step = go
  where
    go acc []       = return acc
    go acc (y : ys) = do
      acc' <- step acc y
      acc' `seq` go acc' ys
-- Build the whole bag-of-words model: strictly fold per-sentence
-- co-occurrence counts over the corpus, then aggregate into one sparse
-- vector per target.
mkModel
  :: Set Word             -- target words
  -> SamplingProportions  -- per-target sampling probabilities
  -> ContextIndex         -- context word -> column index
  -> Int                  -- window size (each side)
  -> (Word -> Word)       -- target normalization
  -> (Word -> Word)       -- context normalization
  -> Corpus
  -> IO [(Word, [(Index, Int)])]
mkModel ts sp ci n ft fc c = do
  xs <- foldM' (\cs s -> do
          cs' <- mrMap3 ts sp ci n ft fc s
          -- NOTE(review): this 'seq' forces the *old* accumulator, not
          -- the union; foldM' already forces each returned value, so
          -- the intended strictness may have been on the union --
          -- TODO confirm.
          return $ cs `seq` cs `C.union` cs') C.empty c
  -- Same aggregation as mrReduce, applied to the folded counts.
  let cs = C.counts xs
      ys = map (\((t, c),f) -> (t, (c, f))) cs
      vs = [ (t, map snd x) | x@((t, _):_) <- groupBy ((==) `on` fst) ys]
  return vs
-- Render one target's sparse vector as a TAB-separated line:
-- target <TAB> ctx:freq <TAB> ctx:freq ...
showVec :: (Word, [(Index,Int)]) -> Text
showVec (t, cf) =
  T.intercalate "\t" (t : [ T.pack (show c ++ ":" ++ show f) | (c, f) <- cf ])
-- Read a file as UTF-8 Text regardless of the system locale.
-- (Lazy: the handle stays open until the contents are consumed.)
readFile' :: FilePath -> IO Text
readFile' f = do
  h <- openFile f ReadMode
  hSetEncoding h utf8
  T.hGetContents h
-- Per-word sampling probabilities (expected in [0,1]).
type SamplingProportions = Map Text Double

-- Read sampling proportions from a file of "word proportion" lines.
--
-- Fix: the original matched every line against @(w:p:_)@ and crashed
-- with a pattern-match failure on blank or single-field lines; such
-- lines are now skipped.  (The proportion field is still parsed with
-- 'read' and will error on non-numeric text.)
readProportions :: FilePath -> IO SamplingProportions
readProportions f =
  M.fromList . concatMap (parse . words) . lines <$> readFile f
  where parse (w:p:_) = [(T.pack w, read p)]
        parse _       = []
-- Return True with probability p (for p in [0,1]).
flipCoin :: Double -> IO Bool
flipCoin p = do
  r <- randomRIO (0, 1)
  return (r <= p)
-- Keep each word with its configured probability; words without an
-- entry in the proportions map are always kept.
sampleTargets :: SamplingProportions -> Set Word -> IO (Set Word)
sampleTargets sp ws = S.fromList <$> filterM keep (S.toList ws)
  where keep w = maybe (return True) flipCoin (M.lookup w sp)
-- Command-line argument specification (System.Console.ParseArgs):
-- positions 0-2 and 5 are options, 3-4 are flags, 6 is the positional
-- corpus file.
arg =
  [ Arg 0 (Just 't') (Just "targets")
      (argDataRequired "filename" ArgtypeString) "targets list"
  , Arg 1 (Just 'c')
      (Just "contexts") (argDataRequired "filename" ArgtypeString)
      "contexts list"
  , Arg 2 (Just 'w') (Just "window")
      (argDataDefaulted "size" ArgtypeInt 5)
      "window size around target (default=5)"
  , Arg 3 Nothing (Just "tlower") Nothing
      "lowercase corpus targets"
  , Arg 4 Nothing (Just "clower") Nothing
      "lowercase corpus contexts"
  , Arg 5 (Just 'p') (Just "proportions")
      (argDataOptional "filename" ArgtypeString)
      "target words sampling proportions, one word per line"
  , Arg 6 Nothing Nothing (argDataRequired "corpus" ArgtypeString)
      "corpus (tokenized, one line per sentence)" ]
-- Entry point: parse arguments, load targets/contexts/proportions,
-- build the model over the corpus and print it to stdout.
main = do
  args <- parseArgsIO ArgsComplete arg
  let tf = getRequiredArg args 0                        -- targets file
      cf = getRequiredArg args 1                        -- contexts file
      w  = getRequiredArg args 2                        -- window size
      ft = if gotArg args 3 then T.toLower else id      -- target normalizer
      fc = if gotArg args 4 then T.toLower else id      -- context normalizer
  hSetEncoding stdin utf8
  hSetEncoding stdout utf8
  -- Targets and contexts: first whitespace-separated field per line.
  ts <- S.fromList . map (head . T.words) . T.lines <$> readFile' tf
  ci <- (IM.fromList1 . map (head . T.words) . T.lines) <$> readFile' cf
  sp <- case getArg args 5 of
          Nothing -> return M.empty
          Just f  -> readProportions f
  c <- map T.words . T.lines <$> (readFile' $ getRequiredArg args 6)
  (T.unlines . map showVec <$> mkModel ts sp ci w ft fc c) >>= T.putStr
|
jsnajder/dsem
|
src/txt2bow.hs
|
bsd-3-clause
| 6,317
| 0
| 16
| 1,612
| 2,255
| 1,188
| 1,067
| 144
| 4
|
{-
- Hacq (c) 2013 NEC Laboratories America, Inc. All rights reserved.
-
- This file is part of Hacq.
- Hacq is distributed under the 3-clause BSD license.
- See the LICENSE file for more details.
-}
{-# LANGUAGE DeriveDataTypeable #-}
module Data.Quantum.Cost.Class (Cost(..), WrappedCost(..)) where
import Data.Monoid (Monoid)
import qualified Data.Monoid
import Data.Typeable (Typeable)
import Data.Quantum.CountKey (GateKey)
-- | Abstract cost model for quantum circuits.  Instances describe how
-- costs combine under sequential/parallel composition, the "with"
-- construct, inversion, and repetition.
class Cost c where
  empty :: c
  newWire :: c
  gate :: GateKey -> c
  -- |Combine the costs of two circuits applied sequentially.
  sequential :: c -> c -> c
  -- |Combine the costs of two circuits applied in parallel.
  parallel :: c -> c -> c
  -- |Combine the costs of two circuits in the "with" construct.
  with :: c -> c -> c
  -- |The cost of the inverse of a circuit.
  adjoint :: c -> c
  -- |The cost of the sequential repetition of a circuit.
  replicateSequential :: Int -> c -> c
  replicateSequential = genericReplicateSequential
  {-# INLINABLE replicateSequential #-}
  -- |The generic version of `replicateSequential`.
  genericReplicateSequential :: Integral i => i -> c -> c
  -- |The cost of the parallel repetition of a circuit.
  replicateParallel :: Int -> c -> c
  replicateParallel = genericReplicateParallel
  {-# INLINABLE replicateParallel #-}
  -- |The generic version of `replicateParallel`.
  genericReplicateParallel :: Integral i => i -> c -> c
-- | Newtype giving any 'Cost' a 'Monoid' interface, with 'mappend' =
-- 'sequential' and 'mempty' = 'empty'.
newtype WrappedCost c = WrappedCost {
  unwrapCost :: c
} deriving (Eq, Typeable)

-- Monoid Wrapper for Cost
--
-- We do not make Cost a subclass of Monoid because the Monoid instances for tuples
-- in Data.Monoid are lazy whereas we want strict implementations.
-- NOTE(review): on GHC >= 8.4 a Semigroup instance is also required for
-- Monoid; this instance predates that -- TODO confirm target compiler.
instance Cost c => Monoid (WrappedCost c) where
  mempty = WrappedCost empty
  {-# INLINE mempty #-}
  WrappedCost a `mappend` WrappedCost b = WrappedCost $ sequential a b
  {-# INLINE mappend #-}
-- Instances for tuples
--
-- When a tuple is used as the log type of a Writer monad,
-- we usually want to synchronize the computation of all components of a tuple.
-- Therefore, we define these instances with seq.
-- I decided not to introduce types for strict tuples after reading a discussion
-- about strict tuple types on haskell-prime mailing list in March 2006,
-- in particular the following message by John Meacham:
-- <http://www.haskell.org/pipermail/haskell-prime/2006-March/001006.html>.
-- | Build a pair after forcing both components to WHNF.
strictPair :: a1 -> a2 -> (a1, a2)
strictPair x y = seq x (seq y (x, y))
-- Componentwise Cost instance for pairs; every result is forced
-- strictly via strictPair.
instance (Cost c1, Cost c2) => Cost (c1, c2) where
  empty =
    strictPair empty empty
  {-# INLINABLE empty #-}
  newWire =
    strictPair newWire newWire
  {-# INLINABLE newWire #-}
  gate key =
    strictPair (gate key) (gate key)
  {-# INLINABLE gate #-}
  sequential (c1, c2) (c1', c2') =
    strictPair (sequential c1 c1') (sequential c2 c2')
  {-# INLINABLE sequential #-}
  parallel (c1, c2) (c1', c2') =
    strictPair (parallel c1 c1') (parallel c2 c2')
  {-# INLINABLE parallel #-}
  with (c1, c2) (c1', c2') =
    strictPair (with c1 c1') (with c2 c2')
  {-# INLINABLE with #-}
  adjoint (c1, c2) =
    strictPair (adjoint c1) (adjoint c2)
  {-# INLINABLE adjoint #-}
  replicateSequential n (c1, c2) =
    strictPair (replicateSequential n c1) (replicateSequential n c2)
  {-# INLINABLE replicateSequential #-}
  genericReplicateSequential n (c1, c2) =
    strictPair (genericReplicateSequential n c1) (genericReplicateSequential n c2)
  {-# INLINABLE genericReplicateSequential #-}
  replicateParallel n (c1, c2) =
    strictPair (replicateParallel n c1) (replicateParallel n c2)
  {-# INLINABLE replicateParallel #-}
  genericReplicateParallel n (c1, c2) =
    strictPair (genericReplicateParallel n c1) (genericReplicateParallel n c2)
  {-# INLINABLE genericReplicateParallel #-}
-- | Build a 3-tuple after forcing all components to WHNF.
strictTuple3 :: a1 -> a2 -> a3 -> (a1, a2, a3)
strictTuple3 x y z = seq x (seq y (seq z (x, y, z)))
-- Componentwise Cost instance for 3-tuples; strict via strictTuple3.
instance (Cost c1, Cost c2, Cost c3) => Cost (c1, c2, c3) where
  empty =
    strictTuple3 empty empty empty
  {-# INLINABLE empty #-}
  newWire =
    strictTuple3 newWire newWire newWire
  {-# INLINABLE newWire #-}
  gate key =
    strictTuple3 (gate key) (gate key) (gate key)
  {-# INLINABLE gate #-}
  sequential (c1, c2, c3) (c1', c2', c3') =
    strictTuple3 (sequential c1 c1') (sequential c2 c2') (sequential c3 c3')
  {-# INLINABLE sequential #-}
  parallel (c1, c2, c3) (c1', c2', c3') =
    strictTuple3 (parallel c1 c1') (parallel c2 c2') (parallel c3 c3')
  {-# INLINABLE parallel #-}
  with (c1, c2, c3) (c1', c2', c3') =
    strictTuple3 (with c1 c1') (with c2 c2') (with c3 c3')
  {-# INLINABLE with #-}
  adjoint (c1, c2, c3) =
    strictTuple3 (adjoint c1) (adjoint c2) (adjoint c3)
  {-# INLINABLE adjoint #-}
  replicateSequential n (c1, c2, c3) =
    strictTuple3 (replicateSequential n c1) (replicateSequential n c2) (replicateSequential n c3)
  {-# INLINABLE replicateSequential #-}
  genericReplicateSequential n (c1, c2, c3) =
    strictTuple3 (genericReplicateSequential n c1) (genericReplicateSequential n c2) (genericReplicateSequential n c3)
  {-# INLINABLE genericReplicateSequential #-}
  replicateParallel n (c1, c2, c3) =
    strictTuple3 (replicateParallel n c1) (replicateParallel n c2) (replicateParallel n c3)
  {-# INLINABLE replicateParallel #-}
  genericReplicateParallel n (c1, c2, c3) =
    strictTuple3 (genericReplicateParallel n c1) (genericReplicateParallel n c2) (genericReplicateParallel n c3)
  {-# INLINABLE genericReplicateParallel #-}
-- | Build a 4-tuple after forcing all components to WHNF.
strictTuple4 :: a1 -> a2 -> a3 -> a4 -> (a1, a2, a3, a4)
strictTuple4 w x y z = seq w (seq x (seq y (seq z (w, x, y, z))))
-- Componentwise Cost instance for 4-tuples; strict via strictTuple4.
instance (Cost c1, Cost c2, Cost c3, Cost c4) => Cost (c1, c2, c3, c4) where
  empty =
    strictTuple4 empty empty empty empty
  {-# INLINABLE empty #-}
  newWire =
    strictTuple4 newWire newWire newWire newWire
  {-# INLINABLE newWire #-}
  gate key =
    strictTuple4 (gate key) (gate key) (gate key) (gate key)
  {-# INLINABLE gate #-}
  sequential (c1, c2, c3, c4) (c1', c2', c3', c4') =
    strictTuple4 (sequential c1 c1') (sequential c2 c2') (sequential c3 c3') (sequential c4 c4')
  {-# INLINABLE sequential #-}
  parallel (c1, c2, c3, c4) (c1', c2', c3', c4') =
    strictTuple4 (parallel c1 c1') (parallel c2 c2') (parallel c3 c3') (parallel c4 c4')
  {-# INLINABLE parallel #-}
  with (c1, c2, c3, c4) (c1', c2', c3', c4') =
    strictTuple4 (with c1 c1') (with c2 c2') (with c3 c3') (with c4 c4')
  {-# INLINABLE with #-}
  adjoint (c1, c2, c3, c4) =
    strictTuple4 (adjoint c1) (adjoint c2) (adjoint c3) (adjoint c4)
  {-# INLINABLE adjoint #-}
  replicateSequential n (c1, c2, c3, c4) =
    strictTuple4 (replicateSequential n c1) (replicateSequential n c2) (replicateSequential n c3) (replicateSequential n c4)
  {-# INLINABLE replicateSequential #-}
  genericReplicateSequential n (c1, c2, c3, c4) =
    strictTuple4 (genericReplicateSequential n c1) (genericReplicateSequential n c2) (genericReplicateSequential n c3) (genericReplicateSequential n c4)
  {-# INLINABLE genericReplicateSequential #-}
  replicateParallel n (c1, c2, c3, c4) =
    strictTuple4 (replicateParallel n c1) (replicateParallel n c2) (replicateParallel n c3) (replicateParallel n c4)
  {-# INLINABLE replicateParallel #-}
  genericReplicateParallel n (c1, c2, c3, c4) =
    strictTuple4 (genericReplicateParallel n c1) (genericReplicateParallel n c2) (genericReplicateParallel n c3) (genericReplicateParallel n c4)
  {-# INLINABLE genericReplicateParallel #-}
-- | Build a 5-tuple after forcing all components to WHNF.
strictTuple5 :: a1 -> a2 -> a3 -> a4 -> a5 -> (a1, a2, a3, a4, a5)
strictTuple5 v w x y z =
  seq v (seq w (seq x (seq y (seq z (v, w, x, y, z)))))
-- Componentwise Cost instance for 5-tuples; strict via strictTuple5.
instance (Cost c1, Cost c2, Cost c3, Cost c4, Cost c5) => Cost (c1, c2, c3, c4, c5) where
  empty =
    strictTuple5 empty empty empty empty empty
  {-# INLINABLE empty #-}
  newWire =
    strictTuple5 newWire newWire newWire newWire newWire
  {-# INLINABLE newWire #-}
  gate key =
    strictTuple5 (gate key) (gate key) (gate key) (gate key) (gate key)
  {-# INLINABLE gate #-}
  sequential (c1, c2, c3, c4, c5) (c1', c2', c3', c4', c5') =
    strictTuple5 (sequential c1 c1') (sequential c2 c2') (sequential c3 c3') (sequential c4 c4') (sequential c5 c5')
  {-# INLINABLE sequential #-}
  parallel (c1, c2, c3, c4, c5) (c1', c2', c3', c4', c5') =
    strictTuple5 (parallel c1 c1') (parallel c2 c2') (parallel c3 c3') (parallel c4 c4') (parallel c5 c5')
  {-# INLINABLE parallel #-}
  with (c1, c2, c3, c4, c5) (c1', c2', c3', c4', c5') =
    strictTuple5 (with c1 c1') (with c2 c2') (with c3 c3') (with c4 c4') (with c5 c5')
  {-# INLINABLE with #-}
  adjoint (c1, c2, c3, c4, c5) =
    strictTuple5 (adjoint c1) (adjoint c2) (adjoint c3) (adjoint c4) (adjoint c5)
  {-# INLINABLE adjoint #-}
  replicateSequential n (c1, c2, c3, c4, c5) =
    strictTuple5 (replicateSequential n c1) (replicateSequential n c2) (replicateSequential n c3) (replicateSequential n c4) (replicateSequential n c5)
  {-# INLINABLE replicateSequential #-}
  genericReplicateSequential n (c1, c2, c3, c4, c5) =
    strictTuple5 (genericReplicateSequential n c1) (genericReplicateSequential n c2) (genericReplicateSequential n c3) (genericReplicateSequential n c4) (genericReplicateSequential n c5)
  {-# INLINABLE genericReplicateSequential #-}
  replicateParallel n (c1, c2, c3, c4, c5) =
    strictTuple5 (replicateParallel n c1) (replicateParallel n c2) (replicateParallel n c3) (replicateParallel n c4) (replicateParallel n c5)
  {-# INLINABLE replicateParallel #-}
  genericReplicateParallel n (c1, c2, c3, c4, c5) =
    strictTuple5 (genericReplicateParallel n c1) (genericReplicateParallel n c2) (genericReplicateParallel n c3) (genericReplicateParallel n c4) (genericReplicateParallel n c5)
  {-# INLINABLE genericReplicateParallel #-}
|
ti1024/hacq
|
src/Data/Quantum/Cost/Class.hs
|
bsd-3-clause
| 9,852
| 0
| 10
| 1,940
| 2,982
| 1,641
| 1,341
| 132
| 1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
module IDP.AzureAD where
import Data.Aeson
import Data.Default
import Data.Text.Lazy (Text)
import GHC.Generics
import Network.OAuth.OAuth2
import Types
import URI.ByteString.QQ
-- | Marker type identifying the Azure Active Directory identity provider.
data AzureAD = AzureAD deriving (Eq, Show)

-- | The user-info payload for this provider is 'AzureADUser'.
type instance IDPUserInfo AzureAD = AzureADUser

-- | The provider is named by its own marker type.
type instance IDPName AzureAD = AzureAD
-- | IDP configuration for Azure AD: the OAuth2 key, the @resource@
-- authorize parameter required by Microsoft Graph, the user-info URI,
-- and the conversion to a 'LoginUser'.  All other fields come from 'def'.
azureIdp :: IDP AzureAD
azureIdp =
  def
    { idpName = AzureAD,
      oauth2Config = azureADKey,
      oauth2AuthorizeParams = [("resource", "https://graph.microsoft.com")],
      oauth2UserInfoUri = [uri|https://graph.microsoft.com/v1.0/me|],
      convertUserInfoToLoginUser = toLoginUser
    }
-- | OAuth2 endpoints for the Azure AD common tenant.  Only the authorize
-- and token endpoints are set here; the remaining fields (client id,
-- secret, ...) come from 'def' and are expected to be supplied elsewhere.
azureADKey :: OAuth2
azureADKey =
  def
    { oauth2AuthorizeEndpoint =
        [uri|https://login.windows.net/common/oauth2/authorize|],
      oauth2TokenEndpoint =
        [uri|https://login.windows.net/common/oauth2/token|]
    }
-- | The slice of the Graph user-info response this app cares about:
-- the account's mail address.
newtype AzureADUser = AzureADUser {mail :: Text} deriving (Show, Generic)

-- | Generic decoding, so the expected JSON key is @mail@.
instance FromJSON AzureADUser where
  parseJSON = genericParseJSON defaultOptions
-- | Build the application's 'LoginUser' from an Azure AD user record,
-- using the account's mail address as the login name.
toLoginUser :: AzureADUser -> LoginUser
toLoginUser user = LoginUser {loginUserName = userMail}
  where
    userMail = mail user
|
freizl/hoauth2
|
hoauth2-example/src/IDP/AzureAD.hs
|
bsd-3-clause
| 1,308
| 0
| 8
| 209
| 249
| 157
| 92
| 37
| 1
|
module Flat.Decoder.Run(strictDecoder,listTDecoder) where
import Foreign ( Ptr, plusPtr, withForeignPtr )
import qualified Data.ByteString as B
import ListT ( ListT(..) )
import qualified Data.ByteString.Internal as BS
import Control.Exception ( try, Exception )
import Flat.Decoder.Types
( tooMuchSpace, S(S), GetResult(..), Get(runGet), DecodeException )
import System.IO.Unsafe ( unsafePerformIO )
import Flat.Decoder.Prim ( dBool )
-- | Given a decoder and an input buffer returns either the decoded value or an error (if the input buffer is not fully consumed)
strictDecoder :: Get a -> B.ByteString -> Either DecodeException a
strictDecoder get bs =
  strictDecoder_ get bs $ \(GetResult s'@(S ptr' o') a) endPtr ->
    -- "Fully consumed" means the cursor is exactly at the end pointer and
    -- there are no pending bits in the current byte (bit offset 0).
    if ptr' /= endPtr || o' /= 0
      then tooMuchSpace endPtr s'
      else return a
-- | Run a decoder over the raw bytes of a strict ByteString and apply a
-- final @check@ to the result and the end-of-buffer pointer.  Exceptions
-- thrown by the decoder (or the check) are caught by 'try' and returned
-- as 'Left'.
--
-- NOTE(review): uses 'unsafePerformIO' + 'try'; the NOINLINE pragma keeps
-- GHC from duplicating or floating the unsafe call.
strictDecoder_ ::
     Exception e
  => Get a1
  -> BS.ByteString
  -> (GetResult a1 -> Ptr b -> IO a)
  -> Either e a
strictDecoder_ get (BS.PS base off len) check =
  unsafePerformIO . try $
    withForeignPtr base $ \base0 ->
      let ptr = base0 `plusPtr` off    -- first byte of the slice
          endPtr = ptr `plusPtr` len   -- one past the last byte
      in do res <- runGet get endPtr (S ptr 0)
            check res endPtr
{-# NOINLINE strictDecoder_ #-}
-- strictRawDecoder :: Exception e => Get t -> B.ByteString -> Either e (t,B.ByteString, NumBits)
-- strictRawDecoder get (BS.PS base off len) = unsafePerformIO . try $
-- withForeignPtr base $ \base0 ->
-- let ptr = base0 `plusPtr` off
-- endPtr = ptr `plusPtr` len
-- in do
-- GetResult (S ptr' o') a <- runGet get endPtr (S ptr 0)
-- return (a, BS.PS base (ptr' `minusPtr` base0) (endPtr `minusPtr` ptr'), o')
-- | Decode a stream of values as a 'ListT': each element is preceded by a
-- boolean continuation flag (True = another element follows, False = end).
--
-- NOTE(review): the raw pointer escapes 'withForeignPtr' inside the
-- suspended @go@ continuations, so the input buffer must stay alive while
-- the stream is being consumed -- confirm callers guarantee this.
listTDecoder :: Get a -> BS.ByteString -> IO (ListT IO a)
listTDecoder get (BS.PS base off len) =
  withForeignPtr base $ \base0 -> do
    let ptr = base0 `plusPtr` off
        endPtr = ptr `plusPtr` len
        s = S ptr 0
        -- Read the continuation bit; on True decode one element and
        -- suspend the rest of the stream, on False terminate it.
        go s = do
          GetResult s' b <- runGet dBool endPtr s
          if b
            then do
              GetResult s'' a <- runGet get endPtr s'
              return $ Just (a, ListT $ go s'')
            else return Nothing
    return $ ListT (go s)
|
tittoassini/flat
|
src/Flat/Decoder/Run.hs
|
bsd-3-clause
| 2,235
| 0
| 21
| 629
| 588
| 316
| 272
| 44
| 2
|
{-# LANGUAGE ForeignFunctionInterface #-}
-- |
-- Copyright : Anders Claesson 2013-2016
-- Maintainer : Anders Claesson <anders.claesson@gmail.com>
--
-- Common permutation statistics. To avoid name clashes this module is
-- best imported @qualified@; e.g.
--
-- > import qualified Sym.Perm.Stat as S
--
module Sym.Perm.Stat
(
asc -- ascents
, des -- descents
, exc -- excedances
, fp -- fixed points
, sfp -- strong fixed points
, cyc -- cycles
, inv -- inversions
, maj -- the major index
, comaj -- the co-major index
, peak -- peaks
, vall -- valleys
, dasc -- double ascents
, ddes -- double descents
, lmin -- left-to-right minima
, lmax -- left-to-right maxima
, rmin -- right-to-left minima
, rmax -- right-to-left maxima
, head -- the first element
, last -- the last element
, lir -- left-most increasing run
, ldr -- left-most decreasing run
, rir -- right-most increasing run
, rdr -- right-most decreasing run
, comp -- components
, scomp -- skew components
, ep -- rank a la Elizalde & Pak
, dim -- dimension
, asc0 -- small ascents
, des0 -- small descents
, lis -- longest increasing subsequence
, lds -- longest decreasing subsequence
-- , shad -- shadow
) where
import Prelude hiding (head, last)
import Sym.Perm
import qualified Sym.Perm.SSYT as Y
import qualified Sym.Perm.D8 as D8
import Foreign.Ptr
import Foreign.C.Types
import System.IO.Unsafe
-- | Run a C statistic (a function over a CLong buffer and its length) on a
-- permutation and return the result as an 'Int'.  The 'unsafePerformIO' is
-- justified only if the C functions never write to the buffer -- presumably
-- true for the read-only statistics in @stat.h@; TODO confirm on the C side.
marshal :: (Ptr CLong -> CLong -> CLong) -> Perm -> Int
marshal f w =
    fromIntegral . unsafePerformIO . unsafeWith w $ \p ->
        return $ f p (fromIntegral (size w))
{-# INLINE marshal #-}
foreign import ccall unsafe "stat.h asc" c_asc
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h des" c_des
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h exc" c_exc
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h fp" c_fp
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h sfp" c_sfp
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h cyc" c_cyc
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h inv" c_inv
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h maj" c_maj
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h comaj" c_comaj
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h peak" c_peak
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h vall" c_vall
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h dasc" c_dasc
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h ddes" c_ddes
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h lmin" c_lmin
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h lmax" c_lmax
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h lir" c_lir
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h ldr" c_ldr
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h comp" c_comp
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h ep" c_ep
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h dim" c_dim
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h asc0" c_asc0
:: Ptr CLong -> CLong -> CLong
foreign import ccall unsafe "stat.h des0" c_des0
:: Ptr CLong -> CLong -> CLong
-- | The number of ascents. An /ascent/ in @w@ is an index @i@ such
-- that @w[i] \< w[i+1]@.
asc :: Perm -> Int
asc = marshal c_asc
-- | The number of descents. A /descent/ in @w@ is an index @i@ such
-- that @w[i] > w[i+1]@.
des :: Perm -> Int
des = marshal c_des
-- | The number of /excedances/: positions @i@ such that @w[i] > i@.
exc :: Perm -> Int
exc = marshal c_exc
-- | The number of /fixed points/: positions @i@ such that @w[i] == i@.
fp :: Perm -> Int
fp = marshal c_fp
-- | The number of /strong fixed points/ (also called splitters):
-- positions @i@ such that @w[j] \< i@ for @j \< i@ and @w[j] \> i@ for @j \> i@.
sfp :: Perm -> Int
sfp = marshal c_sfp
-- | The number of /cycles/:
-- orbits of the permutation when viewed as a function.
cyc :: Perm -> Int
cyc = marshal c_cyc
-- | The number of /inversions/:
-- pairs @\(i,j\)@ such that @i \< j@ and @w[i] > w[j]@.
inv :: Perm -> Int
inv = marshal c_inv
-- | /The major index/: the sum of the positions of the descents.
maj :: Perm -> Int
maj = marshal c_maj
-- | /The co-major index/.
-- NOTE(review): the previous comment ("the sum of descents") was copied
-- verbatim from 'maj'.  The actual definition lives in @stat.h@ (@c_comaj@);
-- presumably the sum of @n - i@ over descent positions @i@ -- confirm there.
comaj :: Perm -> Int
comaj = marshal c_comaj
-- | The number of /peaks/:
-- positions @i@ such that @w[i-1] \< w[i]@ and @w[i] \> w[i+1]@.
peak :: Perm -> Int
peak = marshal c_peak
-- | The number of /valleys/:
-- positions @i@ such that @w[i-1] \> w[i]@ and @w[i] \< w[i+1]@.
vall :: Perm -> Int
vall = marshal c_vall
-- | The number of /double ascents/:
-- positions @i@ such that @w[i-1] \< w[i] \< w[i+1]@.
dasc :: Perm -> Int
dasc = marshal c_dasc
-- | The number of /double descents/:
-- positions @i@ such that @w[i-1] \> w[i] \> w[i+1]@.
ddes :: Perm -> Int
ddes = marshal c_ddes
-- | The number of /left-to-right minima/:
-- positions @i@ such that @w[i] \< w[j]@ for all @j \< i@.
lmin :: Perm -> Int
lmin = marshal c_lmin
-- | The number of /left-to-right maxima/:
-- positions @i@ such that @w[i] \> w[j]@ for all @j \< i@.
lmax :: Perm -> Int
lmax = marshal c_lmax
-- | The number of /right-to-left minima/:
-- positions @i@ such that @w[i] \< w[j]@ for all @j \> i@.
rmin :: Perm -> Int
rmin = lmin . D8.reverse
-- | The number of /right-to-left maxima/:
-- positions @i@ such that @w[i] \> w[j]@ for all @j \> i@.
rmax :: Perm -> Int
rmax = lmax . D8.reverse
-- | The first (left-most) element in the standardization. E.g.,
-- @head \"231\" = head (fromList [1,2,0]) = 1@.
-- Returns 0 for the empty permutation.
head :: Perm -> Int
head w
  | size w == 0 = 0
  | otherwise   = fromIntegral (w `unsafeAt` 0)
-- | The last (right-most) element in the standardization. E.g.,
-- @last \"231\" = last (fromList [1,2,0]) = 0@.
-- Returns 0 for the empty permutation.
last :: Perm -> Int
last w
  | size w == 0 = 0
  | otherwise   = fromIntegral (w `unsafeAt` (size w - 1))
-- | Length of the left-most increasing run: largest @i@ such that
-- @w[0] \< w[1] \< ... \< w[i-1]@.
lir :: Perm -> Int
lir = marshal c_lir
-- | Length of the left-most decreasing run: largest @i@ such that
-- @w[0] \> w[1] \> ... \> w[i-1]@.
ldr :: Perm -> Int
ldr = marshal c_ldr
-- | Length of the right-most increasing run: largest @i@ such that
-- @w[n-i] \< ... \< w[n-2] \< w[n-1]@.
rir :: Perm -> Int
rir = ldr . D8.reverse
-- | Length of the right-most decreasing run: largest @i@ such that
-- @w[n-i] \> ... \> w[n-2] \> w[n-1]@.
rdr :: Perm -> Int
rdr = lir . D8.reverse
-- | The number of components. E.g., @[2,0,3,1,4,6,7,5]@ has three
-- components: @[2,0,3,1]@, @[4]@ and @[6,7,5]@.
comp :: Perm -> Int
comp = marshal c_comp
-- | The number of skew components. E.g., @[5,7,4,6,3,1,0,2]@ has three
-- skew components: @[5,7,4,6]@, @[3]@ and @[1,0,2]@.
scomp :: Perm -> Int
scomp = comp . D8.complement
-- | The rank as defined by Elizalde and Pak [Bijections for
-- refined restricted permutations, /J. Comb. Theory, Ser. A/, 2004]:
--
-- > maximum [ k | k <- [0..n-1], w[i] >= k for all i < k ]
--
ep :: Perm -> Int
ep = marshal c_ep
-- | The dimension of a permutation is defined as the largest
-- non-fixed-point, or zero if all points are fixed.
dim :: Perm -> Int
dim = marshal c_dim
-- | The number of small ascents. A /small ascent/ in @w@ is an index
-- @i@ such that @w[i] + 1 == w[i+1]@.
asc0 :: Perm -> Int
asc0 = marshal c_asc0
-- | The number of small descents. A /small descent/ in @w@ is an
-- index @i@ such that @w[i] == w[i+1] + 1@.
des0 :: Perm -> Int
des0 = marshal c_des0
-- | The longest increasing subsequence.
-- Computed as the length of the first row of the SSYT shape of @w@;
-- an empty shape (empty permutation) gives 0.
lis :: Perm -> Int
lis w = case Y.shape (Y.fromPerm w) of
          []    -> 0
          (x:_) -> x
-- | The longest decreasing subsequence.
-- Computed as the number of rows of the recording tableau of @w@.
lds :: Perm -> Int
lds = length . Y.recordingTableau . Y.fromPerm
-- | The size of the shadow of @w@. That is, the number of different
-- one point deletions of @w@.
-- shad :: Perm -> Int
-- shad = length . shadow . return . st
|
akc/sym
|
Sym/Perm/Stat.hs
|
bsd-3-clause
| 8,514
| 0
| 12
| 2,103
| 1,555
| 885
| 670
| 156
| 2
|
module Numerus.Demo where
import Numerus
-- | Demo value: applies 'bletch' (from "Numerus") to the constant 5.
runbletch :: Int
runbletch = bletch 5
|
markhibberd/numerus
|
demo/Numerus/Demo.hs
|
bsd-3-clause
| 81
| 0
| 5
| 14
| 22
| 13
| 9
| 4
| 1
|
{-# language TypeFamilies #-}
{-# language FlexibleInstances #-}
{-# language FlexibleContexts #-}
{-# language MultiParamTypeClasses #-}
{-# language UndecidableInstances #-}
{-# language Rank2Types #-}
{-# language ScopedTypeVariables #-}
{-# language TypeOperators #-}
module Feldspar.Software.Frontend where
import Feldspar.Sugar
import Feldspar.Representation
import Feldspar.Frontend
import Feldspar.Array.Vector hiding (reverse, (++))
import Feldspar.Array.Buffered (ArraysSwap(..))
import Feldspar.Software.Primitive
import Feldspar.Software.Primitive.Backend ()
import Feldspar.Software.Expression
import Feldspar.Software.Representation
import Data.Struct
import Data.Bits (Bits, FiniteBits)
import Data.Complex
import Data.Constraint hiding (Sub)
import Data.Proxy
import Data.List (genericLength)
import Data.Word hiding (Word)
-- syntactic.
import Language.Syntactic (Syntactic(..))
import Language.Syntactic.Functional
import qualified Language.Syntactic as Syn
-- operational-higher.
import qualified Control.Monad.Operational.Higher as Oper
-- imperative-edsl.
import Language.Embedded.Imperative.Frontend.General hiding (Ref, Arr, IArr)
import qualified Language.Embedded.Imperative as Imp
import qualified Language.Embedded.Imperative.CMD as Imp
-- hardware-edsl.
import qualified Language.Embedded.Hardware.Command.CMD as Hard
-- hmm!
import Feldspar.Hardware.Primitive (HardwarePrimType(..), HardwarePrimTypeRep(..))
import Feldspar.Hardware.Expression (HType')
import Feldspar.Hardware.Frontend (HSig, withHType')
import Prelude hiding (length, Word, (<=), (<), (>=), (>))
import qualified Prelude as P
--------------------------------------------------------------------------------
-- * Expressions.
--------------------------------------------------------------------------------
instance Value SExp
where
value = sugarSymSoftware . Lit
instance Share SExp
where
share = sugarSymSoftware (Let "")
instance Iterate SExp
where
loop = sugarSymSoftware ForLoop
instance Cond SExp
where
cond = sugarSymSoftware Cond
instance Equality SExp
where
(==) = sugarSymPrimSoftware Eq
instance Ordered SExp
where
(<) = sugarSymPrimSoftware Lt
(<=) = sugarSymPrimSoftware Lte
(>) = sugarSymPrimSoftware Gt
(>=) = sugarSymPrimSoftware Gte
instance Logical SExp
where
not = sugarSymPrimSoftware Not
(&&) = sugarSymPrimSoftware And
(||) = sugarSymPrimSoftware Or
instance Multiplicative SExp
where
mult = sugarSymPrimSoftware Mul
div = sugarSymPrimSoftware Div
mod = sugarSymPrimSoftware Mod
instance Bitwise SExp
where
complement = sugarSymPrimSoftware BitCompl
(.&.) = sugarSymPrimSoftware BitAnd
(.|.) = sugarSymPrimSoftware BitOr
xor = sugarSymPrimSoftware BitXor
sll = sugarSymPrimSoftware ShiftL
srl = sugarSymPrimSoftware ShiftR
rol = sugarSymPrimSoftware RotateL
ror = sugarSymPrimSoftware RotateR
instance Casting SExp
where
i2n = sugarSymPrimSoftware I2N
i2b = sugarSymPrimSoftware I2B
b2i = sugarSymPrimSoftware B2I
--------------------------------------------------------------------------------
instance (Bounded a, SType a) => Bounded (SExp a)
where
minBound = value minBound
maxBound = value maxBound
instance (Num a, SType' a) => Num (SExp a)
where
fromInteger = value . fromInteger
(+) = sugarSymPrimSoftware Add
(-) = sugarSymPrimSoftware Sub
(*) = sugarSymPrimSoftware Mul
negate = sugarSymPrimSoftware Neg
abs = error "todo: abs not implemeted for `SExp`"
signum = error "todo: signum not implemented for `SExp`"
instance (Fractional a, SType' a) => Fractional (SExp a)
where
fromRational = value . fromRational
(/) = sugarSymPrimSoftware FDiv
instance (Floating a, SType' a) => Floating (SExp a)
where
pi = sugarSymPrimSoftware Pi
exp = sugarSymPrimSoftware Exp
log = sugarSymPrimSoftware Log
sqrt = sugarSymPrimSoftware Sqrt
(**) = sugarSymPrimSoftware Pow
sin = sugarSymPrimSoftware Sin
cos = sugarSymPrimSoftware Cos
tan = sugarSymPrimSoftware Tan
asin = sugarSymPrimSoftware Asin
acos = sugarSymPrimSoftware Acos
atan = sugarSymPrimSoftware Atan
sinh = sugarSymPrimSoftware Sinh
cosh = sugarSymPrimSoftware Cosh
tanh = sugarSymPrimSoftware Tanh
asinh = sugarSymPrimSoftware Asinh
acosh = sugarSymPrimSoftware Acosh
atanh = sugarSymPrimSoftware Atanh
--------------------------------------------------------------------------------
complex :: (Num a, SType' a, SType' (Complex a)) =>
SExp a -> -- ^ Real
SExp a -> -- ^ Imaginary
SExp (Complex a)
complex = sugarSymPrimSoftware Complex
polar :: (Floating a, SType' a, SType' (Complex a)) =>
SExp a -> -- ^ Magnitude
SExp a -> -- ^ Phase
SExp (Complex a)
polar = sugarSymPrimSoftware Polar
real :: (SType' a, SType' (Complex a)) => SExp (Complex a) -> SExp a
real = sugarSymPrimSoftware Real
imaginary :: (SType' a, SType' (Complex a)) => SExp (Complex a) -> SExp a
imaginary = sugarSymPrimSoftware Imag
magnitude :: (RealFloat a, SType' a, SType' (Complex a)) => SExp (Complex a) -> SExp a
magnitude = sugarSymPrimSoftware Magnitude
phase :: (RealFloat a, SType' a, SType' (Complex a)) => SExp (Complex a) -> SExp a
phase = sugarSymPrimSoftware Phase
conjugate :: (RealFloat a, SType' a, SType' (Complex a)) => SExp (Complex a) -> SExp (Complex a)
conjugate = sugarSymPrimSoftware Conjugate
-- | Integer base-2 logarithm of @a@, computed with a branch-free sequence
-- of compare/shift steps over halving bit masks.
-- Based on: http://graphics.stanford.edu/~seander/bithacks.html#IntegerLog
ilog2 :: (FiniteBits a, Integral a, SType' a) => SExp a -> SExp a
ilog2 a = snd $ P.foldr (\ffi vr -> share vr (step ffi)) (a,0) ffis
  where
    -- One step: if the current value exceeds mask @ff@, record @2^i@ in the
    -- result (via the shared @shift@) and shift the value down by it.
    step (ff,i) (v,r) = share (b2i (v > fromInteger ff) .<<. value i) $ \shift ->
      (v .>>. i2n shift, r .|. shift)

    -- Masks paired with their bit indices, up to the type's width:
    -- [(0x1, 0), (0x3, 1), (0xF, 2), (0xFF, 3), (0xFFFF, 4), ...]
    ffis = (`P.zip` [0..])
         $ P.takeWhile (P.<= (2 P.^ (bitSize a `P.div` 2) - 1 :: Integer))
         $ P.map ((subtract 1) . (2 P.^) . (2 P.^))
         $ [(0::Integer)..]
--------------------------------------------------------------------------------
foreignImport
:: ( Syn.Signature sig
, fi ~ Syn.SmartFun dom sig
, sig ~ Syn.SmartSig fi
, dom ~ Syn.SmartSym fi
, dom ~ SoftwareDomain
, Syn.SyntacticN f fi
, Type SoftwarePrimType (Syn.DenResult sig)
)
=> String -> Denotation sig -> f
foreignImport str f = sugarSymSoftware (Construct str f)
--------------------------------------------------------------------------------
-- * Instructions.
--------------------------------------------------------------------------------
desugar :: (Syntactic a, Domain a ~ SoftwareDomain) => a -> SExp (Internal a)
desugar = SExp . Syn.desugar
sugar :: (Syntactic a, Domain a ~ SoftwareDomain) => SExp (Internal a) -> a
sugar = Syn.sugar . unSExp
resugar
:: ( Syntactic a
, Syntactic b
, Internal a ~ Internal b
, Domain a ~ SoftwareDomain
, Domain b ~ SoftwareDomain
)
=> a -> b
resugar = Syn.resugar
--------------------------------------------------------------------------------
instance References Software
where
type Reference Software = Ref
initRef = Software . fmap Ref . mapStructA (Imp.initRef) . resugar
newRef = Software . fmap Ref . mapStructA (const Imp.newRef) $ typeRep
getRef = Software . fmap resugar . mapStructA getRef' . unRef
setRef ref
= Software
. sequence_
. zipListStruct setRef' (unRef ref)
. resugar
unsafeFreezeRef
= Software
. fmap resugar
. mapStructA freezeRef'
. unRef
-- Imp.getRef specialized to software.
getRef' :: forall b . SoftwarePrimType b => Imp.Ref b -> Oper.Program SoftwareCMD (Oper.Param2 SExp SoftwarePrimType) (SExp b)
getRef' = withSType (Proxy :: Proxy b) Imp.getRef
-- Imp.setRef specialized to software.
setRef' :: forall b . SoftwarePrimType b => Imp.Ref b -> SExp b -> Oper.Program SoftwareCMD (Oper.Param2 SExp SoftwarePrimType) ()
setRef' = withSType (Proxy :: Proxy b) Imp.setRef
-- 'Imp.unsafeFreezeRef' specialized to software.
freezeRef' :: forall b . SoftwarePrimType b => Imp.Ref b -> Oper.Program SoftwareCMD (Oper.Param2 SExp SoftwarePrimType) (SExp b)
freezeRef' = withSType (Proxy :: Proxy b) Imp.unsafeFreezeRef
--------------------------------------------------------------------------------
instance Slicable SExp (Arr a)
where
slice from len (Arr o l arr) = Arr (o+from) len arr
instance Finite SExp (Arr a)
where
length = arrLength
instance Arrays Software
where
type Array Software = Arr
newArr len
= Software
$ fmap (Arr 0 len)
$ mapStructA (const (Imp.newArr len))
$ typeRep
initArr elems
= Software
$ fmap (Arr 0 len . Node)
$ Imp.constArr elems
where len = value $ genericLength elems
getArr arr ix
= Software
$ fmap resugar
$ mapStructA (flip getArr' (ix + arrOffset arr))
$ unArr arr
setArr arr ix a
= Software
$ sequence_
$ zipListStruct
(\a' arr' -> setArr' arr' (ix + arrOffset arr) a')
(resugar a)
$ unArr arr
copyArr arr brr
= Software
$ sequence_
$ zipListStruct (\a b ->
Imp.copyArr (a, arrOffset arr) (b, arrOffset brr) (length brr))
(unArr arr)
(unArr brr)
-- 'Imp.getArr' specialized to software.
getArr' :: forall b . SoftwarePrimType b
=> Imp.Arr Index b -> SExp Index
-> Oper.Program SoftwareCMD (Oper.Param2 SExp SoftwarePrimType) (SExp b)
getArr' = withSType (Proxy :: Proxy b) Imp.getArr
-- 'Imp.setArr' specialized to software.
setArr' :: forall b . SoftwarePrimType b
=> Imp.Arr Index b -> SExp Index -> SExp b
-> Oper.Program SoftwareCMD (Oper.Param2 SExp SoftwarePrimType) ()
setArr' = withSType (Proxy :: Proxy b) Imp.setArr
--------------------------------------------------------------------------------
instance Syntax SExp a => Indexed SExp (IArr a)
where
type ArrElem (IArr a) = a
(!) (IArr off len a) ix = resugar $ mapStruct index a
where
index :: forall b . SoftwarePrimType b => Imp.IArr Index b -> SExp b
index arr = sugarSymPrimSoftware
(GuardVal InternalAssertion "arrIndex: index out of bounds.")
(ix < len)
(sugarSymPrimSoftware (ArrIx arr) (ix + off) :: SExp b)
instance Slicable SExp (IArr a)
where
slice from len (IArr o l arr) = IArr (o+from) len arr
instance Finite SExp (IArr a)
where
length = iarrLength
instance IArrays Software
where
type IArray Software = IArr
unsafeFreezeArr arr
= Software
$ fmap (IArr (arrOffset arr) (length arr))
$ mapStructA (Imp.unsafeFreezeArr)
$ unArr arr
unsafeThawArr iarr
= Software
$ fmap (Arr (iarrOffset iarr) (length iarr))
$ mapStructA (Imp.unsafeThawArr)
$ unIArr iarr
--------------------------------------------------------------------------------
-- | Short-hand for software pull vectors.
type SPull a = Pull SExp a
-- | Short-hand for software push vectors.
type SPush a = Push Software a
-- | Short-hand for software manifest vectors.
type SManifest a = Manifest Software a
instance Syntax SExp (SExp a) => Pushy Software (IArr (SExp a)) (SExp a)
where
toPush iarr = toPush (M iarr :: Manifest Software (SExp a))
instance ViewManifest Software (IArr (SExp a)) (SExp a)
where
viewManifest = Just . M
instance Manifestable Software (IArr (SExp a)) (SExp a)
instance ArraysSwap Software
where
unsafeArrSwap arr brr = Software $ sequence_ $ zipListStruct Imp.unsafeSwapArr
(unArr arr)
(unArr brr)
--------------------------------------------------------------------------------
instance Control Software
where
iff c t f
= Software
$ Imp.iff (resugar c)
(unSoftware t)
(unSoftware f)
instance Loop Software
where
while c body
= Software
$ Imp.while
(fmap resugar $ unSoftware c)
(unSoftware body)
for lower step upper body
= Software
$ Imp.for
(resugar lower, step, Imp.Incl $ resugar upper)
(unSoftware . body . resugar)
instance Assert Software
where
assert = assertLabel $ UserAssertion ""
assertLabel :: AssertionLabel -> SExp Bool -> String -> Software ()
assertLabel lbl cond msg = Software $ Oper.singleInj $ Assert lbl cond msg
--------------------------------------------------------------------------------
-- ** Software instructions.
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- *** Assertions.
guard :: Syntax SExp a => SExp Bool -> String -> a -> a
guard = guardLabel $ UserAssertion ""
guardLabel :: Syntax SExp a => AssertionLabel -> SExp Bool -> String -> a -> a
guardLabel lbl cond msg = sugarSymSoftware (GuardVal lbl msg) cond
hint :: (Syntax SExp a, Syntax SExp b, Primitive SExp (Internal a))
=> a -- ^ Value to be used in invariant.
-> b -- ^ Result value.
-> b
hint x y = sugarSymSoftware HintVal x y
--------------------------------------------------------------------------------
-- *** File handling.
-- | Open a file.
fopen :: FilePath -> IOMode -> Software Handle
fopen file = Software . Imp.fopen file
-- | Close a file.
fclose :: Handle -> Software ()
fclose = Software . Imp.fclose
-- | Check for end of file.
feof :: Handle -> Software (SExp Bool)
feof = Software . Imp.feof
-- | Put a primitive value to a handle.
fput :: (Formattable a, SType' a)
=> Handle
-> String -- ^ Prefix.
-> SExp a -- ^ Expression to print.
-> String -- ^ Suffix.
-> Software ()
fput h pre e post = Software $ Imp.fput h pre e post
-- | Get a primitive value from a handle.
fget :: (Formattable a, SType' a) => Handle -> Software (SExp a)
fget = Software . Imp.fget
-- | Handle to @stdin@.
stdin :: Handle
stdin = Imp.stdin
-- | Handle to @stdout@.
stdout :: Handle
stdout = Imp.stdout
--------------------------------------------------------------------------------
-- *** Printing.
class PrintfType r
where
fprf :: Handle -> String -> [Imp.PrintfArg SExp SoftwarePrimType] -> r
instance (a ~ ()) => PrintfType (Software a)
where
fprf h form = Software . Oper.singleInj . Imp.FPrintf h form . reverse
instance (Formattable a, SType' a, PrintfType r) => PrintfType (SExp a -> r)
where
fprf h form as = \a -> fprf h form (Imp.PrintfArg a : as)
-- | Print to a handle. Accepts a variable number of arguments.
fprintf :: PrintfType r => Handle -> String -> r
fprintf h format = fprf h format []
-- | Print to @stdout@. Accepts a variable number of arguments.
printf :: PrintfType r => String -> r
printf = fprintf Imp.stdout
--------------------------------------------------------------------------------
-- *** Memory.
-- | Software argument specialized to software primitives.
type SArg = Argument SoftwarePrimType
-- | Establish a memory-mapping to a hardware signature.
mmap :: String -> HSig a -> Software (Address a)
mmap address sig =
do pointer <- Software $ Oper.singleInj $ MMap address sig
return $ Address pointer sig
-- | Call a memory-mapped component.
call :: Address a -> SArg (Soften a) -> Software ()
call address arg = Software $ Oper.singleInj $ Call address arg
-- | ...
nil :: SArg ()
nil = Nil
-- | ...
(>:) :: forall a b . (SType' a, HType' a, Integral a)
=> Ref (SExp a) -> SArg b -> SArg (Ref (SExp a) -> b)
(>:) = withHType' (Proxy :: Proxy a) ARef
-- | NOTE(review): unimplemented stub -- any call diverges at runtime.
-- Presumably meant to pass a plain value where '(>:)' passes a reference.
(>.) :: forall a b . (SType' a, HType' a, Integral a)
  => SExp a -> SArg b -> SArg (Ref (SExp a) -> b)
(>.) v = undefined
-- | ...
(>>:) :: forall a b . (SType' a, HType' a, Integral a)
=> Arr (SExp a) -> SArg b -> SArg (Arr (SExp a) -> b)
(>>:) = withHType' (Proxy :: Proxy a) AArr
-- | NOTE(review): unimplemented stub -- any call diverges at runtime.
-- Presumably meant to pass an immutable array where '(>>:)' passes a
-- mutable one.
(>>.) :: forall a b . (SType' a, HType' a, Integral a)
  => IArr (SExp a) -> SArg b -> SArg (Arr (SExp a) -> b)
(>>.) = undefined
infixr 1 >:, >>:
--------------------------------------------------------------------------------
-- *** C specific.
-- | Create a null pointer
newPtr :: SType' a => Software (Ptr a)
newPtr = newNamedPtr "p"
-- | Create a named null pointer
--
-- The provided base name may be appended with a unique identifier to avoid name
-- collisions.
newNamedPtr :: SType' a => String -> Software (Ptr a)
newNamedPtr = Software . Imp.newNamedPtr
-- | Cast a pointer to an array
ptrToArr :: SType' a => Ptr a -> SExp Length -> Software (Arr (SExp a))
ptrToArr ptr len = fmap (Arr 0 len . Node) $ Software $ Imp.ptrToArr ptr
-- | Create a pointer to an abstract object. The only thing one can do with such
-- objects is to pass them to 'callFun' or 'callProc'.
newObject
:: String -- ^ Object type
-> Bool -- ^ Pointed?
-> Software Object
newObject = newNamedObject "obj"
-- | Create a pointer to an abstract object. The only thing one can do with such
-- objects is to pass them to 'callFun' or 'callProc'.
--
-- The provided base name may be appended with a unique identifier to avoid name
-- collisions.
newNamedObject
:: String -- ^ Base name
-> String -- ^ Object type
-> Bool -- ^ Pointed?
-> Software Object
newNamedObject base t p = Software $ Imp.newNamedObject base t p
-- | Add an @#include@ statement to the generated code
addInclude :: String -> Software ()
addInclude = Software . Imp.addInclude
-- | Add a global definition to the generated code
--
-- Can be used conveniently as follows:
--
-- > {-# LANGUAGE QuasiQuotes #-}
-- >
-- > import Feldspar.IO
-- >
-- > prog = do
-- > ...
-- > addDefinition myCFunction
-- > ...
-- > where
-- > myCFunction = [cedecl|
-- > void my_C_function( ... )
-- > {
-- > // C code
-- > // goes here
-- > }
-- > |]
addDefinition :: Imp.Definition -> Software ()
addDefinition = Software . Imp.addDefinition
-- | Declare an external function
addExternFun :: SType' res
=> String -- ^ Function name
-> proxy res -- ^ Proxy for expression and result type
-> [FunArg SExp SoftwarePrimType]
-- ^ Arguments (only used to determine types)
-> Software ()
addExternFun fun res args = Software $ Imp.addExternFun fun res args
-- | Declare an external procedure
addExternProc
:: String -- ^ Procedure name
-> [FunArg SExp SoftwarePrimType]
-- ^ Arguments (only used to determine types)
-> Software ()
addExternProc proc args = Software $ Imp.addExternProc proc args
-- | Call a function
callFun :: SType' a
=> String -- ^ Function name
-> [FunArg SExp SoftwarePrimType]
-- ^ Arguments
-> Software (SExp a)
callFun fun as = Software $ Imp.callFun fun as
-- | Call a procedure
callProc
:: String -- ^ Function name
-> [FunArg SExp SoftwarePrimType]
-- ^ Arguments
-> Software ()
callProc fun as = Software $ Imp.callProc fun as
-- | Call a procedure and assign its result
callProcAssign :: Assignable obj
=> obj -- ^ Object to which the result should be assigned
-> String -- ^ Procedure name
-> [FunArg SExp SoftwarePrimType]
-- ^ Arguments
-> Software ()
callProcAssign obj fun as = Software $ Imp.callProcAssign obj fun as
-- | Declare and call an external function
externFun :: SType' res
=> String -- ^ Procedure name
-> [FunArg SExp SoftwarePrimType]
-- ^ Arguments
-> Software (SExp res)
externFun fun args = Software $ Imp.externFun fun args
-- | Declare and call an external procedure
externProc
:: String -- ^ Procedure name
-> [FunArg SExp SoftwarePrimType]
-- ^ Arguments
-> Software ()
externProc proc args = Software $ Imp.externProc proc args
-- | Generate code into another translation unit
inModule :: String -> Software () -> Software ()
inModule mod = Software . Imp.inModule mod . unSoftware
-- | Get current time as number of seconds passed today
getTime :: Software (SExp Double)
getTime = Software Imp.getTime
-- | Constant string argument
strArg :: String -> FunArg SExp SoftwarePrimType
strArg = Imp.strArg
-- | Value argument
valArg :: SoftwarePrimType a => SExp a -> FunArg SExp SoftwarePrimType
valArg = Imp.valArg
-- | Reference argument
refArg :: SoftwarePrimType (Internal a) => Ref a -> FunArg SExp SoftwarePrimType
refArg (Ref r) = Imp.refArg (extractNode r)
-- | Mutable array argument
arrArg :: SoftwarePrimType (Internal a) => Arr a -> FunArg SExp SoftwarePrimType
arrArg (Arr o _ a) = Imp.offset (Imp.arrArg (extractNode a)) o
-- | Immutable array argument
iarrArg :: SoftwarePrimType (Internal a) => IArr a -> FunArg SExp SoftwarePrimType
iarrArg (IArr o _ a) = Imp.offset (Imp.iarrArg (extractNode a)) o
-- | Abstract object argument
objArg :: Object -> FunArg SExp SoftwarePrimType
objArg = Imp.objArg
-- | Named constant argument
constArg
:: String -- ^ Type
-> String -- ^ Named constant
-> FunArg SExp SoftwarePrimType
constArg = Imp.constArg
-- | Modifier that takes the address of another argument
addr :: FunArg SExp SoftwarePrimType -> FunArg SExp SoftwarePrimType
addr = Imp.addr
-- | Modifier that dereferences another argument
deref :: FunArg SExp SoftwarePrimType -> FunArg SExp SoftwarePrimType
deref = Imp.deref
--------------------------------------------------------------------------------
--
--------------------------------------------------------------------------------
-- | Swap an 'Imp.FreePred' constraint for a 'SoftwarePrimType' one: the
-- dictionary produced by 'predicateDict' for the type's software
-- representation discharges the 'Imp.FreePred' constraint on @f@.
withSType :: forall a b . Proxy a
  -> (Imp.FreePred SExp a => b)
  -> (SoftwarePrimType a => b)
withSType _ f = case predicateDict (softwareRep :: SoftwarePrimTypeRep a) of
  Dict -> f
-- | Proves that a type from 'SoftwarePrimTypeRep' satisfies 'Imp.FreePred'
-- by exhaustive case analysis over every representable primitive type.
predicateDict :: SoftwarePrimTypeRep a -> Dict (Imp.FreePred SExp a)
predicateDict rep = case rep of
  BoolST          -> Dict
  Int8ST          -> Dict
  Int16ST         -> Dict
  Int32ST         -> Dict
  Int64ST         -> Dict
  Word8ST         -> Dict
  Word16ST        -> Dict
  Word32ST        -> Dict
  Word64ST        -> Dict
  FloatST         -> Dict
  ComplexFloatST  -> Dict
  ComplexDoubleST -> Dict
--------------------------------------------------------------------------------
|
markus-git/co-feldspar
|
src/Feldspar/Software/Frontend.hs
|
bsd-3-clause
| 22,407
| 0
| 17
| 4,764
| 6,202
| 3,258
| 2,944
| 439
| 12
|
-- vim: encoding=latin1
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeOperators #-}
-- | Instances for tuples of fields up to a 10-tuple. This allows
-- accessing several fields simultaneously.
--
-- > r.#(field1, field2, field3#field4) =: (value1, value2, value3)
--
-- In addition, the pair instance is recursively defined, which allows
-- stuff like
--
-- > import Control.Arrow ((***))
-- > r.#(field1, (field2, field3)) =~ (f *** g *** h)
--
module Data.Record.Field.Tuple
(
) where
import Data.Record.Field.Basic
import Data.Record.Field.Combinators
import Data.Record.Label hiding ((=:))
{- Commented out to remove the dependency to pretty.
import Text.PrettyPrint hiding (int)
import qualified Text.PrettyPrint as PP
-}
-- | Pair instance, defined recursively on the second component: the
-- first component is a plain label, the second any 'Field' with the
-- same source record.
instance (Field f, r ~ Src f) => Field (r :-> a, f) where
    type Src (r :-> a, f) = r
    type Dst (r :-> a, f) = (a, Dst f)
    field (hd, tl) = lens get set
      where
        rest = field tl
        get record = (getL hd record, getL rest record)
        set (x, xs) = setL rest xs . setL hd x
{- Commented out to remove the dependency to pretty.
mkTupleFieldInstance :: Int -> String
mkTupleFieldInstance n = render inst
where inst = header $+$ nest 4 defs
header = text "instance Field" <+> typ <+> text "where"
typ = tupleOf [ text "r :->" <+> v | v <- vs ]
vals = tupleOf vs
defs = vcat [rec, val, field, accs]
tupleOf = parens . commaSep
commaSep = sep . punctuate (text ",")
rs = [ text "r" <> PP.int i | i <- [1..n] ]
vs = take n $ [ text [v] | v <- ['a'..'z'] ] ++
[ text [v1,v2] | v1 <- ['a'..'z']
, v2 <- ['a'..'z'] ]
rec = text "type Src" <+> typ <+> text "= r"
val = text "type Dst" <+> typ <+> text "=" <+> vals
field = text "field" <+> tupleOf rs <+> text "= lens get set"
accs = nest 4 $ text "where" <+> vcat [getter, setter]
getter = text "get r =" <+> tupleOf [ get r | r <- rs ]
setter = text "set" <+> vals <+> text "=" <+>
(sep . punctuate (text " .")) [ set r v | (r,v) <- zip rs vs ]
get r = text "getL" <+> r <+> text "r"
set r v = text "setL" <+> r <+> v
-}
-- | Access three fields simultaneously.
instance Field (r :-> a, r :-> b, r :-> c) where
    type Src (r :-> a, r :-> b, r :-> c) = r
    type Dst (r :-> a, r :-> b, r :-> c) = (a, b, c)
    field (l1, l2, l3) = lens get set
      where
        get record = (getL l1 record, getL l2 record, getL l3 record)
        set (x1, x2, x3) = setL l1 x1 . setL l2 x2 . setL l3 x3
-- | Access four fields simultaneously.
instance Field (r :-> a, r :-> b, r :-> c, r :-> d) where
    type Src (r :-> a, r :-> b, r :-> c, r :-> d) = r
    type Dst (r :-> a, r :-> b, r :-> c, r :-> d) = (a, b, c, d)
    field (l1, l2, l3, l4) = lens get set
      where
        get record = (getL l1 record, getL l2 record, getL l3 record, getL l4 record)
        set (x1, x2, x3, x4) = setL l1 x1 . setL l2 x2 . setL l3 x3 . setL l4 x4
-- | Access five fields simultaneously.
instance Field (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e) where
    type Src (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e) = r
    type Dst (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e) = (a, b, c, d, e)
    field (l1, l2, l3, l4, l5) = lens get set
      where
        get record =
            ( getL l1 record, getL l2 record, getL l3 record
            , getL l4 record, getL l5 record )
        set (x1, x2, x3, x4, x5) =
            setL l1 x1 . setL l2 x2 . setL l3 x3 . setL l4 x4 . setL l5 x5
-- | Access six fields simultaneously.
instance Field (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f) where
    type Src (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f) = r
    type Dst (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f) = (a, b, c, d, e, f)
    field (l1, l2, l3, l4, l5, l6) = lens get set
      where
        get record =
            ( getL l1 record, getL l2 record, getL l3 record
            , getL l4 record, getL l5 record, getL l6 record )
        set (x1, x2, x3, x4, x5, x6) =
            setL l1 x1 . setL l2 x2 . setL l3 x3 .
            setL l4 x4 . setL l5 x5 . setL l6 x6
-- | Access seven fields simultaneously.
instance Field (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g) where
    type Src (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g) = r
    type Dst (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g) = (a, b, c, d, e, f, g)
    field (l1, l2, l3, l4, l5, l6, l7) = lens get set
      where
        get record =
            ( getL l1 record, getL l2 record, getL l3 record, getL l4 record
            , getL l5 record, getL l6 record, getL l7 record )
        set (x1, x2, x3, x4, x5, x6, x7) =
            setL l1 x1 . setL l2 x2 . setL l3 x3 . setL l4 x4 .
            setL l5 x5 . setL l6 x6 . setL l7 x7
-- | Access eight fields simultaneously.
instance Field (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g, r :-> h) where
    type Src (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g, r :-> h) = r
    type Dst (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g, r :-> h) = (a, b, c, d, e, f, g, h)
    field (l1, l2, l3, l4, l5, l6, l7, l8) = lens get set
      where
        get record =
            ( getL l1 record, getL l2 record, getL l3 record, getL l4 record
            , getL l5 record, getL l6 record, getL l7 record, getL l8 record )
        set (x1, x2, x3, x4, x5, x6, x7, x8) =
            setL l1 x1 . setL l2 x2 . setL l3 x3 . setL l4 x4 .
            setL l5 x5 . setL l6 x6 . setL l7 x7 . setL l8 x8
-- | Access nine fields simultaneously.
instance Field (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g, r :-> h, r :-> i) where
    type Src (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g, r :-> h, r :-> i) = r
    type Dst (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g, r :-> h, r :-> i) = (a, b, c, d, e, f, g, h, i)
    field (l1, l2, l3, l4, l5, l6, l7, l8, l9) = lens get set
      where
        get record =
            ( getL l1 record, getL l2 record, getL l3 record, getL l4 record
            , getL l5 record, getL l6 record, getL l7 record, getL l8 record
            , getL l9 record )
        set (x1, x2, x3, x4, x5, x6, x7, x8, x9) =
            setL l1 x1 . setL l2 x2 . setL l3 x3 . setL l4 x4 .
            setL l5 x5 . setL l6 x6 . setL l7 x7 . setL l8 x8 .
            setL l9 x9
-- | Access ten fields simultaneously.
instance Field (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g, r :-> h, r :-> i, r :-> j) where
    type Src (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g, r :-> h, r :-> i, r :-> j) = r
    type Dst (r :-> a, r :-> b, r :-> c, r :-> d, r :-> e, r :-> f, r :-> g, r :-> h, r :-> i, r :-> j) = (a, b, c, d, e, f, g, h, i, j)
    field (l1, l2, l3, l4, l5, l6, l7, l8, l9, l10) = lens get set
      where
        get record =
            ( getL l1 record, getL l2 record, getL l3 record, getL l4 record
            , getL l5 record, getL l6 record, getL l7 record, getL l8 record
            , getL l9 record, getL l10 record )
        set (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) =
            setL l1 x1 . setL l2 x2 . setL l3 x3 . setL l4 x4 .
            setL l5 x5 . setL l6 x6 . setL l7 x7 . setL l8 x8 .
            setL l9 x9 . setL l10 x10
|
AstraFIN/fields
|
Data/Record/Field/Tuple.hs
|
bsd-3-clause
| 10,886
| 0
| 17
| 6,528
| 3,092
| 1,703
| 1,389
| 247
| 0
|
module Htrans.Logger (
-- funcs
setAppLogger,
logStartAppDebug,
logStopAppDebug,
logConfigDebug,
logInOutInfo
) where
import System.Log.Logger
import System.Log.Handler.Simple (fileHandler)
import System.Log.Handler (setFormatter, LogHandler)
import System.Log.Formatter (simpleLogFormatter)
import qualified Data.Text as T
import Htrans.Types (LogLevel(..))
import Htrans.Cli (appName)
-- | Install a file handler with the application's log format as the
-- global logger for 'appName', filtered at the given level.
setAppLogger :: FilePath -> LogLevel -> IO ()
setAppLogger logPath level = do
    handler <- fileHandler logPath prio
    updateGlobalLogger appName (addHandler (setCustomFormatter handler) . setLevel prio)
  where
    prio = getPriority level
-- | Attach the application's log line format, @[$time : $prio] : $msg @,
-- to the given handler.
setCustomFormatter :: System.Log.Handler.LogHandler a => a -> a
setCustomFormatter h =
  setFormatter h (simpleLogFormatter "[$time : $prio] : $msg ")
-- | Debug-level marker emitted when translation starts.
logStartAppDebug :: IO ()
logStartAppDebug = debugM appName "---- Start translation! ----"
-- | Dump the parsed configuration at debug level.
logConfigDebug :: Show a => a -> IO ()
logConfigDebug cfg = debugM appName ("Get configuration:" ++ show cfg)
-- | Log input text and translation output at info level; a missing
-- value is rendered as the placeholder string.
logInOutInfo :: Maybe T.Text -> Maybe T.Text -> IO ()
logInOutInfo input output =
  infoM appName ("input:" ++ render input ++ " output:" ++ render output)
  where
    render = maybe "No text" T.unpack
-- | Debug-level marker emitted when translation finishes.
logStopAppDebug :: IO ()
logStopAppDebug = debugM appName "---- Stop translation! -----"
-- | Translate the application's 'LogLevel' to an hslogger 'Priority'.
-- Unrecognised levels map to EMERGENCY.
getPriority :: LogLevel -> Priority
getPriority ERR = ERROR
getPriority INF = INFO
getPriority DEB = DEBUG
getPriority _   = EMERGENCY
|
johhy/htrans
|
src/Htrans/Logger.hs
|
bsd-3-clause
| 1,532
| 0
| 12
| 288
| 438
| 229
| 209
| 38
| 4
|
-- |
-- Module : Crypto.PubKey.Curve25519
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
-- Curve25519 support
--
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Crypto.PubKey.Curve25519
( SecretKey
, PublicKey
, DhSecret
-- * Smart constructors
, dhSecret
, publicKey
, secretKey
-- * Methods
, dh
, toPublic
, generateSecretKey
) where
import Data.Bits
import Data.Word
import Foreign.Ptr
import Foreign.Storable
import GHC.Ptr
import Crypto.Error
import Crypto.Internal.Compat
import Crypto.Internal.Imports
import Crypto.Internal.ByteArray (ByteArrayAccess, ByteArray, ScrubbedBytes, Bytes, withByteArray)
import qualified Crypto.Internal.ByteArray as B
import Crypto.Error (CryptoFailable(..))
import Crypto.Random
-- | A Curve25519 Secret key.
-- Backed by 'ScrubbedBytes' (a byte array type intended to be scrubbed
-- from memory when released).
newtype SecretKey = SecretKey ScrubbedBytes
    deriving (Show,Eq,ByteArrayAccess,NFData)
-- | A Curve25519 public key (plain 'Bytes'; not secret material).
newtype PublicKey = PublicKey Bytes
    deriving (Show,Eq,ByteArrayAccess,NFData)
-- | A Curve25519 Diffie Hellman secret related to a
-- public key and a secret key. Scrubbed like 'SecretKey'.
newtype DhSecret = DhSecret ScrubbedBytes
    deriving (Show,Eq,ByteArrayAccess,NFData)
-- | Try to build a public key from a bytearray.
-- Only the length (exactly 32 bytes) is validated; the bytes are
-- copied into an immutable buffer.
publicKey :: ByteArrayAccess bs => bs -> CryptoFailable PublicKey
publicKey bs
    | B.length bs == 32 = CryptoPassed $ PublicKey $ B.copyAndFreeze bs (\_ -> return ())
    | otherwise = CryptoFailed CryptoError_PublicKeySizeInvalid
-- | Try to build a secret key from a bytearray.
-- Checks the length (32 bytes); the structural validity check
-- ('isValidPtr') is currently disabled and always succeeds — the
-- intended bit checks are kept below in commented-out form.
secretKey :: ByteArrayAccess bs => bs -> CryptoFailable SecretKey
secretKey bs
    | B.length bs == 32 = unsafeDoIO $ do
        withByteArray bs $ \inp -> do
            valid <- isValidPtr inp
            if valid
                then (CryptoPassed . SecretKey) <$> B.copy bs (\_ -> return ())
                else return $ CryptoFailed CryptoError_SecretKeyStructureInvalid
    | otherwise = CryptoFailed CryptoError_SecretKeySizeInvalid
  where
        -- e[0] &= 0xf8;
        -- e[31] &= 0x7f;
        -- e[31] |= 40;
        isValidPtr :: Ptr Word8 -> IO Bool
        isValidPtr _ = do
            --b0 <- peekElemOff inp 0
            --b31 <- peekElemOff inp 31
            return True
{-
            return $ and [ testBit b0 0 == False
                         , testBit b0 1 == False
                         , testBit b0 2 == False
                         , testBit b31 7 == False
                         , testBit b31 6 == True
                         ]
-}
{-# NOINLINE secretKey #-}
-- | Create a DhSecret from a bytearray object.
-- Only the length (32 bytes) is validated.
dhSecret :: ByteArrayAccess b => b -> CryptoFailable DhSecret
dhSecret bs
    | B.length bs == 32 = CryptoPassed $ DhSecret $ B.copyAndFreeze bs (\_ -> return ())
    | otherwise = CryptoFailed CryptoError_SharedSecretSizeInvalid
-- | Compute the Diffie Hellman secret from a public key and a secret key.
--
-- This implementation may return an all-zero value as it does not check for
-- the condition.
--
-- NOTE: the @<$>@ here is fmap over the function functor — @DhSecret@
-- is applied to the 'B.allocAndFreeze' result once the continuation is
-- supplied.
dh :: PublicKey -> SecretKey -> DhSecret
dh (PublicKey pub) (SecretKey sec) = DhSecret <$>
    B.allocAndFreeze 32 $ \result ->
    withByteArray sec   $ \psec ->
    withByteArray pub   $ \ppub ->
         ccryptonite_curve25519 result psec ppub
{-# NOINLINE dh #-}
-- | Create a public key from a secret key, by multiplying the curve's
-- base point (9 followed by 31 zero bytes) with the secret scalar.
toPublic :: SecretKey -> PublicKey
toPublic (SecretKey sec) = PublicKey <$>
    B.allocAndFreeze 32 $ \result ->
    withByteArray sec   $ \psec ->
        ccryptonite_curve25519 result psec basePoint
  where
    basePoint = Ptr "\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
{-# NOINLINE toPublic #-}
-- | Generate a secret key from 32 random bytes, applying the standard
-- X25519 "clamping": clear the low 3 bits of byte 0, clear the top bit
-- of byte 31 and set its bit 6.
generateSecretKey :: MonadRandom m => m SecretKey
generateSecretKey = tweakToSecretKey <$> getRandomBytes 32
  where
        tweakToSecretKey :: ScrubbedBytes -> SecretKey
        tweakToSecretKey bin = SecretKey $ B.copyAndFreeze bin $ \inp -> do
            modifyByte inp 0 (\e0 -> e0 .&. 0xf8)
            modifyByte inp 31 (\e31 -> (e31 .&. 0x7f) .|. 0x40)
        -- Read-modify-write a single byte at the given offset.
        modifyByte :: Ptr Word8 -> Int -> (Word8 -> Word8) -> IO ()
        modifyByte p n f = peekByteOff p n >>= pokeByteOff p n . f
-- C implementation of the curve25519 scalar multiplication (donna).
foreign import ccall "cryptonite_curve25519_donna"
    ccryptonite_curve25519 :: Ptr Word8 -- ^ public
                           -> Ptr Word8 -- ^ secret
                           -> Ptr Word8 -- ^ basepoint
                           -> IO ()
|
tekul/cryptonite
|
Crypto/PubKey/Curve25519.hs
|
bsd-3-clause
| 4,742
| 0
| 19
| 1,326
| 945
| 503
| 442
| 79
| 2
|
{-# LANGUAGE UnicodeSyntax, OverloadedStrings, TemplateHaskell, QuasiQuotes, TypeFamilies #-}
module ExampleTypes where
import Control.Monad
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import Massive.Database.MongoDB
import System.Locale
import Text.Printf
-- | Example user record persisted via MongoDB.
-- Note the mixed use of UnicodeSyntax (∷) and plain (::) annotations.
data User = User { userName ∷ Text
                 , userAlias ∷ Text
                 , userAge :: Double
                 , userCreatedAt ∷ UTCTime
                 }
-- Template Haskell splice deriving a MongoEntity instance for 'User'
-- with default settings.
asMongoEntity ''User useDefaults
-- | Pretty-print a fetched 'User' entity (id plus fields) to stdout.
prettyUser ∷ Entity User → IO ()
prettyUser (Entity userId user) = do
  void $ printf "%s:\n" (show userId)
  void $ printf "  User Name : %s\n" (T.unpack (userName user))
  void $ printf "  User Age  : %f\n" (userAge user)
  void $ printf "  User Alias: %s\n" (T.unpack (userAlias user))
  -- The last statement already has type IO (); the trailing
  -- 'return ()' of the original was redundant and has been removed.
  void $ printf "  Created At: %s\n\n" (formatTime defaultTimeLocale "%F %T" (userCreatedAt user))
|
HalfWayMan/mt-mongodb
|
examples/ExampleTypes.hs
|
bsd-3-clause
| 944
| 0
| 12
| 248
| 249
| 129
| 120
| 22
| 1
|
{-# LANGUAGE OverloadedStrings, GeneralizedNewtypeDeriving #-}
module Clay.Text
(
-- * Letter and word-spacing.
letterSpacing
, wordSpacing
-- * Text-rendering.
, TextRendering
, textRendering
, optimizeSpeed, optimizeLegibility, geometricPrecision
-- * Text-shadow.
, textShadow
-- * Text-indent.
, TextIndent
, textIndent
, eachLine, hanging
, indent
-- * Text-direction.
, TextDirection
, direction
, ltr
, rtl
-- * Text-align.
, TextAlign
, textAlign
, justify, matchParent, start, end
, alignSide
, alignString
-- * White-space.
, WhiteSpace
, whiteSpace
, pre, nowrap, preWrap, preLine
-- * Text-decoration.
, TextDecoration
, textDecoration
, textDecorationStyle
, textDecorationLine
, textDecorationColor
, underline, overline, lineThrough, blink
-- * Text-transform.
, TextTransform
, textTransform
, capitalize, uppercase, lowercase, fullWidth
-- * Content.
, Content
, content
, contents
, attrContent
, stringContent
, uriContent
, openQuote, closeQuote, noOpenQuote, noCloseQuote
)
where
import Data.Monoid
import Data.String
import Data.Text (Text)
import Clay.Background
import Clay.Border
import Clay.Color
import Clay.Common
import Clay.Property
import Clay.Stylesheet
import Clay.Size
-------------------------------------------------------------------------------
-- | Set the @letter-spacing@ property.
letterSpacing :: Size a -> Css
letterSpacing = key "letter-spacing"
-- | Set the @word-spacing@ property.
wordSpacing :: Size a -> Css
wordSpacing = key "word-spacing"
-------------------------------------------------------------------------------
-- | Value type for the @text-rendering@ property.
newtype TextRendering = TextRendering Value
  deriving (Val, Auto, Inherit, Other)
-- Keyword values (these are camelCase in the SVG/CSS spec itself).
optimizeSpeed, optimizeLegibility, geometricPrecision :: TextRendering
optimizeSpeed = TextRendering "optimizeSpeed"
optimizeLegibility = TextRendering "optimizeLegibility"
geometricPrecision = TextRendering "geometricPrecision"
-- | Set the @text-rendering@ property.
textRendering :: TextRendering -> Css
textRendering = key "text-rendering"
-------------------------------------------------------------------------------
-- | Set @text-shadow@ from x offset, y offset, blur radius and color.
textShadow :: Size a -> Size a -> Size a -> Color -> Css
textShadow x y w c = key "text-shadow" (x ! y ! w ! c)
-------------------------------------------------------------------------------
-- | Value type for the @text-indent@ property.
newtype TextIndent = TextIndent Value
  deriving (Val, Inherit, Other)
eachLine, hanging :: TextIndent
eachLine = TextIndent "each-line"
hanging = TextIndent "hanging"
-- | Indent by a size value.
indent :: Size a -> TextIndent
indent = TextIndent . value
-- | Set the @text-indent@ property.
textIndent :: TextIndent -> Css
textIndent = key "text-indent"
-------------------------------------------------------------------------------
-- | Value type for the @direction@ property.
newtype TextDirection = TextDirection Value
  deriving (Val, Normal, Inherit, Other)
-- | Left-to-right text direction.
ltr :: TextDirection
ltr = TextDirection "ltr"
-- | Right-to-left text direction.
rtl :: TextDirection
rtl = TextDirection "rtl"
-- | Set the @direction@ property.
direction :: TextDirection -> Css
direction = key "direction"
-------------------------------------------------------------------------------
-- | Value type for the @text-align@ property.
newtype TextAlign = TextAlign Value
  deriving (Val, Normal, Inherit, Other)

justify, matchParent, start, end :: TextAlign
justify = TextAlign "justify"
-- Fixed: the CSS keyword is spelled "match-parent"; emitting the
-- Haskell camelCase name "matchParent" produced an invalid declaration.
matchParent = TextAlign "match-parent"
start = TextAlign "start"
end = TextAlign "end"
-- | Align to a given 'Side'.
alignSide :: Side -> TextAlign
alignSide = TextAlign . value
-- | Align on a single character (string value, quoted in the output).
alignString :: Char -> TextAlign
alignString = TextAlign . value . Literal . fromString . return
-- | Set the @text-align@ property.
textAlign :: TextAlign -> Css
textAlign = key "text-align"
-------------------------------------------------------------------------------
-- | Value type for the @white-space@ property.
newtype WhiteSpace = WhiteSpace Value
  deriving (Val, Normal, Inherit, Other)

-- | Set the @white-space@ property.
whiteSpace :: WhiteSpace -> Css
-- Fixed: the CSS property name is "white-space"; the previous key
-- "whiteSpace" emitted an invalid property name.
whiteSpace = key "white-space"

pre, nowrap, preWrap, preLine :: WhiteSpace
pre = WhiteSpace "pre"
nowrap = WhiteSpace "nowrap"
preWrap = WhiteSpace "pre-wrap"
preLine = WhiteSpace "pre-line"
-------------------------------------------------------------------------------
-- | Value type for the @text-decoration@ family of properties.
newtype TextDecoration = TextDecoration Value
  deriving (Val, None, Inherit, Other)
underline, overline, lineThrough, blink :: TextDecoration
underline = TextDecoration "underline"
overline = TextDecoration "overline"
lineThrough = TextDecoration "line-through"
blink = TextDecoration "blink"
-- | Set the @text-decoration-line@ property.
textDecorationLine :: TextDecoration -> Css
textDecorationLine = key "text-decoration-line"
-- | Set the @text-decoration-color@ property.
textDecorationColor :: Color -> Css
textDecorationColor = key "text-decoration-color"
-- | Set the @text-decoration@ shorthand property.
textDecoration :: TextDecoration -> Css
textDecoration = key "text-decoration"
-- | Set the @text-decoration-style@ property (reuses border 'Stroke').
textDecorationStyle :: Stroke -> Css
textDecorationStyle = key "text-decoration-style"
-------------------------------------------------------------------------------
-- | Value type for the @text-transform@ property.
newtype TextTransform = TextTransform Value
  deriving (Val, None, Inherit)
capitalize, uppercase, lowercase, fullWidth :: TextTransform
capitalize = TextTransform "capitalize"
uppercase = TextTransform "uppercase"
lowercase = TextTransform "lowercase"
fullWidth = TextTransform "full-width"
-- | Set the @text-transform@ property.
textTransform :: TextTransform -> Css
textTransform = key "text-transform"
-------------------------------------------------------------------------------
-- | Value type for the @content@ property.
newtype Content = Content Value
  deriving (Val, None, Normal, Inherit)

-- | @content: attr(...)@ — insert the value of an attribute.
attrContent :: Text -> Content
attrContent a = Content ("attr(" <> value a <> ")")
-- | @content: "..."@ — insert a literal string.
stringContent :: Text -> Content
stringContent = Content . value . Literal
-- | Insert an external resource by URL.
uriContent :: Text -> Content
-- Fixed: CSS uses the url() functional notation; there is no uri()
-- function, so the previous output was invalid CSS.
uriContent u = Content ("url(" <> value (Literal u) <> ")")
openQuote, closeQuote, noOpenQuote, noCloseQuote :: Content
openQuote = Content "open-quote"
closeQuote = Content "close-quote"
noOpenQuote = Content "no-open-quote"
noCloseQuote = Content "no-close-quote"
-- | Set the @content@ property to a single value.
content :: Content -> Css
content = key "content"
-- | Set the @content@ property to several values, joined by 'noCommas'.
contents :: [Content] -> Css
contents cs = key "content" (noCommas cs)
-- TODO: counters
|
bergmark/clay
|
src/Clay/Text.hs
|
bsd-3-clause
| 5,720
| 0
| 11
| 786
| 1,245
| 711
| 534
| 145
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
-- |
-- Module: $HEADER$
-- Description: Windows Named Pipes streaming API.
-- Copyright: (c) 2016, Ixperta Solutions s.r.o.
-- License: BSD3
--
-- Maintainer: Ixcom Core Team <ixcom-core@ixperta.com>
-- Stability: experimental
-- Portability: GHC specific language extensions.
--
-- Windows Named Pipes streaming API.
module Data.Streaming.NamedPipes
(
-- * Common
AppDataPipe
, defaultReadBufferSize
-- * Server
, HasAfterBindPipe(afterBindPipeLens)
, HasPipeName(pipeNameLens)
, HasPipeMode(pipeModeLens)
, getAfterBindPipe
, getPipeMode
, getPipeName
, setAfterBindPipe
, setPipeMode
, setPipeName
, ServerSettingsPipe
, serverSettingsPipe
, runPipeServer
-- * Client
, HasPipePath(pipePathLens)
, getPipePath
, setPipePath
, ClientSettingsPipe
, clientSettingsPipe
, runPipeClient
-- * Re-exported
, appRead
, appWrite
, getReadBufferSize
, setReadBufferSize
)
where
import Control.Applicative (pure)
import Control.Exception (bracket, finally, mask, onException)
import Control.Concurrent (forkIO)
import Control.Monad (unless, void, when)
import Control.Monad.Loops (iterateM_)
import Data.Function (($), (.))
import System.IO (IO)
import Data.Streaming.Network
( appRead
, appWrite
, getReadBufferSize
, setReadBufferSize
)
import Data.Streaming.NamedPipes.Internal
( AppDataPipe
, ClientSettingsPipe
( ClientSettingsPipe
, clientPipePath
)
, HasAfterBindPipe(afterBindPipeLens)
, HasPipeMode(pipeModeLens)
, HasPipeName(pipeNameLens)
, HasPipePath(pipePathLens)
, ServerSettingsPipe
( ServerSettingsPipe
, serverAfterBindPipe
, serverPipeMode
, serverPipeName
, serverReadBufferSizePipe
)
, clientSettingsPipe
, defaultReadBufferSize
, getAfterBindPipe
, getPipeMode
, getPipeName
, getPipePath
, mkAppDataPipe
, serverSettingsPipe
, setAfterBindPipe
, setPipeMode
, setPipeName
, setPipePath
)
import System.Win32.NamedPipes
( PipeHandle
, bindPipe
, closePipe
, connectPipe
, disconnectPipe
, getPipe
, readPipe
, writePipe
)
-- | Run an server application function with the given settings. This function
-- will accept connections on a Named Pipe, and spawn a new thread for each
-- connection.
--
-- Example:
--
-- @
-- 'runPipeServer' ('serverSettingsPipe' pipeName) $ \\appData ->
-- -- -->8-- Server code.
-- @
runPipeServer :: ServerSettingsPipe -> (AppDataPipe -> IO ()) -> IO a
runPipeServer cfg@ServerSettingsPipe{..} app = do
    pipe <- bindPipe'
    serverAfterBindPipe pipe
    -- NOTE: iterateM_ loops forever, hence the fully polymorphic
    -- @IO a@ result type.
    iterateM_ connectAndServe pipe
  where
    -- Wait for one client on the given listening instance, hand the
    -- connection to a forked handler thread, and return the next
    -- listening pipe instance.
    connectAndServe :: PipeHandle -> IO PipeHandle
    connectAndServe pipe =
        -- It is important to close pipe only when exception is detected,
        -- otherwise we would close an active handle.
        --
        -- On this level we can use plain closePipe (in onExceptionClose)
        -- without disconnectPipe. If we have detected that client is connected
        -- then we are necessarily inside connection handling thread (see
        -- "serve" function).
        (`onExceptionClose` pipe) $ do
            haveClient <- connectPipe pipe
            -- We must create (bind) a new pipe instance before we fork the
            -- serving thread because otherwise that thread could close the
            -- handle sooner than we get to call bindPipe' and the named pipe
            -- would cease to exist for a short moment, confusing clients.
            pipe' <- bindPipe'
            -- Note, that when we are closing pipe', here, we expect that it
            -- had no chance of being used, therefore, it is safe to use
            -- closePipe (inside onExceptionClose) without involving
            -- disconnectPipe.
            when haveClient
                $ serve pipe `onExceptionClose` pipe'
            pure pipe'
      where
        onExceptionClose f p = f `onException` closePipe p
    -- We are assuming that it is optimal to use same size of input/output
    -- buffer as the read size when calling readPipe.
    bindPipe' :: IO PipeHandle
    bindPipe' = bindPipe serverReadBufferSizePipe serverPipeMode serverPipeName
    -- We need to use closePipe' instead of closePipe, since AppDataPipe are
    -- living in "app", which means that client is connected to a Named Pipe.
    -- See closePipe' for details.
    mkAppData :: PipeHandle -> AppDataPipe
    mkAppData = mkAppDataPipe cfg readPipe writePipe closePipe'
    -- Implementation of serve is based on how streaming-commons does it,
    -- i.e. we have masked asynchronous exception during forkIO, and defered
    -- their apperence to "app" evaluation.
    serve :: PipeHandle -> IO ()
    serve pipe = mask $ \restore ->
        void . forkIO
            $ restore (app (mkAppData pipe))
                `finally` closePipe' pipe
    -- When client is already connected to a named pipe, server has to
    -- disconnect it, which forces client end of the named pipe to be closed,
    -- before closing the server side end of a named pipe.
    --
    -- More can be found in MSDN documentation of DisconnectNamedPipe function:
    -- https://msdn.microsoft.com/en-us/library/windows/desktop/aa365166(v=vs.85).aspx
    closePipe' :: PipeHandle -> IO ()
    closePipe' pipe = do
        isHandleInvalid <- disconnectPipe pipe
        unless isHandleInvalid $ void (closePipe pipe)
-- Implementation of runPipeServer is inspired by streamings-common, and
-- Multithreaded Pipe Server example from MSDN:
-- https://msdn.microsoft.com/en-us/library/windows/desktop/aa365588(v=vs.85).aspx
-- | Run a client application function by connecting to the specified server
-- via Named Pipe. Client function is evaluated in current thread.
--
-- Example:
--
-- @
-- 'runPipeClient' ('clientSettingsPipe' pipePath) $ \\appData ->
-- -- -->8-- Client code.
-- @
runPipeClient :: ClientSettingsPipe -> (AppDataPipe -> IO a) -> IO a
runPipeClient cfg@ClientSettingsPipe{..} app = withPipe $ app . mkAppData
  where
    -- 'bracket' guarantees the pipe handle is closed even if 'app' throws.
    withPipe = getPipe clientPipePath `bracket` closePipe
    mkAppData = mkAppDataPipe cfg readPipe writePipe (void . closePipe)
-- Implementation of runPipeServer is inspired by streamings-common, and
-- Multithreaded Pipe Server example from MSDN:
-- https://msdn.microsoft.com/en-us/library/windows/desktop/aa365592(v=vs.85).aspx
|
IxpertaSolutions/windows-named-pipes
|
src/Data/Streaming/NamedPipes.hs
|
bsd-3-clause
| 6,641
| 0
| 15
| 1,610
| 823
| 505
| 318
| 116
| 1
|
--------------------------------------------------------------------------------
-- | Provides an easy way to combine several items in a list. The applications
-- are obvious:
--
-- * A post list on a blog
--
-- * An image list in a gallery
--
-- * A sitemap
{-# LANGUAGE TupleSections #-}
module Hakyll.Web.Template.List
( applyTemplateList
, applyJoinTemplateList
, chronological
, recentFirst
) where
--------------------------------------------------------------------------------
import Control.Monad (liftM)
import Data.List (intersperse, sortBy)
import Data.Ord (comparing)
import System.Locale (defaultTimeLocale)
--------------------------------------------------------------------------------
import Hakyll.Core.Compiler
import Hakyll.Core.Item
import Hakyll.Core.Metadata
import Hakyll.Web.Template
import Hakyll.Web.Template.Context
--------------------------------------------------------------------------------
-- | Generate a string of a listing of pages, after applying a template to each
-- page.
applyTemplateList :: Template
-> Context a
-> [Item a]
-> Compiler String
applyTemplateList = applyJoinTemplateList ""
--------------------------------------------------------------------------------
-- | Join a listing of pages with a string in between, after applying a template
-- to each page.
applyJoinTemplateList :: String
-> Template
-> Context a
-> [Item a]
-> Compiler String
applyJoinTemplateList delimiter tpl context items = do
items' <- mapM (applyTemplate tpl context) items
return $ concat $ intersperse delimiter $ map itemBody items'
--------------------------------------------------------------------------------
-- | Sort pages chronologically. This function assumes that the pages have a
-- @year-month-day-title.extension@ naming scheme -- as is the convention in
-- Hakyll.
chronological :: MonadMetadata m => [Item a] -> m [Item a]
chronological =
sortByM $ getItemUTC defaultTimeLocale . itemIdentifier
where
sortByM :: (Monad m, Ord k) => (a -> m k) -> [a] -> m [a]
sortByM f xs = liftM (map fst . sortBy (comparing snd)) $
mapM (\x -> liftM (x,) (f x)) xs
--------------------------------------------------------------------------------
-- | The reverse of 'chronological'
recentFirst :: (MonadMetadata m, Functor m) => [Item a] -> m [Item a]
recentFirst = fmap reverse . chronological
|
bergmark/hakyll
|
src/Hakyll/Web/Template/List.hs
|
bsd-3-clause
| 2,698
| 0
| 13
| 654
| 458
| 253
| 205
| 36
| 1
|
-- Test fixture: a Functor instance for the partially applied type
-- @Either Int@, which requires FlexibleInstances.
-- NOTE(review): base defines Functor (Either a); presumably this file
-- is only loaded where that does not clash — confirm against the test
-- harness.
instance Functor (Either Int) where
  fmap _ (Left n) = Left n
  fmap f (Right r) = Right (f r)
|
elbrujohalcon/hPage
|
res/test/FlexibleInstances.hs
|
bsd-3-clause
| 103
| 0
| 8
| 31
| 59
| 28
| 31
| 3
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main where
import Control.Applicative
import Control.Monad
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Napm.Password
import Napm.Types
import Test.Hspec
import Test.QuickCheck
-- | Generator for an arbitrary pair.
tuple :: (Arbitrary a, Arbitrary b) => Gen (a,b)
tuple = (,) <$> arbitrary <*> arbitrary
-- | Generator for an arbitrary triple.
triple :: (Arbitrary a, Arbitrary b, Arbitrary c) => Gen (a,b,c)
triple = (,,) <$> arbitrary <*> arbitrary <*> arbitrary
main :: IO ()
main = hspec suite
-- | Properties of the password derivation: a pinned known-answer test
-- plus a length property over arbitrary (length, domain, passphrase)
-- triples.
suite :: Spec
suite = describe "computePassword" $ do
    it "hashes a constant context to a consistent value" $
        computePassword (PasswordLength 12) (Domain "c0nt3xt") (Passphrase "s33d") `shouldBe` "xwR3ziEmkGc7"
    it "generates a password of the requested length" $
        forAll triple $ \(l@(PasswordLength m),p,d) ->
            T.length (computePassword l p d) === m
|
fractalcat/napm
|
tests/NapmTest.hs
|
mit
| 1,017
| 0
| 13
| 259
| 298
| 164
| 134
| 25
| 1
|
module E.Show(ePretty,render,prettyE) where
import Control.Monad.Identity
import Data.Char(chr)
import Data.Maybe
import Doc.DocLike
import Doc.PPrint
import Doc.Pretty
import E.E
import E.FreeVars()
import E.TypeCheck
import Name.Id
import Name.Name
import Name.Names
import Name.VConsts
import Options
import Support.FreeVars
import Support.Unparse
import Util.SetLike
import Util.VarName
import qualified Data.Map as Map
import qualified Doc.Chars as UC
import qualified FlagDump as FD
{-# NOINLINE render #-}
{-# NOINLINE ePretty #-}
{-# NOINLINE prettyE #-}
-- | Render a 'Doc' to a String using the column count from the global
-- 'options'.
render :: Doc -> String
render doc = displayS (renderPretty 100.0 (optColumns options) doc) ""
-- | Pretty-print an expression to a String.
prettyE :: E -> String
prettyE e = render $ ePretty e
instance DocLike d => PPrint d TVr where
    pprint TVr { tvrIdent = i } = pprint i
instance PPrint Doc E where
    pprint x = ePretty x
-- NOTE: not a recursive call — 'render' takes a Doc, so the inner
-- 'pprintAssoc' resolves to the @PPrint Doc E@ instance, whose result
-- is then rendered to a String.
instance PPrint String E where
    pprintAssoc a i x = render $ pprintAssoc a i x
instance PPrint String (Lit E E) where
    pprintAssoc _ n x | n <= 9 = prettyE (ELit x)
                      | otherwise = parens (prettyE (ELit x))
-- | Name-supply monad used while showing expressions; carries a
-- mapping from 'Id's to generated display names via 'VarNameT'.
newtype SEM a = SEM { _unSEM :: VarNameT E Id String Identity a }
    deriving(Monad,Functor)
-- Enumeration-like primitive types whose integer literals are shown
-- with these constructor names instead of raw numbers.
enumList = [
    (tc_Bool_,["False#","True#"]),
    (toName TypeConstructor ("Jhc.Order","Ordering#"),["LT#","EQ#","GT#"])
    ]
-- | Show a literal, with special display for enum-like constants,
-- characters, tuples, unboxed tuples, list syntax and type aliases.
-- Clause order below is significant: more specific literal shapes are
-- matched before the general constructor fallthroughs.
showLit ::
    (a -> SEM (Unparse Doc)) -- ^ routine for showing the contents of constructor literals
    -> Lit a E -- ^ the literal to show
    -> SEM (Unparse Doc) -- ^ the final result
showLit showBind l = do
    -- NOTE: the 'l' bound by the first guard shadows the argument 'l'.
    let f (LitInt i (ELit LitCons { litName = n })) | Just l <- lookup n enumList, i >= 0 && fromIntegral i < length l =
            return $ atom $ ((text $ l !! (fromIntegral i)))
        f (LitInt n (ELit LitCons { litName = t})) | t == tc_Char_ = return $ atom $ tshow (chr $ fromIntegral n) <> char '#'
        f (LitInt i t) | dump FD.EVerbose = do
            se <- showE t
            return $ (atom (text $ show i) `inhabit` se )
        f (LitInt i _) = return $ atom $ ((text $ show i))
        f LitCons { litName = s, litArgs = es } | Just n <- fromTupname s , n == length es = do
            es' <- mapM (fmap unparse . showBind) es
            return $ atom $ tupled es'
        f LitCons { litName = s, litArgs = es } | Just n <- fromUnboxedNameTuple s, n == length es = do
            es' <- mapM (fmap unparse . showBind) es
            return $ atom $ encloseSep (text "(# ") (text " #)") (text ", ") es'
        f LitCons { litName = n, litArgs = [a,b] } | dc_Cons == n = do
            a' <- showBind a
            b' <- showBind b
            return $ a' `cons` b'
        f LitCons { litName = n, litArgs = [e] } | tc_List == n = do
            e <- showBind e
            return $ atom (char '[' <> unparse e <> char ']')
        f LitCons { litName = n, litArgs = [] } | dc_EmptyList == n = return $ atom $ text "[]"
        -- f LitCons { litName = n, litArgs = [] } | Just m <- getModule n, m `elem`[toModule "Jhc.Prim.Bits", toModule "Jhc.Prim.Word"] = return $ atom $ text "[]"
        -- f LitCons { litName = ((tc_Addr_ ==) -> True), litType = ((eHash ==) -> True) } = return $ atom $ text "Addr_"
        -- f LitCons { litName = ((tc_FunAddr_ ==) -> True), litType = ((eHash ==) -> True) } = return $ atom $ text "FunAddr_"
        -- f LitCons { litName = ((tc_Char_ ==) -> True), litType = ((eHash ==) -> True) } = return $ atom $ text "Char_"
        -- f LitCons { litName = n, litArgs = [v] }
        -- f LitCons { litName = n, litArgs = [v] }
        --     | n == dc_Integer = go "Integer#"
        --     | n == dc_Int = go "Int#"
        --     | n == dc_Char = go "Char#"
        --   where go n = do
        --         se <- showBind v
        --         return $ atom (text n) `app` se
        f LitCons { litName = s, litArgs = es, litType = t,
                litAliasFor = Just af } | dump FD.EAlias = do
            s <- return $ fromMaybe s (shortenName s)
            es' <- mapM showBind es
            se <- showE af
            return $ foldl appCon (atom (tshow s <> char '@' <> parens (unparse se))) es' -- `inhabit` prettye t
        f LitCons { litName = s, litArgs = es, litType = t } = do
            s <- return $ fromMaybe s (shortenName s)
            es' <- mapM showBind es
            return $ foldl appCon (atom (tshow s)) es' -- `inhabit` prettye t
        cons = bop (R,5) (text ":")
        -- Drop qualification for names in 'shortName' or short modules.
        shortenName n = Map.lookup n shortName `mplus` (getModule n >>= mm) where
            mm m = if m `elem` shortMods then return (toUnqualified n) else Nothing
        shortMods = map toModule [ "Jhc.Prim.IO", "Jhc.Prim.Bits", "Jhc.Type.Word", "Jhc.Type.C" ]
    f l
-- Application operators at (almost) maximal binding strength.
app = bop (L,100) (text " ")
appCon = bop (L,99) (text " ")
-- Render an Id, preferring a display name allocated in the SEM
-- environment, falling back to the Id's own pretty form.
showI i = do
    n <- SEM $ maybeLookupName i
    case n of
        Nothing -> pprint i
        Just n -> text n
-- | Show a binder together with its type (and its Info when the
-- FD.EInfo dump flag is set).
showTVr :: TVr -> SEM (Unparse Doc)
showTVr TVr { tvrIdent = i, tvrType = t, tvrInfo = nfo} = do
    let si = if dump FD.EInfo then (<> tshow nfo) else id
    ty <- showE t
    ii <- showI i
    return $ atom (si ii) `inhabit` ty
-- Show a binder's name only, without its type.
showTVr' TVr { tvrIdent = i} = do
    ii <- showI i
    return $ atom ii
allocTVr :: TVr -> SEM a -> SEM a
allocTVr _tvr action | dump FD.EVerbose = action
allocTVr tvr action | tvrIdent tvr == emptyId = action
allocTVr tvr (SEM action) | tvrType tvr == eStar = do
SEM $ subVarName $ newName (map (:[]) ['a' ..]) eStar (tvrIdent tvr) >> action
allocTVr tvr (SEM action) | tvrType tvr == eStar `tFunc` eStar = do
SEM $ subVarName $ newName (map (('f':) . show) [0::Int ..]) (tvrType tvr) (tvrIdent tvr) >> action
allocTVr tvr (SEM action) | not $ isJust (fromId (tvrIdent tvr)) = do
SEM $ subVarName $ newName (map (('v':) . show) [1::Int ..]) Unknown (tvrIdent tvr) >> action
allocTVr _ action = action
-- collects lambda and pi abstractions
collectAbstractions e0 = go e0 [] where
go e1@(EPi tvr e) xs | tvrIdent tvr == emptyId = done e1 xs
| not (sortKindLike (tvrType tvr)) = go e ((UC.pI, tvr, True) :xs)
| tvrType tvr /= eStar = go e ((UC.forall, tvr, True) :xs)
| dump FD.EVerbose || tvrIdent tvr `member` (freeVars e::IdSet)
= go e ((UC.forall, tvr, False):xs)
| otherwise = done e1 xs
go e1@(ELam tvr e) xs | tvrType tvr == eStar = go e ((UC.lAmbda, tvr, False):xs)
| sortKindLike (tvrType tvr) = go e ((UC.lAmbda, tvr, True) :xs)
| otherwise = go e ((UC.lambda, tvr, True) :xs)
go e xs = done e xs
done e xs = (reverse xs, e)
short_names = [
tc_Bool, tc_Char, tc_IO, tc_ACIO, tc_State_,
tc_RealWorld, tc_Ordering, tc_Bool_, tc_Ratio, tc_Float,
tc_Double, tc_Ptr, tc_FunPtr, tc_Integer, tc_Addr_,
tc_FunAddr_, tc_Char_, dc_Boolzh, dc_Char, dc_Integer,
tc_ST, tc_Bang_]
shortName = Map.fromList [ (x, toUnqualified x) | x <- short_names]
showE :: E -> SEM (Unparse Doc)
showE e = do
let f e | Just s <- E.E.toString e = return $ atom $ (text $ show s)
f e | Just xs <- eToList e = do
xs <- mapM (fmap unparse . showE) xs
return $ atom $ list xs
f e | e == tRational = return $ atom $ text "Rational"
f e | e == tString = return $ atom $ text "String"
f e | e == tUnit = return $ atom $ text "()"
f e | e == tWorld__ = return $ atom $ text "RealWorld_"
f e | e == vUnit = return $ atom $ text "()"
f (EAp a b) = liftM2 app (showE a) (showE b)
f (EPi (TVr { tvrIdent = eid, tvrType = e1}) e2) | eid == emptyId = liftM2 arr (showE e1) (showE e2)
f (EPi (TVr { tvrIdent = n, tvrType = e1}) e2) | not $ dump FD.EVerbose, not $ n `member` (freeVars e2 ::IdSet) = liftM2 arr (showE e1) (showE e2)
f e0 | (as@(_:_), e) <- collectAbstractions e0 =
foldr (\(_, tvr, _) -> allocTVr tvr)
(do tops <- mapM p as
e <- showE e
return (fixitize (N,1) $ atom $ group $ (align $ skipToNest <> fillCat tops) <$> unparse e))
as
where
p :: (Doc, TVr, Bool) -> SEM Doc
p (c,t,detailed) = do tvr <- if detailed then showTVr t else showTVr' t
return (c <> unparse tvr <> (char '.'))
f (EVar tvr) = if dump FD.EVerbose then showTVr tvr else showTVr' tvr
f Unknown = return $ symbol (char '?')
f (ESort s) = return $ symbol (tshow s)
f (ELit (LitCons { litName = n, litArgs = [ELit (LitInt i _)] })) | n == dc_Char = return $ atom $ tshow $ chr (fromIntegral i)
f (ELit l) = showLit showE l
f (EError "" t) = do
ty <- showE t
return $ atom $ angles (text "exitFailure" <> UC.coloncolon <> unparse ty)
f (EError s t) = do
ty <- showE t
return $ atom $ angles ( UC.bottom <> char ':' <> text s <> UC.coloncolon <> unparse ty)
f (EPrim s es t) = do
es' <- mapM showE es
t <- showE t
return $ atom $ angles $ unparse $ foldl app (atom (pprint s)) es' `inhabit` t
f ELetRec { eDefs = ds, eBody = e } = foldr (\(tvr,_) -> allocTVr tvr) (do
e <- fmap unparse $ showE e
ds <- mapM (fmap unparse . showDecl) ds
return $ fixitize (N,98) $ atom $ nest 2 (group ( keyword "let"
<$> (align $ sep (map (<> char ';') ds))
<$> (keyword "in")) </> e )) ds
f ec@(ECase { eCaseScrutinee = e, eCaseAlts = alts }) = mt (showE (eCaseType ec)) $ allocTVr (eCaseBind ec) $ do
scrut <- fmap unparse $ showE e
alts <- mapM showAlt alts
let ecb = eCaseBind ec
isUsed = tvrIdent ecb `member` (freeVars (caseBodies ec) :: IdSet)
db <- showTVr (if dump FD.EVerbose || isUsed then ecb else ecb { tvrIdent = emptyId })
dcase <- case (eCaseDefault ec) of
Nothing -> return []
Just e -> do
e <- showE e
return [unparse db <+> UC.rArrow </> unparse e]
let alts' = map (\a -> nest 2 (group (a <> char ';'))) (alts ++ dcase)
let mbind | isJust (eCaseDefault ec) = empty
| (isUsed && isNothing (eCaseDefault ec)) || dump FD.EVerbose = text " " <> (if isUsed then id else (char '_' <>)) (unparse db) <+> text "<-"
| otherwise = empty
return $ fixitize ((N,98)) $ atom $
group (nest 2 ( keyword "case" <> mbind <+> scrut <+> keyword "of" <$> (align $ vcat alts')) )
f _ = error "undefined value in E.Show"
showAlt (Alt l e) = foldr allocTVr ans (litBinds l) where
ans = do
l <- showLit showTVr l
e <- showE e
return $ unparse l <+> UC.rArrow </> unparse e
showDecl (t,e) = do
t <- subSEM $ showTVr t
e <- subSEM $ showE e
return $ atom $ nest 2 $ group $ unparse t <+> (char '=') </> unparse e
keyword x = text x
symbol x = atom x
arr = bop (R,0) $ (space <> UC.rArrow <> space)
mt t x | dump FD.EVerbose = do
t <- t
x <- x
return $ x `inhabit` t
mt _ x = x
f e
subSEM (SEM act) = SEM $ subVarName act
inhabit = bop (N,-2) $ UC.coloncolon
ePretty e = unparse pe where
(SEM pe') = showE e
Identity pe = runVarNameT pe'
-- skip to the current nesting level, breaking the line if already past it
skipToNest = column (\k ->
nesting (\i -> if k > i
then linebreak
else text (replicate (i-k) ' ')))
|
dec9ue/jhc_copygc
|
src/E/Show.hs
|
gpl-2.0
| 12,128
| 10
| 28
| 4,246
| 4,750
| 2,376
| 2,374
| -1
| -1
|
module XorCnfSimplify (simplify, Sub(..)) where
import XorCnf
import qualified Data.Set as Set
import Data.List
import Generic
-- | A substitution discovered during simplification: replace a variable
-- by a single literal, by the definition given by a whole clause, or by
-- a boolean constant.
data Sub = SubLit Var Lit
         | SubDef Var Clause
         | SubConst Var Bool deriving (Read,Show)
-- | Simplify a single clause.
--
-- * An OR clause containing both a literal and its negation is a
--   tautology and is dropped ('Nothing').
-- * An even-parity XOR over no variables is trivially satisfied and is
--   dropped.
-- * Any other clause is returned unchanged.
simplify :: Clause -> Maybe Clause
simplify (Or lits)
    | isTautology = Nothing
    | otherwise = Just $ Or lits
    where
      -- A disjunction is a tautology when some literal occurs both
      -- positively and negatively. ('any' replaces the old
      -- ``True `elem` map ...`` construction.)
      isTautology = any hasNegLit (Set.toList lits)
      hasNegLit lit = neg lit `Set.member` lits
simplify (Xor parity vars)
    -- 'Set.null' is O(1) and clearer than comparing 'Set.size' to 0.
    | parity == Even && Set.null vars = Nothing
    | otherwise = Just (Xor parity vars)
|
a1880/xor
|
xcnf-preprocess/XorCnfSimplify.hs
|
gpl-3.0
| 623
| 0
| 12
| 169
| 229
| 121
| 108
| 17
| 1
|
-- Test fixture exercising GHC's RULES-pragma activation warnings.
-- The code must stay exactly as written: the point of each rule is the
-- (mis)match between its activation phase and the INLINE/NOINLINE
-- pragma on the function it rewrites.
module Rules where
import Data.Char
{-# RULES "map-loop" [ ~ ] forall f . map' f = map' (id . f) #-}
{-# NOINLINE map' #-}
map' f [] = []
map' f (x:xs) = f x : map' f xs
main = print (map' toUpper "Hello, World")
-- Should warn
-- (rule active only from phase 1, but foo1 may inline earlier)
foo1 x = x
{-# RULES "foo1" [ 1] forall x. foo1 x = x #-}
-- Should warn
-- (INLINE pragma competes with the rule)
foo2 x = x
{-# INLINE foo2 #-}
{-# RULES "foo2" [~ 1 ] forall x. foo2 x = x #-}
-- Should not warn
-- (NOINLINE guarantees the rule gets a chance to fire)
foo3 x = x
{-# NOINLINE foo3 #-}
{-# RULES "foo3" forall x. foo3 x = x #-}
{-# NOINLINE f #-}
f :: Int -> String
f x = "NOT FIRED"
{-# NOINLINE neg #-}
neg :: Int -> Int
neg = negate
-- Rule matching a higher-order pattern (c x); fires when f is applied
-- to an application of a Char->Int function to a Char.
{-# RULES
"f" forall (c::Char->Int) (x::Char). f (c x) = "RULE FIRED"
 #-}
|
mpickering/ghc-exactprint
|
tests/examples/ghc710/Rules.hs
|
bsd-3-clause
| 677
| 0
| 7
| 172
| 136
| 77
| 59
| -1
| -1
|
{-# LANGUAGE FlexibleInstances #-}
-- | Remove unused heap objects.
module Stg.Machine.GarbageCollection (
garbageCollect,
-- * Algorithms
GarbageCollectionAlgorithm,
triStateTracing,
twoSpaceCopying,
) where
import qualified Data.Set as S
import Stg.Machine.GarbageCollection.Common
import Stg.Machine.GarbageCollection.TriStateTracing
import Stg.Machine.GarbageCollection.TwoSpaceCopying
import Stg.Machine.Types
-- | Apply a garbage collection algorithm to the heap of the current
-- machine state, returning the cleaned state. When nothing was freed,
-- the state is returned untouched (no step is counted).
garbageCollect :: GarbageCollectionAlgorithm -> StgState -> StgState
garbageCollect algorithm@(GarbageCollectionAlgorithm name _) state
    | S.size deadAddrs > 0 = cleaned
    | otherwise            = state
  where
    (deadAddrs, forwards, state') = splitHeapWith algorithm state
    -- Count the collection as a machine step and record what was freed.
    cleaned = state'
        { stgSteps = stgSteps state + 1
        , stgInfo  = Info GarbageCollection
                          [Detail_GarbageCollected name deadAddrs forwards] }
|
quchen/stg
|
src/Stg/Machine/GarbageCollection.hs
|
bsd-3-clause
| 1,025
| 0
| 12
| 215
| 175
| 104
| 71
| 19
| 2
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
-- | Provide ability to upload tarballs to Hackage.
module Stack.Upload
( -- * Upload
upload
, uploadBytes
, uploadRevision
-- * Credentials
, HackageCreds
, loadCreds
, writeFilePrivate
) where
import Stack.Prelude
import Data.Aeson (FromJSON (..),
ToJSON (..),
decode', toEncoding, fromEncoding,
object, withObject,
(.:), (.=))
import Data.ByteString.Builder (lazyByteString)
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy as L
import qualified Data.Conduit.Binary as CB
import qualified Data.Text as T
import Network.HTTP.StackClient (Request, RequestBody(RequestBodyLBS), Response, withResponse, httpNoBody, getGlobalManager, getResponseStatusCode,
getResponseBody,
setRequestHeader,
parseRequest,
formDataBody, partFileRequestBody,
partBS, partLBS,
applyDigestAuth,
displayDigestAuthException)
import Stack.Types.Config
import System.Directory (createDirectoryIfMissing,
removeFile, renameFile)
import System.Environment (lookupEnv)
import System.FilePath ((</>), takeFileName, takeDirectory)
import System.IO (putStrLn, putStr, print) -- TODO remove putStrLn, use logInfo
import System.PosixCompat.Files (setFileMode)
-- | Username and password to log into Hackage.
--
-- Since 0.1.0.0
data HackageCreds = HackageCreds
  { hcUsername :: !Text      -- ^ Hackage account name
  , hcPassword :: !Text      -- ^ Hackage account password
  , hcCredsFile :: !FilePath -- ^ file these credentials were read from / will be saved to
  }
  deriving Show
-- Serialise credentials for the save file. The credentials-file path is
-- deliberately not stored; it is re-supplied on load.
instance ToJSON HackageCreds where
    toJSON creds = object
        [ "username" .= hcUsername creds
        , "password" .= hcPassword creds
        ]
-- The JSON on disk carries only username/password; the resulting
-- function is applied to the credentials-file path by the caller.
instance FromJSON (FilePath -> HackageCreds) where
    parseJSON = withObject "HackageCreds" $ \o -> HackageCreds
        <$> o .: "username"
        <*> o .: "password"
-- | Read @varName@ from the process environment; when it is unset, fall
-- back to the supplied prompting action.
withEnvVariable :: Text -> IO Text -> IO Text
withEnvVariable varName fromPrompt = do
    mval <- lookupEnv (T.unpack varName)
    case mval of
        Just val -> pure (T.pack val)
        Nothing  -> fromPrompt
-- | Load Hackage credentials, either from a save file or the command
-- line.
--
-- Since 0.1.0.0
loadCreds :: Config -> IO HackageCreds
loadCreds config = do
  fp <- credsFile config
  -- Any IO failure reading the file simply means "no saved credentials".
  elbs <- tryIO $ L.readFile fp
  -- Keep the raw bytes alongside the decoded value so the file can be
  -- rewritten (privately) byte-for-byte below.
  case either (const Nothing) Just elbs >>= \lbs -> (lbs, ) <$> decode' lbs of
    Nothing -> fromPrompt fp
    Just (lbs, mkCreds) -> do
      -- Ensure privacy, for cleaning up old versions of Stack that
      -- didn't do this
      writeFilePrivate fp $ lazyByteString lbs
      -- Warn when a credentials file exists even though saving is
      -- disabled in the configuration.
      unless (configSaveHackageCreds config) $ do
        putStrLn "WARNING: You've set save-hackage-creds to false"
        putStrLn "However, credentials were found at:"
        putStrLn $ "  " ++ fp
      return $ mkCreds fp
  where
    -- Obtain credentials interactively (or from the HACKAGE_USERNAME /
    -- HACKAGE_PASSWORD environment variables), optionally saving them.
    fromPrompt fp = do
      username <- withEnvVariable "HACKAGE_USERNAME" (prompt "Hackage username: ")
      password <- withEnvVariable "HACKAGE_PASSWORD" (promptPassword "Hackage password: ")
      let hc = HackageCreds
            { hcUsername = username
            , hcPassword = password
            , hcCredsFile = fp
            }
      when (configSaveHackageCreds config) $ do
        shouldSave <- promptBool $ T.pack $
          "Save hackage credentials to file at " ++ fp ++ " [y/n]? "
        putStrLn "NOTE: Avoid this prompt in the future by using: save-hackage-creds: false"
        when shouldSave $ do
          writeFilePrivate fp $ fromEncoding $ toEncoding hc
          putStrLn "Saved!"
        hFlush stdout
      return hc
-- | Write contents to a file which is always private.
--
-- For history of this function, see:
--
-- * https://github.com/commercialhaskell/stack/issues/2159#issuecomment-477948928
--
-- * https://github.com/commercialhaskell/stack/pull/4665
writeFilePrivate :: MonadIO m => FilePath -> Builder -> m ()
writeFilePrivate fp builder = liftIO $ withTempFile (takeDirectory fp) (takeFileName fp) $ \fpTmp h -> do
  -- Temp file is created such that only current user can read and write it.
  -- See docs for openTempFile: https://www.stackage.org/haddock/lts-13.14/base-4.12.0.0/System-IO.html#v:openTempFile

  -- Write to the file and close the handle.
  hPutBuilder h builder
  hClose h

  -- Make sure the destination file, if present, is writeable
  void $ tryIO $ setFileMode fp 0o600

  -- And atomically move
  renameFile fpTmp fp
-- | Path of the JSON file holding saved Hackage credentials, creating
-- the containing @upload@ directory under the Stack root if necessary.
credsFile :: Config -> IO FilePath
credsFile config = do
    let uploadDir = toFilePath (view stackRootL config) </> "upload"
    createDirectoryIfMissing True uploadDir
    pure (uploadDir </> "credentials.json")
-- | Attach HTTP digest authentication for the given credentials to a
-- request. When no digest challenge can be obtained, the original
-- request is returned unchanged (after printing a warning), so the
-- upload may still fail later with a 401.
applyCreds :: HackageCreds -> Request -> IO Request
applyCreds creds req0 = do
  manager <- getGlobalManager
  ereq <- applyDigestAuth
    (encodeUtf8 $ hcUsername creds)
    (encodeUtf8 $ hcPassword creds)
    req0
    manager
  case ereq of
    Left e -> do
      putStrLn "WARNING: No HTTP digest prompt found, this will probably fail"
      -- Show a friendly message for digest-auth-specific failures,
      -- fall back to the raw exception otherwise.
      case fromException e of
        Just e' -> putStrLn $ displayDigestAuthException e'
        Nothing -> print e
      return req0
    Right req -> return req
-- | Upload a single tarball with the given @Uploader@. Instead of
-- sending a file like 'upload', this sends a lazy bytestring.
--
-- Since 0.1.2.1
uploadBytes :: String -- ^ Hackage base URL
            -> HackageCreds
            -> String -- ^ tar file name
            -> L.ByteString -- ^ tar file contents
            -> IO ()
uploadBytes baseUrl creds tarName bytes = do
    let req1 = setRequestHeader "Accept" ["text/plain"]
               (fromString $ baseUrl <> "packages/")
        formData = [partFileRequestBody "package" tarName (RequestBodyLBS bytes)]
    req2 <- formDataBody formData req1
    req3 <- applyCreds creds req2
    putStr $ "Uploading " ++ tarName ++ "... "
    hFlush stdout
    withResponse req3 $ \res ->
        case getResponseStatusCode res of
            200 -> putStrLn "done!"
            401 -> do
                putStrLn "authentication failure"
                -- Stored credentials are evidently bad: remove them so the
                -- next run prompts again. Failure to delete is deliberately
                -- ignored (best effort).
                handleIO (const $ return ()) (removeFile (hcCredsFile creds))
                throwString "Authentication failure uploading to server"
            403 -> do
                putStrLn "forbidden upload"
                putStrLn "Usually means: you've already uploaded this package/version combination"
                putStrLn "Ignoring error and continuing, full message from Hackage below:\n"
                printBody res
            503 -> do
                putStrLn "service unavailable"
                -- Hackage has been known to report 503 for uploads that
                -- actually succeeded, so this is not treated as fatal.
                -- (Typos fixed in the two messages below: "some times",
                -- "pacakge".)
                putStrLn "This error sometimes gets sent even though the upload succeeded"
                putStrLn "Check on Hackage to see if your package is present"
                printBody res
            code -> do
                putStrLn $ "unhandled status code: " ++ show code
                printBody res
                throwString $ "Upload failed on " ++ tarName
-- | Stream an HTTP response body directly to stdout.
printBody :: Response (ConduitM () S.ByteString IO ()) -> IO ()
printBody res = runConduit $ getResponseBody res .| CB.sinkHandle stdout
-- | Upload a single tarball with the given @Uploader@: reads the file
-- at @fp@ lazily and delegates to 'uploadBytes'.
--
-- Since 0.1.0.0
upload :: String -- ^ Hackage base URL
       -> HackageCreds
       -> FilePath
       -> IO ()
upload baseUrl creds fp = do
    contents <- L.readFile fp
    uploadBytes baseUrl creds (takeFileName fp) contents
-- | Publish a revised @.cabal@ file for an already-uploaded package
-- version via Hackage's @\/edit@ endpoint.
uploadRevision :: String -- ^ Hackage base URL
               -> HackageCreds
               -> PackageIdentifier
               -> L.ByteString
               -> IO ()
uploadRevision baseUrl creds ident@(PackageIdentifier name _) cabalFile = do
  req0 <- parseRequest $ concat
    [ baseUrl
    , "package/"
    , packageIdentifierString ident
    , "/"
    , packageNameString name
    , ".cabal/edit"
    ]
  req1 <- formDataBody
    [ partLBS "cabalfile" cabalFile
    -- "publish=on" makes the revision take effect immediately.
    , partBS "publish" "on"
    ]
    req0
  req2 <- applyCreds creds req1
  -- The response body is not interesting; only auth errors matter here.
  void $ httpNoBody req2
|
juhp/stack
|
src/Stack/Upload.hs
|
bsd-3-clause
| 8,896
| 0
| 18
| 2,992
| 1,708
| 871
| 837
| 177
| 5
|
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE QuasiQuotes #-}
module System.Command.QQ.EvalSpec (spec) where
import Data.Text.Lazy (Text, pack)
import System.Command.QQ
import System.Exit (ExitCode(..))
import Test.Hspec
-- | End-to-end specs for the @sh@ quasiquoter: unicode handling, exit
-- codes, interpolation ("embedding"), multi-line commands, escaping,
-- literal embeddings, and custom 'Embed' instances.
spec :: Spec
spec = do
  describe "quasiquotation" $ do
    context "unicode" $ do
      it "works with unicode symbols in the String output" $
        [sh|echo -n "ДМИТРИЙ МАЛИКОВ"|] `shouldReturn` "ДМИТРИЙ МАЛИКОВ"
      it "works with unicode symbols in the Text output" $
        [sh|echo -n "ЕГОР ЛЕТОВ"|] `shouldReturn` text "ЕГОР ЛЕТОВ"
    context "exit code" $ do
      it "is possible to get successful exit code" $
        [sh|exit 0|] `shouldReturn` ExitSuccess
      it "is possible to get unsuccessful exit code" $
        [sh|exit 4|] `shouldReturn` ExitFailure 4
      it "is possible to get all of exitcode, stdout, and stderr" $
        [sh|echo -n hello; echo -n world >&2; exit 4|] `shouldReturn`
          (ExitFailure 4, text "hello", text "world")
  describe "embedding" $ do
    describe "variable embeddings" $ do
      it "can embed integers" $
        let foo = 7 in [sh|echo #{foo}|] `shouldReturn` "7\n"
      it "can embed doubles" $
        let foo = 7.0 in [sh|echo #{foo}|] `shouldReturn` "7.0\n"
      it "can embed characters" $
        let foo = 'z' in [sh|echo #{foo}|] `shouldReturn` "z\n"
      it "can embed strings" $
        let foo = "hello" in [sh|echo #{foo}|] `shouldReturn` "hello\n"
    describe "multi-line embeddings" $ do
      it "supports multiline commands" $
        [sh|
          echo hello
          echo world
          echo !!!
        |] `shouldReturn` "hello\nworld\n!!!\n"
      it "supports embeddings in multiline commands" $
        let foo = 4
            bar = 7
        in [sh|
          echo #{foo}
          echo #{bar}
        |] `shouldReturn` "4\n7\n"
    describe "escapings" $ do
      it "is possible to write #{} literally as a comment" $ do
        [sh|echo #\{foo}|] `shouldReturn` "\n"
        [sh|echo #\\{foo}|] `shouldReturn` "\n"
      it "is possible to write #{} literally as a string" $ do
        [sh|echo "#\{foo}"|] `shouldReturn` "#{foo}\n"
        [sh|echo "#\\{foo}"|] `shouldReturn` "#\\{foo}\n"
    describe "literals" $ do
      it "is possible to embed integer literals" $
        [sh|echo -n #{4}|] `shouldReturn` "4"
      it "is possible to embed rational literals" $
        [sh|echo -n #{4.0}|] `shouldReturn` "4.0"
      it "is possible to embed char literals" $
        [sh|echo -n #{'q'}|] `shouldReturn` "q"
      it "is possible to embed string literals" $
        [sh|echo -n #{"hello"}|] `shouldReturn` "hello"
    describe "custom data types" $
      it "is possible to embed custom data types" $
        [sh|echo -n hello#{Bang}|] `shouldReturn` "hello!"
-- | Sample user-defined type used to test embedding of custom data.
data Bang = Bang
-- Anything embedded must render to a shell fragment; 'Bang' renders as "!".
instance Embed Bang where embed Bang = "!"
-- | Helper to build lazy 'Text' expectations in the specs above.
text :: String -> Text
text = pack
|
beni55/command-qq
|
test/System/Command/QQ/EvalSpec.hs
|
bsd-3-clause
| 2,952
| 0
| 17
| 793
| 664
| 372
| 292
| 65
| 1
|
-- Tests for the identity rhino jsSrc = rhino.pretty.parse jsSrc, where
-- parse and pretty are our parser and pretty-printer and rhino is Mozilla
-- Rhino's JavaScript parser and pretty-printer.
module Rhino where
import Control.Monad
import qualified Data.ByteString.Char8 as B
import BrownPLT.Test
-- | Pipe JavaScript source through Mozilla Rhino's parser and
-- pretty-printer (the external @RhinoTest@ Java program), returning
-- Rhino's output. Rhino's stderr becomes the failure message.
rhino :: FilePath -- ^Path to the file (unused; kept for call-site symmetry)
      -> B.ByteString -- ^JavaScript source
      -> IO B.ByteString -- ^JavaScript source, parsed and printed by Rhino
rhino _path src = do
  result <- commandIO "/usr/bin/env"
              ["java", "-classpath", ".:rhino.jar", "RhinoTest"]
              src
  case result of
    -- Renamed locals: the originals shadowed System.IO's stdout/stderr.
    Right out -> return out
    Left err -> fail $ "RhinoTest signalled an error:\n" ++ B.unpack err
-- | One round-trip test case: pretty-print our parse of @str@, feed
-- both the original and the pretty-printed version through Rhino, and
-- require the two Rhino outputs to be identical.
testRhino:: FilePath -> String -> Test
testRhino src str = TestCase $ do
  -- Label the pretty-printed run so Rhino errors are attributable.
  let src' = src ++ " (pretty-printed)"
  lhs <- ((rhino src') . B.pack . pretty . (parse src)) str
  rhs <- rhino src (B.pack str)
  if lhs == rhs
    then return ()
    else assertFailure ("testRhino failed on " ++ src)
-- | Collect the JavaScript test files and actually run the Rhino
-- round-trip tests. (The original returned the 'TestList' from an IO
-- action without ever executing it, so no test was ever run;
-- 'runTestTT' is the standard HUnit text runner.)
main = do
  testPaths <- liftM concat $ mapM getJsPaths ["parse-pretty", "libs"]
  testData <- mapM readFile testPaths
  let tests = zipWith testRhino testPaths testData
  runTestTT (TestList tests)
|
brownplt/webbits
|
tests/Rhino.hs
|
bsd-3-clause
| 1,249
| 0
| 15
| 271
| 329
| 167
| 162
| 27
| 2
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.Trifecta.Combinators
-- Copyright : (c) Edward Kmett 2011-2014
-- License : BSD3
--
-- Maintainer : ekmett@gmail.com
-- Stability : experimental
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module Text.Trifecta.Combinators
( DeltaParsing(..)
, sliced
, careting, careted
, spanning, spanned
, fixiting
, MarkParsing(..)
) where
import Control.Applicative
import Control.Monad (MonadPlus)
import Control.Monad.Trans.Class
import Control.Monad.Trans.Identity
import Control.Monad.Trans.RWS.Lazy as Lazy
import Control.Monad.Trans.RWS.Strict as Strict
import Control.Monad.Trans.Reader
import Control.Monad.Trans.State.Lazy as Lazy
import Control.Monad.Trans.State.Strict as Strict
import Control.Monad.Trans.Writer.Lazy as Lazy
import Control.Monad.Trans.Writer.Strict as Strict
import Data.ByteString as Strict hiding (span)
import Data.Semigroup
import Text.Parser.Token
import Text.Trifecta.Delta
import Text.Trifecta.Rendering
import Prelude hiding (span)
-- | This class provides parsers with easy access to:
--
-- 1) the current line contents.
-- 2) the current position as a 'Delta'.
-- 3) the ability to use 'sliced' on any parser.
class (MonadPlus m, TokenParsing m) => DeltaParsing m where
  -- | Retrieve the contents of the current line (from the beginning of the line)
  line :: m ByteString
  -- | Retrieve the current position as a 'Delta'.
  position :: m Delta
  -- | Run a parser, grabbing all of the text between its start and end points
  slicedWith :: (a -> Strict.ByteString -> r) -> m a -> m r
  -- | Retrieve a 'Rendering' of the current line, noting this position, but not
  -- placing a 'Caret' there.
  rend :: DeltaParsing m => m Rendering
  rend = rendered <$> position <*> line
  {-# INLINE rend #-}
  -- | Grab the remainder of the current line
  restOfLine :: DeltaParsing m => m ByteString
  restOfLine = Strict.drop . fromIntegral . columnByte <$> position <*> line
  {-# INLINE restOfLine #-}
-- The instances below lift 'DeltaParsing' through the standard monad
-- transformers by delegating every method to the underlying parser.
-- 'slicedWith' additionally rethreads the transformer's extra results
-- (state s', writer output w) through the slicing continuation.
instance (MonadPlus m, DeltaParsing m) => DeltaParsing (Lazy.StateT s m) where
  line = lift line
  {-# INLINE line #-}
  position = lift position
  {-# INLINE position #-}
  slicedWith f (Lazy.StateT m) = Lazy.StateT $ \s -> slicedWith (\(a,s') b -> (f a b, s')) $ m s
  {-# INLINE slicedWith #-}
  rend = lift rend
  {-# INLINE rend #-}
  restOfLine = lift restOfLine
  {-# INLINE restOfLine #-}
instance (MonadPlus m, DeltaParsing m) => DeltaParsing (Strict.StateT s m) where
  line = lift line
  {-# INLINE line #-}
  position = lift position
  {-# INLINE position #-}
  slicedWith f (Strict.StateT m) = Strict.StateT $ \s -> slicedWith (\(a,s') b -> (f a b, s')) $ m s
  {-# INLINE slicedWith #-}
  rend = lift rend
  {-# INLINE rend #-}
  restOfLine = lift restOfLine
  {-# INLINE restOfLine #-}
instance (MonadPlus m, DeltaParsing m) => DeltaParsing (ReaderT e m) where
  line = lift line
  {-# INLINE line #-}
  position = lift position
  {-# INLINE position #-}
  slicedWith f (ReaderT m) = ReaderT $ slicedWith f . m
  {-# INLINE slicedWith #-}
  rend = lift rend
  {-# INLINE rend #-}
  restOfLine = lift restOfLine
  {-# INLINE restOfLine #-}
instance (MonadPlus m, DeltaParsing m, Monoid w) => DeltaParsing (Strict.WriterT w m) where
  line = lift line
  {-# INLINE line #-}
  position = lift position
  {-# INLINE position #-}
  slicedWith f (Strict.WriterT m) = Strict.WriterT $ slicedWith (\(a,s') b -> (f a b, s')) m
  {-# INLINE slicedWith #-}
  rend = lift rend
  {-# INLINE rend #-}
  restOfLine = lift restOfLine
  {-# INLINE restOfLine #-}
instance (MonadPlus m, DeltaParsing m, Monoid w) => DeltaParsing (Lazy.WriterT w m) where
  line = lift line
  {-# INLINE line #-}
  position = lift position
  {-# INLINE position #-}
  slicedWith f (Lazy.WriterT m) = Lazy.WriterT $ slicedWith (\(a,s') b -> (f a b, s')) m
  {-# INLINE slicedWith #-}
  rend = lift rend
  {-# INLINE rend #-}
  restOfLine = lift restOfLine
  {-# INLINE restOfLine #-}
instance (MonadPlus m, DeltaParsing m, Monoid w) => DeltaParsing (Lazy.RWST r w s m) where
  line = lift line
  {-# INLINE line #-}
  position = lift position
  {-# INLINE position #-}
  slicedWith f (Lazy.RWST m) = Lazy.RWST $ \r s -> slicedWith (\(a,s',w) b -> (f a b, s',w)) $ m r s
  {-# INLINE slicedWith #-}
  rend = lift rend
  {-# INLINE rend #-}
  restOfLine = lift restOfLine
  {-# INLINE restOfLine #-}
instance (MonadPlus m, DeltaParsing m, Monoid w) => DeltaParsing (Strict.RWST r w s m) where
  line = lift line
  {-# INLINE line #-}
  position = lift position
  {-# INLINE position #-}
  slicedWith f (Strict.RWST m) = Strict.RWST $ \r s -> slicedWith (\(a,s',w) b -> (f a b, s',w)) $ m r s
  {-# INLINE slicedWith #-}
  rend = lift rend
  {-# INLINE rend #-}
  restOfLine = lift restOfLine
  {-# INLINE restOfLine #-}
instance (MonadPlus m, DeltaParsing m) => DeltaParsing (IdentityT m) where
  line = lift line
  {-# INLINE line #-}
  position = lift position
  {-# INLINE position #-}
  slicedWith f (IdentityT m) = IdentityT $ slicedWith f m
  {-# INLINE slicedWith #-}
  rend = lift rend
  {-# INLINE rend #-}
  restOfLine = lift restOfLine
  {-# INLINE restOfLine #-}
-- | Run a parser purely for the input it consumes: the parser's own
-- result is discarded and the consumed slice of input is returned.
sliced :: DeltaParsing m => m a -> m ByteString
sliced = slicedWith (const id)
{-# INLINE sliced #-}
-- | Grab a 'Caret' pointing to the current location.
careting :: DeltaParsing m => m Caret
careting = Caret <$> position <*> line
{-# INLINE careting #-}
-- | Parse a 'Careted' result. Pointing the 'Caret' to where you start.
careted :: DeltaParsing m => m a -> m (Careted a)
careted p = (\m l a -> a :^ Caret m l) <$> position <*> line <*> p
{-# INLINE careted #-}
-- | Discard the result of a parse, returning a 'Span' from where we start to where it ended parsing.
spanning :: DeltaParsing m => m a -> m Span
spanning p = (\s l e -> Span s e l) <$> position <*> line <*> (p *> position)
{-# INLINE spanning #-}
-- | Parse a 'Spanned' result. The 'Span' starts here and runs to the last position parsed.
spanned :: DeltaParsing m => m a -> m (Spanned a)
spanned p = (\s l a e -> a :~ Span s e l) <$> position <*> line <*> p <*> position
{-# INLINE spanned #-}
-- | Grab a 'Fixit': the parsed bytes paired with the 'Span' they came
-- from, i.e. a suggested replacement for that region of input.
fixiting :: DeltaParsing m => m Strict.ByteString -> m Fixit
fixiting p = (\(r :~ s) -> Fixit s r) <$> spanned p
{-# INLINE fixiting #-}
-- | This class is a refinement of 'DeltaParsing' that adds the ability to mark your position in the input
-- and return there for further parsing later.
class (DeltaParsing m, HasDelta d) => MarkParsing d m | m -> d where
  -- | mark the current location so it can be used in constructing a span, or for later seeking
  mark :: m d
  -- | Seek a previously marked location, resuming parsing from there.
  release :: d -> m ()
-- As with 'DeltaParsing' above, 'MarkParsing' lifts through the standard
-- transformers by plain delegation to the underlying parser.
instance (MonadPlus m, MarkParsing d m) => MarkParsing d (Lazy.StateT s m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}
instance (MonadPlus m, MarkParsing d m) => MarkParsing d (Strict.StateT s m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}
instance (MonadPlus m, MarkParsing d m) => MarkParsing d (ReaderT e m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}
instance (MonadPlus m, MarkParsing d m, Monoid w) => MarkParsing d (Strict.WriterT w m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}
instance (MonadPlus m, MarkParsing d m, Monoid w) => MarkParsing d (Lazy.WriterT w m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}
instance (MonadPlus m, MarkParsing d m, Monoid w) => MarkParsing d (Lazy.RWST r w s m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}
instance (MonadPlus m, MarkParsing d m, Monoid w) => MarkParsing d (Strict.RWST r w s m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}
instance (MonadPlus m, MarkParsing d m) => MarkParsing d (IdentityT m) where
  mark = lift mark
  {-# INLINE mark #-}
  release = lift . release
  {-# INLINE release #-}
|
treeowl/trifecta
|
src/Text/Trifecta/Combinators.hs
|
bsd-3-clause
| 8,565
| 0
| 13
| 1,734
| 2,268
| 1,248
| 1,020
| 188
| 1
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Aws.ElasticTranscoder.Commands.ListPipelines
( ListPipelines(..)
, ListPipelinesResponse(..)
) where
import Aws.Core
import Aws.ElasticTranscoder.Core
import Control.Applicative
-- | Request type for the ElasticTranscoder @ListPipelines@ call; the
-- request carries no parameters.
data ListPipelines
    = ListPipelines
    deriving (Show,Eq)
-- | The pipelines returned by the service, as id\/status pairs.
newtype ListPipelinesResponse
    = ListPipelinesResponse
        { lprPipelines :: [PipelineIdStatus]
        }
    deriving (Show,Eq)
-- A ListPipelines request is a plain GET of /pipelines with no query
-- parameters or body.
instance SignQuery ListPipelines where
    type ServiceConfiguration ListPipelines = EtsConfiguration
    signQuery ListPipelines{..} = etsSignQuery
        EtsQuery
            { etsqMethod  = Get
            , etsqRequest = "pipelines"
            , etsqQuery   = []
            , etsqBody    = Nothing
            }
-- Decode the service's JSON pipeline list and rewrap it in our
-- response type.
instance ResponseConsumer ListPipelines ListPipelinesResponse where
    type ResponseMetadata ListPipelinesResponse = EtsMetadata
    responseConsumer _ mref = etsResponseConsumer mref $ \rsp ->
        cnv <$> jsonConsumer rsp
      where
        cnv (PipelineList pls) = ListPipelinesResponse pls
instance Transaction ListPipelines ListPipelinesResponse
-- The response is already a pure value, so caching it in memory is the
-- identity.
instance AsMemoryResponse ListPipelinesResponse where
    type MemoryResponse ListPipelinesResponse = ListPipelinesResponse
    loadToMemory = return
|
cdornan/aws-elastic-transcoder
|
Aws/ElasticTranscoder/Commands/ListPipelines.hs
|
bsd-3-clause
| 1,503
| 0
| 10
| 434
| 249
| 142
| 107
| 34
| 0
|
module Main where
import Control.Concurrent.MVar
import HFuse
-- | A file name is a single path component, not a full path.
type FileName = String
-- | Mode bits plus owning user and group.
type Permissions = (FileMode, UserID, GroupID)
-- NOTE(review): 'FiniteMap' is the pre-Data.Map GHC container; this
-- alias appears unused in the rest of the file — confirm before relying
-- on it.
type FSMap = FiniteMap FilePath FSObject
-- | An in-memory filesystem object: a regular file with its contents,
-- or a directory with named children.
data FSObject
    = FSRegularFile Permissions String
    | FSDirectory Permissions [(FileName, FSObject)]
-- | Extract the permission triple (mode, owner, group) from any
-- filesystem object.
--
-- Fixed spelling: the function and its result type were misspelt
-- (@fsObjectPersmissions@ / @Persmissions@); the type @Persmissions@
-- does not exist, and the rest of the file already calls
-- 'fsObjectPermissions' by the correct name.
fsObjectPermissions :: FSObject -> Permissions
fsObjectPermissions (FSRegularFile perms _) = perms
fsObjectPermissions (FSDirectory perms _) = perms
main :: IO ()
-- NOTE(review): this main is incomplete and does not compile as written:
-- the record bindings for `size` and `statFileSize` have no right-hand
-- side, and it uses a `Directory` constructor / `RegularFile` entry type
-- whose definitions are not visible here (the local data type is
-- FSDirectory/FSRegularFile) — confirm intended definitions before use.
main =
    do mvRoot <- newMVar $ Directory []
       let liveFSOps :: FuseOperations
           liveFSOps =
               -- Wire each FUSE callback to a local live* handler.
               FuseOperations { fuseGetFileStat = liveGetFileStat
                              , fuseReadSymbolicLink = liveReadSymbolicLink
                              , fuseGetDirectoryContents = liveGetDirectoryContents
                              , fuseCreateDevice = liveCreateDevice
                              , fuseCreateDirectory = liveCreateDirectory
                              , fuseRemoveLink = liveRemoveLink
                              , fuseRemoveDirectory = liveRemoveDirectory
                              , fuseCreateSymbolicLink = liveCreateSymbolicLink
                              , fuseRename = liveRename
                              , fuseCreateLink = liveCreateLink
                              , fuseSetFileMode = liveSetFileMode
                              , fuseSetOwnerAndGroup = liveSetOwnerAndGroup
                              , fuseSetFileSize = liveSetFileSize
                              , fuseSetFileTimes = liveSetFileTimes
                              , fuseOpen = liveOpen
                              , fuseRead = liveRead
                              , fuseWrite = liveWrite
                              , fuseGetFileSystemStats = liveGetFileSystemStats
                              , fuseFlush = liveFlush
                              , fuseRelease = liveRelease
                              , fuseSynchronizeFile = liveSynchronizeFile
                              }
           -- Resolve a path against the root under the MVar lock and
           -- apply f to the object found (or Nothing).
           withPath :: FilePath -> (Maybe FSObject -> a) -> IO a
           withPath path f =
               withMVar mvRoot $ \ root ->
               f (foldl digPath root (paths path))
           -- Descend one component: only directories have children.
           digPath :: Maybe FSObject -> FileName -> Maybe FSObject
           digPath (Just (Directory entries)) name = lookup name entries
           digPath _ _ = Nothing
           liveGetFileStat :: FilePath -> IO (Either Errno FileStat)
           liveGetFileStat path =
               withPath path $ \ mbObj -> case mbObj of
                 Nothing -> return (Left eNOENT)
                 Just obj -> let entryType = case obj of FSRegularFile _ -> RegularFile
                                                         Directory _ -> Directory
                                 (mode, owner, group) = fsObjectPermissions obj
                                 size =
                             in return $ Right $ FileStat
                                { statEntryType = entryType
                                , statFileMode = mode
                                , statLinkCount = 1
                                , statFileOwner = owner
                                , statFileGroup = group
                                , statSpecialDeviceID = 0
                                , statFileSize =
       fuseMain liveFSOps (\e -> print e >> defaultExceptionHandler e)
-- | Split a slash-separated path into its components, e.g.
-- @paths "\/a\/b\/c" == ["a","b","c"]@. Leading and repeated slashes
-- are skipped; the empty path yields @[]@.
--
-- Bug fix: the original used @words s''@ on the remainder, which splits
-- on whitespace rather than on \'\/\', so any path with more than one
-- component was split incorrectly (e.g. @"\/a\/b"@ became
-- @["a","\/b"]@). We recurse on the remainder instead.
paths :: FilePath -> [FileName]
paths s = case dropWhile (== '/') s of
            "" -> []
            s' -> w : paths s''
                where (w, s'') = break (== '/') s'
|
hackern/halfs
|
test/deps/hfuse/examples/LiveFS.hs
|
bsd-3-clause
| 3,606
| 5
| 11
| 1,648
| 672
| 377
| 295
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sl-SI">
<title>Highlighter</title>
<maps>
<homeID>highlighter</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/highlighter/src/main/javahelp/help_sl_SI/helpset_sl_SI.hs
|
apache-2.0
| 964
| 105
| 29
| 155
| 391
| 208
| 183
| -1
| -1
|
-- (c) Simon Marlow 2011, see the file LICENSE for copying terms.
--
-- Sample geturls.hs (CEFP summer school notes, 2011)
--
-- Downloading multiple URLs concurrently, timing the downloads
--
-- Compile with:
-- ghc -threaded --make geturls.hs
import GetURL
import TimeIt
import Control.Monad
import Control.Concurrent
import Control.Exception
import Text.Printf
import qualified Data.ByteString as B
-----------------------------------------------------------------------------
-- Our Async API:
-- | Handle on an asynchronous computation: the 'MVar' is filled with the
-- result once the forked action completes.
data Async a = Async (MVar a)
-- | Run an 'IO' action on a fresh thread, returning immediately with a
-- handle that can later be waited on.
async :: IO a -> IO (Async a)
async action = do
  resultVar <- newEmptyMVar
  _ <- forkIO (putMVar resultVar =<< action)
  return (Async resultVar)
-- | Block until the asynchronous computation finishes and return its
-- result; 'readMVar' leaves the value in place, so waiting twice is safe.
wait :: Async a -> IO a
wait a = case a of
  Async var -> readMVar var
-----------------------------------------------------------------------------
-- | URLs downloaded concurrently by 'main'.
sites = ["http://www.google.com",
         "http://www.bing.com",
         "http://www.yahoo.com",
         "http://www.wikipedia.com/wiki/Spade",
         "http://www.wikipedia.com/wiki/Shovel"]
-- | Start one download per site concurrently, then wait for all of them.
-- Each download prints its byte count and elapsed time when it finishes.
main = mapM (async.http) sites >>= mapM wait
 where
   http url = do
     (page, time) <- timeit $ getURL url
     printf "downloaded: %s (%d bytes, %.2fs)\n" url (B.length page) time
|
lywaterman/parconc-examples
|
geturls.hs
|
bsd-3-clause
| 1,182
| 4
| 10
| 213
| 270
| 137
| 133
| 24
| 1
|
module Vectorise.Convert
( fromVect
)
where
import Vectorise.Monad
import Vectorise.Builtins
import Vectorise.Type.Type
import CoreSyn
import TyCon
import Type
import TyCoRep
import NameSet
import FastString
import Outputable
import Control.Applicative
import Prelude -- avoid redundant import warning due to AMP
-- |Convert a vectorised expression such that it computes the non-vectorised equivalent of its
-- value.
--
-- For functions, we eta expand the function and convert the arguments and result:
-- For example
-- @
-- \(x :: Double) ->
-- \(y :: Double) ->
-- ($v_foo $: x) $: y
-- @
--
-- We use the type of the original binding to work out how many outer lambdas to add.
--
fromVect :: Type -- ^ The type of the original binding.
         -> CoreExpr -- ^ Expression giving the closure to use, eg @$v_foo@.
         -> VM CoreExpr
-- Convert the type to the core view if it isn't already.
--
fromVect ty expr
  | Just ty' <- coreView ty
  = fromVect ty' expr
-- For each function constructor in the original type we add an outer
-- lambda to bind the parameter variable, and an inner application of it.
fromVect (ForAllTy (Anon arg_ty) res_ty) expr
  = do
      -- Fresh binder for the unvectorised argument; it is converted to
      -- its vectorised form before being fed to the closure.
      arg     <- newLocalVar (fsLit "x") arg_ty
      varg    <- toVect arg_ty (Var arg)
      varg_ty <- vectType arg_ty
      vres_ty <- vectType res_ty
      apply   <- builtin applyVar
      -- Recursively convert the result, applying the closure via '$:'.
      body    <- fromVect res_ty
               $ Var apply `mkTyApps` [varg_ty, vres_ty] `mkApps` [expr, varg]
      return $ Lam arg body
-- If the type isn't a function, then we can't current convert it unless the type is scalar (i.e.,
-- is identical to the non-vectorised version).
--
fromVect ty expr
  = identityConv ty >> return expr
-- Convert an expression such that it evaluates to the vectorised equivalent of the value of the
-- original expression.
--
-- WARNING: Currently only works for the scalar types, where the vectorised value coincides with the
-- original one.
--
toVect :: Type -> CoreExpr -> VM CoreExpr
toVect ty expr =
  do identityConv ty
     return expr
-- |Check that the type is neutral under type vectorisation — i.e., all involved type constructor
-- are not altered by vectorisation as they contain no parallel arrays.
--
identityConv :: Type -> VM ()
-- Look through type synonyms first.
identityConv ty
  | Just ty' <- coreView ty
  = identityConv ty'
-- A constructor application is neutral iff its arguments and its head
-- constructor are.
identityConv (TyConApp tycon tys)
  = do { mapM_ identityConv tys
       ; identityConvTyCon tycon
       }
-- All remaining type forms are rejected (or conservatively rejected).
identityConv (LitTy {}) = noV $ text "identityConv: not sure about literal types under vectorisation"
identityConv (TyVarTy {}) = noV $ text "identityConv: type variable changes under vectorisation"
identityConv (AppTy {}) = noV $ text "identityConv: type appl. changes under vectorisation"
identityConv (ForAllTy {}) = noV $ text "identityConv: quantified type changes under vectorisation"
identityConv (CastTy {}) = noV $ text "identityConv: not sure about casted types under vectorisation"
identityConv (CoercionTy {}) = noV $ text "identityConv: not sure about coercions under vectorisation"
-- |Check that this type constructor is not changed by vectorisation — i.e., it does not embed any
-- parallel arrays.
--
identityConvTyCon :: TyCon -> VM ()
identityConvTyCon tc
  = do { isParallel <- (tyConName tc `elemNameSet`) <$> globalParallelTyCons
       ; parray     <- builtin parrayTyCon
         -- A constructor that mentions parallel arrays is only its own
         -- vectorised form when it is @PArray@ itself.
       ; if isParallel && tc /= parray
           then noV idErr
           else return ()
       }
  where
    idErr = text "identityConvTyCon: type constructor contains parallel arrays" <+> ppr tc
|
tjakway/ghcjvm
|
compiler/vectorise/Vectorise/Convert.hs
|
bsd-3-clause
| 3,560
| 0
| 11
| 767
| 649
| 340
| 309
| 56
| 2
|
-----------------------------------------------------------------------------
--
-- Module : IDE.Command.VCS.Common.Helper
-- Copyright : 2007-2011 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL Nothing
--
-- Maintainer : maintainer@leksah.org
-- Stability : provisional
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module IDE.Command.VCS.Common.Helper (
--helper for vcs actions
eMergeToolSetter
,createActionFromContext
) where
import IDE.Core.Types
import IDE.Core.State
import qualified IDE.Workspaces.Writer as Writer
import qualified IDE.Command.VCS.Types as Types
import qualified VCSWrapper.Common as VCS
import qualified VCSGui.Common as VCSGUI
import Control.Monad.Reader
import Control.Monad.Trans(liftIO)
import Data.Maybe
import qualified Data.Map as Map
{- |
Runs given vcs-action using the vcs-conf set in the ReaderT.
Provides a basic exception handler for any errors occuring.
-}
createActionFromContext :: VCS.Ctx() -- ^ computation to execute, i.e. showCommit
                        -> Types.VCSAction ()
createActionFromContext vcsAction = do
    -- the vcs configuration is the middle component of the reader state
    ((_,conf,_),_) <- ask
    -- run the action, wrapping it in the default GUI exception handler
    liftIO $ VCSGUI.defaultVCSExceptionHandler $ VCS.runVcs conf vcsAction
{- |
Creates an 'eMergeToolSetter' (Either MergeTool or MT-Setter) from given parameters.
-}
eMergeToolSetter :: IDERef
                 -> FilePath
                 -> Maybe VCSGUI.MergeTool
                 -> Either VCSGUI.MergeTool (VCSGUI.MergeTool -> IO())
eMergeToolSetter ideRef cabalFp mbMergeTool =
    -- no tool configured yet: hand back a setter for this package
    maybe (Right $ mergeToolSetter ideRef cabalFp) Left mbMergeTool
{- |
Facility to set a mergetool for a given package.
-}
mergeToolSetter :: IDERef -> FilePath -> VCSGUI.MergeTool -> IO()
mergeToolSetter ideRef cabalFp mergeTool =
    flip runReaderT ideRef $ workspaceSetMergeTool cabalFp mergeTool
{- |
Sets the given mergetool for given package in current workspace. Workspace must be set.
-}
workspaceSetMergeTool :: FilePath -> VCSGUI.MergeTool -> IDEAction
workspaceSetMergeTool pathToPackage mergeTool = do
    -- NOTE(review): 'fromJust' here assumes a workspace is set, per the
    -- caller contract documented above; it errors otherwise.
    modifyIDE_ (\ide -> do
        let oldWs = fromJust (workspace ide)
        let oldMap = packageVcsConf oldWs
        case Map.lookup pathToPackage oldMap of
            Nothing -> ide --TODO error
            Just (vcsType,config,_) -> do
                -- replace only the merge-tool component of the vcs config
                let vcsConf = (vcsType,config,Just mergeTool)
                let newMap = Map.insert pathToPackage vcsConf oldMap
                let newWs = oldWs { packageVcsConf = newMap }
                ide {workspace = Just newWs })
    -- persist the updated workspace to disk
    newWs <- readIDE workspace
    Writer.writeWorkspace $ fromJust newWs
|
573/leksah
|
src/IDE/Command/VCS/Common/Helper.hs
|
gpl-2.0
| 2,734
| 7
| 12
| 593
| 493
| 283
| 210
| 43
| 2
|
{-# LANGUAGE TypeFamilies, ConstraintKinds #-}
module Foo( shared, foo, bar) where
-- module Foo where
import GHC.Exts
{-
foo :: [Int] -> [Int]
foo = let f = map negate
in f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f.
f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f.
f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f.
f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f.
f . f . f . f . f . f . f . f . f . f . f . f . f
-}
-- | Constraint family limiting the element types a container supports.
type family Domain (f :: * -> *) a :: Constraint
-- Lists place no restriction on their elements.
type instance Domain [] a = ()
instance MyFunctor [] where
   myfmap = map
-- | 'Functor'-like class whose instances may constrain element types
-- via the associated 'Domain' constraint family.
class MyFunctor f where
   myfmap :: (Domain f a, Domain f b) => (a -> b) -> f a -> f b
-- | Deeply nested composition of a constrained 'myfmap'.  This is a
-- compiler (sharing/performance) test case: the shape and length of the
-- chain is deliberate and must not be simplified.
shared :: (MyFunctor f, Domain f Int) => f Int -> f Int
shared = let
   f = myfmap negate
 in
   f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f.
   f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f.
   f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f.
   f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f. f.
   f . f . f . f . f . f . f . f . f . f . f . f . f
-- Two users of 'shared'; the test checks that the chain is shared
-- between them rather than duplicated.
foo xs = shared $ 0:xs
bar xs = 0:shared xs
|
shlevy/ghc
|
testsuite/tests/simplCore/should_compile/T7785.hs
|
bsd-3-clause
| 1,423
| 2
| 84
| 601
| 628
| 321
| 307
| -1
| -1
|
{-# LANGUAGE CPP #-}
-- | This module provides an interface for typechecker plugins to
-- access select functions of the 'TcM', principally those to do with
-- reading parts of the state.
module TcPluginM (
#ifdef GHCI
-- * Basic TcPluginM functionality
TcPluginM,
tcPluginIO,
tcPluginTrace,
unsafeTcPluginTcM,
-- * Finding Modules and Names
FindResult(..),
findImportedModule,
lookupOrig,
-- * Looking up Names in the typechecking environment
tcLookupGlobal,
tcLookupTyCon,
tcLookupDataCon,
tcLookupClass,
tcLookup,
tcLookupId,
-- * Getting the TcM state
getTopEnv,
getEnvs,
getInstEnvs,
getFamInstEnvs,
matchFam,
-- * Type variables
newUnique,
newFlexiTyVar,
isTouchableTcPluginM,
-- * Zonking
zonkTcType,
zonkCt,
-- * Creating constraints
newWanted,
newDerived,
newGiven,
-- * Manipulating evidence bindings
newEvVar,
setEvBind,
getEvBindsTcPluginM,
getEvBindsTcPluginM_maybe
#endif
) where
#ifdef GHCI
import qualified TcRnMonad
import qualified TcSMonad
import qualified TcEnv
import qualified TcMType
import qualified Inst
import qualified FamInst
import qualified IfaceEnv
import qualified Finder
import FamInstEnv ( FamInstEnv )
import TcRnMonad ( TcGblEnv, TcLclEnv, Ct, CtLoc, TcPluginM
, unsafeTcPluginTcM, getEvBindsTcPluginM_maybe
, liftIO, traceTc )
import TcMType ( TcTyVar, TcType )
import TcEnv ( TcTyThing )
import TcEvidence ( TcCoercion, EvTerm, EvBind, EvBindsVar, mkGivenEvBind )
import TcRnTypes ( CtEvidence(..) )
import Var ( EvVar )
import Module
import Name
import TyCon
import DataCon
import Class
import HscTypes
import Outputable
import Type
import Id
import InstEnv
import FastString
import Maybes
import Unique
-- | Perform some IO, typically to interact with an external tool.
tcPluginIO :: IO a -> TcPluginM a
tcPluginIO a = unsafeTcPluginTcM (liftIO a)
-- | Output useful for debugging the compiler.
-- The 'String' is a heading; the 'SDoc' is the document to dump.
tcPluginTrace :: String -> SDoc -> TcPluginM ()
tcPluginTrace a b = unsafeTcPluginTcM (traceTc a b)
-- | Locate a module by name, optionally restricted to a package.
findImportedModule :: ModuleName -> Maybe FastString -> TcPluginM FindResult
findImportedModule mod_name mb_pkg = do
  hsc_env <- getTopEnv
  tcPluginIO $ Finder.findImportedModule hsc_env mod_name mb_pkg
-- | Resolve an original (module, occurrence name) pair to a 'Name'.
lookupOrig :: Module -> OccName -> TcPluginM Name
lookupOrig mod = unsafeTcPluginTcM . IfaceEnv.lookupOrig mod
-- The remaining functions are thin lifts of the corresponding
-- 'TcEnv' lookups into 'TcPluginM'.
tcLookupGlobal :: Name -> TcPluginM TyThing
tcLookupGlobal = unsafeTcPluginTcM . TcEnv.tcLookupGlobal
tcLookupTyCon :: Name -> TcPluginM TyCon
tcLookupTyCon = unsafeTcPluginTcM . TcEnv.tcLookupTyCon
tcLookupDataCon :: Name -> TcPluginM DataCon
tcLookupDataCon = unsafeTcPluginTcM . TcEnv.tcLookupDataCon
tcLookupClass :: Name -> TcPluginM Class
tcLookupClass = unsafeTcPluginTcM . TcEnv.tcLookupClass
tcLookup :: Name -> TcPluginM TcTyThing
tcLookup = unsafeTcPluginTcM . TcEnv.tcLookup
tcLookupId :: Name -> TcPluginM Id
tcLookupId = unsafeTcPluginTcM . TcEnv.tcLookupId
-- | The top-level compiler session environment.
getTopEnv :: TcPluginM HscEnv
getTopEnv = unsafeTcPluginTcM TcRnMonad.getTopEnv
-- | The global and local typechecker environments.
getEnvs :: TcPluginM (TcGblEnv, TcLclEnv)
getEnvs = unsafeTcPluginTcM TcRnMonad.getEnvs
-- | The class-instance environments (home and external packages).
getInstEnvs :: TcPluginM InstEnvs
getInstEnvs = unsafeTcPluginTcM Inst.tcGetInstEnvs
-- | The family-instance environments (home and external packages).
getFamInstEnvs :: TcPluginM (FamInstEnv, FamInstEnv)
getFamInstEnvs = unsafeTcPluginTcM FamInst.tcGetFamInstEnvs
-- | Attempt to reduce one step of the given family application.
matchFam :: TyCon -> [Type] -> TcPluginM (Maybe (TcCoercion, TcType))
matchFam tycon args = unsafeTcPluginTcM $ TcSMonad.matchFamTcM tycon args
-- | Allocate a fresh unique.
newUnique :: TcPluginM Unique
newUnique = unsafeTcPluginTcM TcRnMonad.newUnique
-- | Allocate a fresh unification variable of the given kind.
newFlexiTyVar :: Kind -> TcPluginM TcTyVar
newFlexiTyVar = unsafeTcPluginTcM . TcMType.newFlexiTyVar
-- | Whether the given metavariable may currently be unified.
isTouchableTcPluginM :: TcTyVar -> TcPluginM Bool
isTouchableTcPluginM = unsafeTcPluginTcM . TcRnMonad.isTouchableTcM
-- Zonking substitutes the current bindings of metavariables.
zonkTcType :: TcType -> TcPluginM TcType
zonkTcType = unsafeTcPluginTcM . TcMType.zonkTcType
zonkCt :: Ct -> TcPluginM Ct
zonkCt = unsafeTcPluginTcM . TcMType.zonkCt
-- | Create a new wanted constraint.
newWanted  :: CtLoc -> PredType -> TcPluginM CtEvidence
newWanted loc pty = do
    -- a wanted carries a fresh evidence variable to be filled in later
    new_ev <- newEvVar pty
    return CtWanted { ctev_pred = pty, ctev_evar = new_ev, ctev_loc = loc }
-- | Create a new derived constraint.
newDerived :: CtLoc -> PredType -> TcPluginM CtEvidence
newDerived loc pty = return CtDerived { ctev_pred = pty, ctev_loc = loc }
-- | Create a new given constraint, with the supplied evidence. This
-- must not be invoked from 'tcPluginInit' or 'tcPluginStop', or it
-- will panic.
newGiven :: CtLoc -> PredType -> EvTerm -> TcPluginM CtEvidence
newGiven loc pty evtm = do
   new_ev <- newEvVar pty
   setEvBind $ mkGivenEvBind new_ev evtm
   return CtGiven { ctev_pred = pty, ctev_evar = new_ev, ctev_loc = loc }
-- | Create a fresh evidence variable.
newEvVar :: PredType -> TcPluginM EvVar
newEvVar = unsafeTcPluginTcM . TcMType.newEvVar
-- | Bind an evidence variable. This must not be invoked from
-- 'tcPluginInit' or 'tcPluginStop', or it will panic.
setEvBind :: EvBind -> TcPluginM ()
setEvBind ev_bind = do
    tc_evbinds <- getEvBindsTcPluginM
    unsafeTcPluginTcM $ TcMType.addTcEvBind tc_evbinds ev_bind
-- | Access the 'EvBindsVar' carried by the 'TcPluginM' during
-- constraint solving. This must not be invoked from 'tcPluginInit'
-- or 'tcPluginStop', or it will panic.
getEvBindsTcPluginM :: TcPluginM EvBindsVar
getEvBindsTcPluginM = fmap (expectJust oops) getEvBindsTcPluginM_maybe
  where
    oops = "plugin attempted to read EvBindsVar outside the constraint solver"
#endif
|
urbanslug/ghc
|
compiler/typecheck/TcPluginM.hs
|
bsd-3-clause
| 5,764
| 0
| 10
| 1,156
| 1,121
| 627
| 494
| 2
| 0
|
module XPrelude (module X) where
import Control.Monad as X
import Data.Foldable as X
import Data.List as X
import Data.Monoid as X
import Data.Traversable as X
import Prelude as X
|
urbanslug/ghc
|
libraries/base/tests/T9586.hs
|
bsd-3-clause
| 208
| 0
| 4
| 57
| 50
| 36
| 14
| 7
| 0
|
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PatternGuards #-}
module Haks.Sanskrit.Roman.Particulator where
import BasicPrelude hiding (empty,all,null)
import Data.Char
import Data.Text hiding (reverse,head)
import Haks.Types
-- | Classify one roman-Sanskrit character, pairing it with its token
-- class; characters outside the alphabet yield 'Nothing'.
tokenizer :: Char -> Maybe (Token,Char)
tokenizer c
  | c `elem` consonants = Just (SANSKRIT_ROMAN SKTR_CONSONANT, c)
  | c `elem` vowels     = Just (SANSKRIT_ROMAN SKTR_VOWEL, c)
  | isDigit c           = Just (SANSKRIT_ROMAN SKTR_DIGIT, c)
  | c `elem` terminals  = Just (SANSKRIT_ROMAN SKTR_TERMINAL, c)
  | otherwise           = Nothing
-- NOTE(review): work in progress — most branches are 'undefined', and the
-- second equation's view pattern appears not to be valid syntax as
-- written (the lambda head is missing); confirm before relying on this.
particulate :: Text -> [Particle] -> [(Token,Char)] -> [Particle]
particulate (null -> True) particles [] = reverse particles
particulate newp@(null -> True) [] (((x,_):xs) -> x == SANSKRIT_ROMAN SKTR_VOWEL) = undefined
-- ((SANSKRIT_ROMAN SKTR_CONSONANT,tok):xs) = undefined
particulate newp@(null -> True) [] ((SANSKRIT_ROMAN SKTR_VOWEL,tok):xs) = undefined
particulate emptyp@(null -> True) [] (_:xs) = particulate emptyp [] xs
particulate particle particles ((SANSKRIT_ROMAN SKTR_TERMINAL,term):xs) = undefined
-- particulate particle particles
-- Character classes of the romanised Sanskrit alphabet used by
-- 'tokenizer' above.
consonants :: [Char]
consonants = [ 'k','h','g','ṅ','c','j','ñ','ṭ','ḍ','ṇ','t','d','n','p','b','m'
             , 'y','r','l','v','ś','ṣ','s']
vowels :: [Char]
vowels = ['a','i','u','e','o','ā','ī','ū','ṛ','ṝ','ḷ','ḹ']
-- Marks that may only close a syllable.
terminals :: [Char]
terminals =['ṁ','ṃ','ḥ']
{-
find_begining :: [(Token,Char)] -> [(Token,Char)]
find_begining (x:xs) = case (fst x) of
(SANSKRIT_ROMAN SKTR_CONSONANT) -> ((SANSKRIT_ROMAN SKTR_START, snd x):xs)
(SANSKRIT_ROMAN SKTR_VOWEL) -> ((SANSKRIT_ROMAN SKTR_START, snd x):xs)
otherwise -> find_begining xs
-}
|
mlitchard/haks
|
src/Haks/Sanskrit/Roman/Particulator.hs
|
isc
| 1,816
| 1
| 11
| 291
| 575
| 332
| 243
| -1
| -1
|
module Main where
import Control.Monad.Reader (ask, runReaderT)
import Control.Monad.State (get, runStateT, modify)
import UI.NCurses (Event(..), runCurses, defaultWindow, setEcho, setKeypad, getEvent)
import RogueLike
import RogueLike.Update
import RogueLike.Update.Game
import RogueLike.Update.Pickup
import RogueLike.Update.Inventory
import RogueLike.Render
import RogueLike.Render.Game
import RogueLike.Render.Pickup
import RogueLike.Render.Inventory
-- | Set up ncurses (no echo, keypad mode), render the initial screen,
-- then enter the event loop; both run against 'defaultGameState'.
main = runCurses $ do
  window <- defaultWindow
  setEcho False
  setKeypad window True
  let appConfig = AppConfig {window=window}
  runStateT (runReaderT renderGame appConfig) defaultGameState
  runStateT (runReaderT mainLoop appConfig) defaultGameState
-- | Event loop: read one curses event, quit on @q@, otherwise update the
-- game state for the event, re-render, and recurse.
mainLoop = do
  curGameState <- get
  appConfig <- ask
  -- blocking read ('Nothing' timeout); a timeout still yields Nothing
  event <- liftCurses $ getEvent (window appConfig) Nothing
  case event of
    Nothing -> mainLoop
    Just (EventCharacter 'q') -> exitMainLoop
    Just event' -> do
      modify (updateDispatch event')
      gameState <- get
      renderDispatch appConfig gameState
      mainLoop
  where exitMainLoop = return ()
-- | Route an input event to the update function for the current mode.
updateDispatch :: Event -> GameState -> GameState
updateDispatch event gameState = dispatch (gameMode gameState)
  where
    dispatch GameMode      = updateGame event gameState
    dispatch InventoryMode = updateInventory event gameState
    dispatch PickupMode    = updatePickup event gameState
-- | Route rendering to the renderer for the current mode.
renderDispatch :: AppConfig -> GameState -> App ()
renderDispatch _ gameState = pick (gameMode gameState)
  where
    pick GameMode      = renderGame
    pick InventoryMode = renderInventory
    pick PickupMode    = renderPickup
|
mgreenly/roguelike
|
app/Main.hs
|
isc
| 1,835
| 0
| 14
| 539
| 428
| 220
| 208
| 45
| 3
|
{-# LANGUAGE ViewPatterns #-}
module Haks.Tibetan.Uchen.Particulator where
import BasicPrelude hiding (empty,all,null)
import Data.Char
import Data.Text hiding (reverse,head)
import Haks.Types
-- | Drop a single leading shad or tsheg mark, if present.
-- NOTE(review): only one mark is removed per call — confirm whether a
-- run of leading marks should be stripped instead.
preProcessor :: [Char] -> [Char]
preProcessor [] = []
preProcessor corpus@(x:xs)
  | (x `elem` shad) || (x `elem` tsheg) = xs
  | otherwise = corpus
-- | Classify one Uchen character.  Shads are normalised to a canonical
-- tsheg; Latin-1 characters and known garbage are dropped ('Nothing');
-- everything else is an ordinary Tibetan character.
tokenizer :: Char -> Maybe (Token,Char)
tokenizer tok
  | isShad = Just (TIBETAN_UCHEN TSheg, head tsheg)
  | isTsheg = Just (TIBETAN_UCHEN TSheg, tok)
  | isAlphaNum' = Nothing
  | isGarbage = Nothing -- FIXME, redundant with Latin1 in some cases
  | otherwise = Just (TIBETAN_UCHEN StdChar_UC, tok)
  where
    isAlphaNum' = isLatin1 tok
    isShad = tok `elem` shad
    isTsheg = tok `elem` tsheg
    isGarbage = tok `elem` not_token
-- | Group a token stream into syllable particles: ordinary characters
-- accumulate into the current particle, and a tsheg closes the syllable
-- (the mark itself is appended to the particle).
particulate :: Text -> [Particle] -> [(Token,Char)] -> [Particle]
particulate (null -> True) particles [] = reverse particles
particulate particle particles ((TIBETAN_UCHEN TSheg,tsheg'):xs) =
  (syllable_marker:particles) <> (particulate empty [] xs)
  where
    syllable_marker = particle <> (pack $ tsheg' : [])
particulate particle particles ((TIBETAN_UCHEN StdChar_UC,char):xs) =
  particulate (particle `append` (pack $ char : [])) particles xs
-- catch-all: any other shape of input produces no particles
particulate _ _ _ = []
-- Sentence-final punctuation marks.
shad :: String
shad = ['།','༑']
-- Syllable separators (canonical form is the first element).
tsheg :: String
tsheg = ['་','༌']
-- | Pipeline configuration bundling the Uchen processing stages.
tibetan_u :: ParticleConfig
tibetan_u = ParticleConfig
  { pre_processor_hc = preProcessor
  , tokenizer_hc     = tokenizer
  , cleanup_hc       = oneTsheg
  , particlate_hc    = particulate
  }
-- | Collapse each pair of adjacent tsheg tokens into a single one.
oneTsheg :: [(Token,Char)] -> [(Token,Char)]
oneTsheg [] = []
oneTsheg (tsheg'@(TIBETAN_UCHEN TSheg,_):(TIBETAN_UCHEN TSheg,_):xs) =
  tsheg':oneTsheg xs
oneTsheg (x:xs) = x:oneTsheg xs
-- Characters discarded outright by 'tokenizer'.
not_token :: String
not_token = ['.','\n','-',',','/','(',')','\\','║','=',':'
            ,'#','_','\r','【','】','[',']',',','》']
|
mlitchard/haks
|
src/Haks/Tibetan/Uchen/Particulator.hs
|
isc
| 1,914
| 0
| 10
| 382
| 684
| 382
| 302
| 48
| 1
|
-- We shall say that an n-digit number is pandigital if it makes use of all the digits 1 to n exactly once; for example the 5-digit number 15234 is 1 through 5 pandigital.
-- The product 7254 is unusual as the identity 39 × 186 = 7254 containing multiplicand multiplier and product is 1 through 9 pandigital.
-- Find the sum of all products whose multiplicand/multiplier/product identity can be written as a 1 through 9 pandigital.
-- HINT: Some products can be obtained in more than one way so be sure to only include it once in your sum.
module Euler32 where
import Data.List
import Data.Maybe
import Debug.Trace
-- | Infix tracing helper: @expr `debug` msg@ prints msg, returns expr.
debug = flip trace
-- Digit pool: 1..9 plus the sentinels 666 (multiplication sign) and
-- 777 (equals sign) consumed by 'isValidParsing'.
variants = permutations ([1..9] ++ [666, 777])
-- | Fold a list of base-10 digits into the number they denote.
-- Uses a strict left fold so long digit lists do not build up thunks
-- (the original lazy 'foldl' accumulated one per digit).
fromDigits :: Num a => [a] -> a
fromDigits = foldl' step 0
  where step acc d = 10 * acc + d
-- Final check
-- | Accept a multiplicand/multiplier/product split only when the
-- identity n * m == p holds; empty components are rejected.
isValidFinal ns ms ps
  | null ns || null ms || null ps = Nothing
  | n * m == p                    = Just p
  | otherwise                     = Nothing
  where
    n = fromDigits ns
    m = fromDigits ms
    p = fromDigits ps
-- Parsing
-- | Interpret a permuted digit list as @n × m = p@: 666 flips the
-- "past the multiplication sign" flag, 777 the "past the equals sign"
-- flag, and plain digits accumulate into whichever of ns/ms/ps the
-- flags select.  Sentinels in impossible positions fail early.
isValidParsing :: [Integer] -> [Integer] -> Bool -> [Integer] -> Bool -> [Integer] -> Maybe Integer
isValidParsing (666:xs) [] _ _ _ _ = Nothing
isValidParsing (777:xs) _ _ [] _ _ = Nothing
isValidParsing (777:[]) _ _ _ _ _ = Nothing
isValidParsing (x:xs) ns multiply ms equal ps
  | x == 666 = isValidParsing xs ns True ms equal ps
  | x == 777 = isValidParsing xs ns multiply ms True ps
  | not multiply = isValidParsing xs (ns ++ [x]) multiply ms equal ps
  | multiply && not equal = isValidParsing xs ns multiply (ms ++ [x]) equal ps
  | equal = isValidParsing xs ns multiply ms equal (ps ++ [x])
-- end of input: both sentinels must have been seen
isValidParsing [] ns plus ms equal ps
  | plus && equal = isValidFinal ns ms ps
  | otherwise = Nothing
-- Try every permutation of the digit pool and keep the distinct
-- pandigital products that were found.
isValid xs = isValidParsing xs [] False [] False []
validsMaybe xs = map isValid (permutations xs)
valids xs = nub (catMaybes (validsMaybe xs))
resultSum xs = sum (valids xs)
-- *Euler32 Data.Monoid Data.Maybe> sum([7632,6952,7852,7254,5346,5796,4396])
-- 45228
|
kirhgoff/haskell-sandbox
|
euler32/euler32.hs
|
mit
| 2,010
| 0
| 11
| 423
| 638
| 323
| 315
| 36
| 2
|
module PE0009 where
import Benchmark
-- | True iff the triple (a, b, c) satisfies a² + b² = c².
isPythagoreanTriplet :: (Num a, Eq a) => (a, a, a) -> Bool
isPythagoreanTriplet (x, y, z) = square x + square y == square z
  where square v = v * v
-- triangle triplets a < b < c whose circumference < n
-- but those are too much!
-- NOTE(review): dead weight per the comment above — 'main' uses
-- 'trianglesWithCircumference' instead; confirm before deleting.
trianglesUnderCircumference :: (Integral a) => a -> [(a, a, a)]
trianglesUnderCircumference n = [(a, b, c) | c <- [3 .. n],
                                             b <- [2 .. c-1],
                                             a <- [c-b+1 .. min (n-c-b-1) (b-1)]]
-- | Triples (a, b, c) with a + b + c == n, 0 < a < b < c; the third
-- component is fixed by the first two, so only c and b are enumerated.
trianglesWithCircumference :: (Integral a) => a -> [(a, a, a)]
trianglesWithCircumference n =
    [ (a, b, c)
    | c <- [n `div` 3 .. n]
    , b <- [2 .. c - 1]
    , let a = n - c - b
    , 0 < a
    , a < b
    ]
-- | All Pythagorean triangles whose circumference is exactly n.
pythagoreanTripletsWithCircumference :: (Integral a) => a -> [(a, a, a)]
pythagoreanTripletsWithCircumference n =
    filter isPythagoreanTriplet (trianglesWithCircumference n)
-- | Read the target circumference from argv (default "1000"), take the
-- first Pythagorean triple with that circumference, print it and the
-- product a*b*c (the Project Euler 9 answer).
main :: IO ()
main = do
  n <- arg 1 "1000"
  let n' = read n :: Int
  (a, b, c) <- return $ head $ pythagoreanTripletsWithCircumference n'
  print (a, b, c)
  putStrLn $ "Product: " ++ (show $ a*b*c)
|
mvidner/projecteuler
|
src/PE0009.hs
|
mit
| 1,036
| 8
| 13
| 267
| 499
| 275
| 224
| 23
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Aeson
import Data.ByteString.Lazy.UTF8
import Data.JSString (JSString, unpack, pack)
import Data.JSString.Text
import GHCJS.Foreign
import GHCJS.Foreign.Callback (Callback, OnBlocked(ContinueAsync), syncCallback1')
import GHCJS.Marshal.Pure
--import GHCJS.Prim
import GHCJS.Types (JSVal)
--import JavaScript.Object
import Language
import Lexer
import Parser
-- | Lift a @String -> IO String@ computation into a JS-callable
-- @JSVal -> IO JSVal@ callback, logging input and output to the console.
handleWithIO :: (String -> IO String) -> JSVal -> IO JSVal
handleWithIO handler jsInput = do
  putStrLn "Starting"
  let input = unpack (pFromJSVal jsInput)
  putStrLn ("Input: " ++ input)
  result <- handler input
  putStrLn ("Output: " ++ result)
  return (pToJSVal (pack result))
-- handleWithPure :: (String -> String) -> JSVal -> IO JSVal
-- handleWithPure f v = do
-- handleWithIO (\s -> return (f s)) v
-- typeCheck :: JSVal -> IO JSVal
-- typeCheck = handleWithPure process
-- parseTerm :: JSVal -> IO JSVal
-- parseTerm = handleWithPure $ pprint . Parser.parseTerm
-- | JS-callable entry point: parse the input as a term, run the type
-- inference, and return the JSON-encoded trace result, logging each
-- intermediate stage to the console.
typeCheckDebug :: JSVal -> IO JSVal
typeCheckDebug = handleWithIO $ \ input -> do
  let term = Parser.parseTerm input
  putStrLn $ "Parsed term: " ++ pprint term
  let work = inferWork term
  putStrLn $ "Work: " ++ show work
  let res = tcTraceResult work
  putStrLn $ "Result (show): " ++ show res
  putStrLn $ "Result (pprint): " ++ pprint res
  -- serialise to a JSON string for the JavaScript side
  let resStr = toString . encode $ res
  return resStr
-- | Install a Haskell callback on the browser @window@ object under the
-- given property name.
foreign import javascript unsafe
  "window[$1] = $2"
  js_setCallback :: JSString -> Callback a -> IO ()
-- | Signal to the page that the Haskell side has finished loading.
foreign import javascript unsafe
  "window['haskellReady'] = true"
  js_ready :: IO ()
-- | Expose an IO callback to JavaScript under the given window property.
register :: JSString -> (JSVal -> IO JSVal) -> IO ()
register name handler =
    js_setCallback name =<< syncCallback1' handler
-- | Register the exposed callbacks on the window, then flag readiness.
main = do
  --register "typeCheck" typeCheck
  register "typeCheckDebug" typeCheckDebug
  --register "parseTerm" Main.parseTerm
  js_ready
|
Ptival/ProofIDE
|
Main.hs
|
mit
| 1,831
| 8
| 13
| 333
| 464
| 238
| 226
| 42
| 1
|
{-# LANGUAGE CPP #-}
module GHCJS.DOM.XPathExpression (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.XPathExpression
#else
module Graphics.UI.Gtk.WebKit.DOM.XPathExpression
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.XPathExpression
#else
import Graphics.UI.Gtk.WebKit.DOM.XPathExpression
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/XPathExpression.hs
|
mit
| 460
| 0
| 5
| 39
| 33
| 26
| 7
| 4
| 0
|
module Document.Tests.Phase where
-- Modules
import Document.Tests.Suite
-- Libraries
import Test.UnitTest
-- | Entry point expected by the test driver.
test_case :: TestCase
test_case = test
-- | All refinement-relation tests for the phase example.
test :: TestCase
test = test_cases
        "refinement relations in the phase example"
        [ (aCase "test 0, cyclic refinement relation between machines" (find_errors path0) result0)
        , (aCase "test 1, valid references to variables and event declared in ancestor" case1 result1)
        , (aCase "test 2, invalid proof obligations" case2 result2)
        ]
-- Test 0: input document and expected cycle-detection errors.
path0 :: FilePath
path0 = [path|Tests/phases-t0.tex|]
result0 :: String
result0 = unlines
    [ "A cycle exists in the refinement structure"
    , "error 174:1:"
    , "\tm0"
    , ""
    , "error 180:1:"
    , "\tm1"
    , ""
    ]
-- Test 1: a valid document, so no errors are expected.
path1 :: FilePath
path1 = [path|Tests/phases-t1.tex|]
case1 :: IO String
case1 = find_errors path1
result1 :: String
result1 = "no errors"
-- Test 2: a reference to a nonexistent proof obligation; the expected
-- output lists the obligations that do exist.  These strings must match
-- the tool's output byte for byte.
path2 :: FilePath
path2 = [path|Tests/phases-t2.tex|]
case2 :: IO String
case2 = find_errors path2
result2 :: String
result2 = unlines
    [ "error 96:8:"
    , "    proof obligation does not exist: m0/TR/tr0/step/NEG"
    , ""
    , "m0/prog0/LIVE/disjunction/lhs"
    , "m0/prog0/LIVE/disjunction/rhs"
    , "m0/prog0/PROG/WD/rhs"
    , "m0/prog1/LIVE/implication"
    , "m0/prog1/PROG/WD/lhs"
    , "m0/prog1/PROG/WD/rhs"
    , "m0/prog2/LIVE/induction/lhs"
    , "m0/prog2/LIVE/induction/rhs"
    , "m0/prog2/PROG/WD/lhs"
    , "m0/prog2/PROG/WD/rhs"
    , "m0/prog3/LIVE/PSP/lhs"
    , "m0/prog3/LIVE/PSP/rhs"
    , "m0/prog3/PROG/WD/lhs"
    , "m0/prog3/PROG/WD/rhs"
    , "m0/prog4/LIVE/discharge/tr/lhs"
    , "m0/prog4/LIVE/discharge/tr/rhs"
    , "m0/prog4/PROG/WD/lhs"
    , "m0/prog4/PROG/WD/rhs"
    , "m0/saf0/SAF/WD/lhs"
    , "m0/saf0/SAF/WD/rhs"
    , "m0/step/FIS/pc@prime"
    , "m0/step/SAF/saf0"
    , "m0/step/WD/ACT/a0"
    , "m0/tr0/TR/WD"
    , "m0/tr0/TR/WFIS/p/p@prime"
    , "m0/tr0/TR/step/NEG"
    , ""
    ]
|
literate-unitb/literate-unitb
|
src/Document/Tests/Phase.hs
|
mit
| 1,970
| 0
| 10
| 454
| 311
| 192
| 119
| -1
| -1
|
{-# LANGUAGE GADTs, KindSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UndecidableInstances #-}
import Control.Monad
import Control.Applicative
-- Reverse apply
-- | @x -: f@ applies @f@ to @x@; lets pipelines read left to right.
(-:) :: a -> (a -> b) -> b
(-:) = flip id
infixl 1 -:
-- type-level fixed point combinator
newtype Fix f = Fix (f (Fix f))
-- Standalone deriving is required: the instance context mentions the
-- applied functor, which plain deriving cannot express.
deriving instance Show (f (Fix f)) => Show (Fix f)
deriving instance Eq (f (Fix f)) => Eq (Fix f)
-- | Wrap one layer of @f@.
fix :: f (Fix f) -> Fix f
fix = Fix
-- | Unwrap one layer of @f@.
unfix :: Fix f -> f (Fix f)
unfix (Fix f) = f
-- | Base functor of a list: one layer of structure, with @b@ marking
-- the recursive position.
data ListF :: * -> * -> * where
  NilF :: ListF a b
  ConsF :: a -> b -> ListF a b
  deriving (Show, Eq)
-- 'fmap' acts on the recursive slot only, not the element.
instance Functor (ListF a) where
  fmap _ NilF = NilF
  fmap f (ConsF a b) = ConsF a (f b)
-- List is ListF's fixed point
-- A List data type defined without recursion
type List a = Fix (ListF a)
-- | The empty list.
nil :: List a
nil = fix NilF
-- | Prepend an element, wrapping one 'ListF' layer.
cons :: a -> List a -> List a
cons x xs = fix $ ConsF x xs
infixr 5 `cons`
-- | 'fmap' for the fixed-point list: rebuilds the spine layer by layer
-- while applying @g@ to each element.
fmap' :: (a -> b) -> List a -> List b
fmap' g ys =
  case unfix ys of
    NilF       -> nil
    ConsF x tl -> g x `cons` fmap' g tl
-- | First element.  NOTE(review): partial — no equation for the 'NilF'
-- case, so it crashes on 'nil' (playground code).
head' :: List a -> a
head' (Fix (ConsF a _)) = a
-- | Everything after the first element; partial in the same way.
tail' :: List a -> List a
tail' (Fix (ConsF _ b)) = b
-- Example values exercising the fixed-point list.
list1 = 2 `cons` 5 `cons` 8 `cons` nil
list2 = fmap' show list1
-- | Base functor of a binary tree; @b@ marks both recursive positions.
data TreeF :: * -> * -> * where
  LeafF :: TreeF a b
  NodeF :: b -> a -> b -> TreeF a b
  deriving (Show, Eq)
-- Tree is TreeF's fixed point, analogous to 'List' above.
type Tree a = Fix (TreeF a)
leaf :: Tree a
leaf = fix LeafF
-- | Build a node from left subtree, value, and right subtree.
node :: Tree a -> a -> Tree a -> Tree a
node lt a rt = fix $ NodeF lt a rt
-- Example tree value.
tree1 = node (node (node leaf 1 leaf) 1 leaf) 2 (node (node (node leaf 3 (node leaf 5 leaf)) 8 leaf) 13 leaf)
-- Cofree can add annotation to a functor
-- e.g. annotate a AST, annotate the NatF type with Int
data Cofree f a = a :< (f (Cofree f a))
deriving instance (Show a, Show (f (Cofree f a))) => Show (Cofree f a)
deriving instance (Eq a, Eq (f (Cofree f a))) => Eq (Cofree f a)
infixr 5 :<
-- | Base functor of the (1-based) naturals.
data NatF :: * -> * where
  OneF :: NatF a
  SuccF :: a -> NatF a
  deriving (Show, Eq)
-- A natural where every layer is annotated with its 'Int' value.
type Nat = Cofree NatF Int
one :: Nat
one = 1 :< OneF
-- | Successor; the annotation is the predecessor's plus one.
-- NOTE(review): this name shadows 'Prelude.succ' at use sites.
succ :: Nat -> Nat
succ f@(a :< _) = a + 1 :< SuccF f
|
silverneko/haskell-playground
|
fixPlayground.hs
|
mit
| 2,119
| 79
| 12
| 534
| 1,040
| 531
| 509
| 64
| 2
|
{-# LANGUAGE CPP #-}
module Info (
versionInfo
, info
#ifdef TEST
, formatInfo
#endif
) where
import Data.List.Compat
import System.Process
import System.IO.Unsafe
#if __GLASGOW_HASKELL__ < 900
import Config as GHC
#else
import GHC.Settings.Config as GHC
#endif
import Data.Version (showVersion)
import qualified Paths_doctest
import Interpreter (ghc)
-- | Version of this doctest build, taken from the Cabal metadata.
version :: String
version = showVersion Paths_doctest.version
-- | Version of the GHC API this binary was compiled against.
ghcVersion :: String
ghcVersion = GHC.cProjectVersion
-- | Human-readable, multi-line version banner.
versionInfo :: String
versionInfo =
  concatMap (++ "\n")
    [ "doctest version " ++ version
    , "using version " ++ ghcVersion ++ " of the GHC API"
    , "using " ++ ghc
    ]
-- | Version fields plus GHC's own @--info@ key/value pairs, rendered
-- with 'formatInfo'.
info :: String
info = formatInfo $
     ("version", version)
   : ("ghc_version", ghcVersion)
   : ("ghc", ghc)
   : ghcInfo
type Info = [(String, String)]
-- | Key/value pairs reported by @ghc --info@.
-- NOTE(review): 'unsafePerformIO' runs an external process at first
-- use and 'read' will crash on unparseable output — deliberate here,
-- but keep this out of any hot or failure-sensitive path.
ghcInfo :: Info
ghcInfo = read $ unsafePerformIO (readProcess ghc ["--info"] "")
-- | Render key/value pairs in the @ghc --info@-style list layout.
formatInfo :: Info -> String
formatInfo entries = " [" ++ intercalate "\n ," (map show entries) ++ "\n ]\n"
|
sol/doctest
|
src/Info.hs
|
mit
| 1,044
| 0
| 9
| 247
| 259
| 153
| 106
| 31
| 1
|
{-
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification
OpenAPI Version: 3.0.0
Swaggy Jenkins API version: 1.1.2-pre.0
Contact: blah@cliffano.com
Generated by OpenAPI Generator (https://openapi-generator.tech)
-}
{-|
Module : SwaggyJenkins.API
-}
module SwaggyJenkins.API
( module SwaggyJenkins.API.Base
, module SwaggyJenkins.API.BlueOcean
, module SwaggyJenkins.API.RemoteAccess
) where
import SwaggyJenkins.API.Base
import SwaggyJenkins.API.BlueOcean
import SwaggyJenkins.API.RemoteAccess
|
cliffano/swaggy-jenkins
|
clients/haskell-http-client/generated/lib/SwaggyJenkins/API.hs
|
mit
| 552
| 0
| 5
| 83
| 49
| 34
| 15
| 7
| 0
|
{- Has some code essentially copied from osa1/psc-lua -}
{-# LANGUAGE PatternGuards #-}
module Language.PureScript.CodeGen.Lua.Common where
import Data.Char
import Data.List (intercalate, foldl')
import Language.PureScript.Names
import qualified Language.Lua.Syntax as L
import qualified Language.Lua.PrettyPrinter as L
-- | Convert an Ident into a valid Lua identifier:
--
-- * Wraps reserved identifiers with _
--
-- * Symbols are wrapped with _ between a symbol name or their ordinal value.
--
identToLua :: Ident -> String
-- reserved words collide with Lua keywords, so wrap them
identToLua (Ident name) | nameIsLuaReserved name = '_' : name ++ "_"
identToLua (Ident name) = name >>= identCharToString
-- operator names are escaped character by character
identToLua (Op op) = op >>= identCharToString
-- | Escape a single identifier character for Lua: alphanumerics pass
-- through, known symbols get a readable @_name_@ spelling, and anything
-- else falls back to its ordinal value.
identCharToString :: Char -> String
identCharToString c
  | isAlphaNum c = [c]
  | otherwise =
      case c of
        '_' -> "_"
        '.' -> "_dot_"
        '$' -> "_dollar_"
        '~' -> "_tilde_"
        '=' -> "_eq_"
        '<' -> "_less_"
        '>' -> "_greater_"
        '!' -> "_bang_"
        '#' -> "_hash_"
        '%' -> "_percent_"
        '^' -> "_up_"
        '&' -> "_amp_"
        '|' -> "_bar_"
        '*' -> "_times_"
        '/' -> "_div_"
        '+' -> "_plus_"
        '-' -> "_minus_"
        ':' -> "_colon_"
        '\\' -> "_bslash_"
        '?' -> "_qmark_"
        '@' -> "_at_"
        '\'' -> "_prime_"
        _ -> '_' : show (ord c) ++ "_"
-- | Whether the given name is one of Lua's reserved keywords.
nameIsLuaReserved :: String -> Bool
nameIsLuaReserved name = name `elem` luaKeywords
  where
    luaKeywords =
      [ "and", "break", "do", "else", "elseif", "end", "false"
      , "for", "function", "if", "in", "local", "nil", "not"
      , "or", "repeat", "return", "then", "true", "until", "while"
      ]
moduleNameToLua :: ModuleName -> String
moduleNameToLua (ModuleName pns) = intercalate "_" (map runProperName pns)
-- | Reference a Lua variable by name, as an expression.
var :: String -> L.Exp
var name = L.PrefixExp (L.PEVar (L.VarName name))
-- | Build a function-call expression.
funcall :: L.Exp -> [L.Exp] -> L.Exp
funcall f args =
  L.PrefixExp (L.PEFunCall (L.NormalFunCall (expToPexp f) (L.Args args)))
-- | Build a call statement for a function referenced by name.
funcallS :: String -> [L.Exp] -> L.Stat
funcallS f args = L.FunCall (L.NormalFunCall callee (L.Args args))
  where callee = L.PEVar (L.VarName f)
-- | Build a call statement for an arbitrary function expression.
funcallStat :: L.Exp -> [L.Exp] -> L.Stat
funcallStat f args = L.FunCall (L.NormalFunCall (expToPexp f) (L.Args args))
-- | Render a list of pretty-printable values to one string. Each element
-- is rendered at ribbon 0.8 / width 100 and prepended to the accumulator
-- after a newline, so the output lists elements in reverse input order.
-- NOTE(review): the reversal falls out of the left fold with prepending
-- ShowS values; confirm callers rely on (or tolerate) this order.
pprint :: L.LPretty l => [l] -> String
pprint ls =
  foldl' (\str ss -> "\n" ++ ss str) "" $ map (L.displayS . L.renderPretty 0.8 100 . L.pprint) ls
-- | View an expression as a prefix-expression, parenthesising when the
-- expression is not already one.
expToPexp :: L.Exp -> L.PrefixExp
expToPexp (L.PrefixExp pexp) = pexp
expToPexp other = L.Paren other
-- | Replace every occurrence of the first value by the second in a list.
replace :: Eq a => a -> a -> [a] -> [a]
replace lhs rhs = map substitute
  where
    substitute x
      | x == lhs = rhs
      | otherwise = x
-- | Index into a table expression: @tab[index]@.
select :: L.Exp -> L.Exp -> L.Exp
select tab index = L.PrefixExp (L.PEVar (L.Select (expToPexp tab) index))
-- | Index into a table with a string key: @tab["field"]@.
selectS :: L.Exp -> String -> L.Exp
selectS tab field = select tab (string field)
-- | A Lua string literal; 'show' supplies the quoting and escaping.
string :: String -> L.Exp
string s = L.String (show s)
|
raymoo/psc-lua-redux
|
src/Language/PureScript/CodeGen/Lua/Common.hs
|
mit
| 3,063
| 0
| 13
| 911
| 1,002
| 531
| 471
| 90
| 23
|
{-# LANGUAGE OverloadedStrings #-}
module Web.Uploadcare.Client
(
Client(..)
, newClient
, newDemoClient
, closeClient
) where
import Data.ByteString.Char8 (ByteString)
import Network.HTTP.Conduit (Manager, def, newManager, closeManager)
-- | An Uploadcare API client: an HTTP connection manager plus the
-- account's public/secret key pair.
data Client = Client {
    manager :: Manager
  , publicKey :: ByteString
  , secretKey :: ByteString
}
-- | Create a client for the given public/secret key pair, allocating a
-- fresh HTTP connection manager (release it with 'closeClient').
newClient :: ByteString -> ByteString -> IO Client
newClient connPublicKey connSecretKey = do
    mgr <- newManager def
    return Client
        { manager = mgr
        , publicKey = connPublicKey
        , secretKey = connSecretKey
        }
-- | Create a client bound to Uploadcare's public demo account, warning
-- on stdout that demo credentials are in use.
newDemoClient :: IO Client
newDemoClient =
    putStrLn "Warning! You are using the demo account."
        >> newClient "demopublickey" "demoprivatekey"
-- | Release the client's HTTP connection manager.
closeClient :: Client -> IO ()
closeClient client = closeManager (manager client)
|
uploadcare/uploadcare-haskell
|
src/Web/Uploadcare/Client.hs
|
mit
| 825
| 0
| 9
| 164
| 197
| 112
| 85
| 26
| 1
|
{-# LANGUAGE MonadComprehensions #-}
{-# LANGUAGE RebindableSyntax #-}
module Set1 where
import MCPrelude
-- import Control.Arrow (first)
-- 1. Random number generation
-- | The first five random 'Integer's from seed 1, threading the seed by
-- hand through 'rand'.
fiveRands :: [Integer]
fiveRands = take 5 (map fst (iterate step (rand (mkSeed 1))))
  where step (_, s) = rand s
-- 2. Random character generation
-- | Generate one random letter, passing the next seed along.
randLetter :: Seed -> (Char, Seed)
randLetter seed = let (n, seed') = rand seed in (toLetter n, seed')
-- | Three random letters from seed 1.
randString3 :: String
randString3 = take 3 (map fst (iterate step (randLetter (mkSeed 1))))
  where step (_, s) = randLetter s
-- 3. More generators
-- | A generator: consumes a 'Seed', yields a value plus the next seed.
type Gen a = Seed -> (a, Seed)
-- | Map a pure function over a generator's result.
generalA :: (a -> b) -> Gen a -> Gen b
generalA f g seed = let (x, seed') = g seed in (f x, seed')
-- Derived generators built with 'generalA'.
randEven = generalA (*2) rand -- doubled, hence always even
randOdd = generalA succ randEven -- even plus one, hence always odd
randTen = generalA (*10) rand -- multiples of ten
-- 4. Generalizing random pairs
-- | Generate a letter followed by a number, threading the seed through.
randPair :: Gen (Char, Integer)
randPair seed0 = ((letter, number), seed2)
  where
    (letter, seed1) = randLetter seed0
    (number, seed2) = rand seed1
-- | Pair the results of two generators, running them in sequence.
generalPair :: Gen a -> Gen b -> Gen (a, b)
generalPair ga gb seed0 = ((x, y), seed2)
  where
    (x, seed1) = ga seed0
    (y, seed2) = gb seed1
-- | Combine the results of two sequenced generators with a binary function.
generalB :: (a -> b -> c) -> Gen a -> Gen b -> Gen c
generalB f ga gb seed0 = (f x y, seed2)
  where
    (x, seed1) = ga seed0
    (y, seed2) = gb seed1
-- | 'generalPair' expressed via 'generalB'.
generalPair2 = generalB (,)
-- 5. Generalizing lists of generators
-- | Run a list of generators in order, collecting their results.
repRandom :: [Gen a] -> Gen [a]
repRandom = foldr (generalB (:)) (\s -> ([], s))
-- 6. Threading the random number state
-- | Sequence two generators, feeding the first result to the second.
genTwo :: Gen a -> (a -> Gen b) -> Gen b
genTwo ga f seed = let (x, seed') = ga seed in f x seed'
-- | A generator yielding a constant, leaving the seed untouched.
mkGen :: a -> Gen a
mkGen x seed = (x, seed)
|
gafiatulin/monad-challenges
|
src/Set1.hs
|
mit
| 1,611
| 0
| 12
| 402
| 726
| 393
| 333
| 36
| 1
|
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings where
import ClassyPrelude.Yesod
import Control.Exception (throw)
import Data.Aeson (Result (..), fromJSON, withObject, (.!=),
(.:?))
import Data.FileEmbed (embedFile)
import Data.Yaml (decodeEither')
import Database.Persist.Sqlite (SqliteConf)
import Language.Haskell.TH.Syntax (Exp, Name, Q)
import Network.Wai.Handler.Warp (HostPreference)
import Yesod.Default.Config2 (applyEnvValue, configSettingsYml)
import Yesod.Default.Util (WidgetFileSettings, widgetFileNoReload,
widgetFileReload)
-- | Runtime settings to configure this application. These settings can be
-- loaded from various sources: defaults, environment variables, config files,
-- theoretically even a database.
-- | Runtime settings to configure this application. These settings can be
-- loaded from various sources: defaults, environment variables, config files,
-- theoretically even a database.
data AppSettings = AppSettings
    { appStaticDir              :: String
    -- ^ Directory from which to serve static files.
    , appDatabaseConf           :: SqliteConf
    -- ^ Configuration settings for accessing the database.
    , appRoot                   :: Text
    -- ^ Base for all generated URLs.
    , appHost                   :: HostPreference
    -- ^ Host/interface the server should bind to.
    , appPort                   :: Int
    -- ^ Port to listen on
    , appIpFromHeader           :: Bool
    -- ^ Get the IP address from the header when logging. Useful when sitting
    -- behind a reverse proxy.
    , appDetailedRequestLogging :: Bool
    -- ^ Use detailed request logging system
    , appShouldLogAll           :: Bool
    -- ^ Should all log messages be displayed?
    , appReloadTemplates        :: Bool
    -- ^ Use the reload version of templates
    , appMutableStatic          :: Bool
    -- ^ Assume that files in the static dir may change after compilation
    , appSkipCombining          :: Bool
    -- ^ Perform no stylesheet/script combining
    --userdefined
    , appRevProx                :: Text
    -- ^ Reverse proxy for server
    , appTempDir                :: Text
    -- ^ temporary directory for grid engine files
    , appGeQueueName            :: Text
    -- ^ grid engine queue name (NOTE(review): the original comment here
    -- duplicated appTempDir's; description inferred from the field name
    -- and the "gequeuename" config key -- confirm)
    , appDataDir                :: Text
    -- ^ directory for data, e.g. bigcache
    , appProgramDir             :: Text
    -- ^ directory for executables, e.g. blast transalign
    , appCopyright              :: Text
    -- ^ Copyright text to appear in the footer of the page
    , appAnalytics              :: Maybe Text
    -- ^ Google Analytics code
    }
-- | Decode 'AppSettings' from the YAML\/JSON config object. The optional
-- logging/template switches default to the compile-time DEVELOPMENT flag.
instance FromJSON AppSettings where
    parseJSON = withObject "AppSettings" $ \o -> do
        let defaultDev =
#if DEVELOPMENT
                True
#else
                False
#endif
        appStaticDir              <- o .: "static-dir"
        appDatabaseConf           <- o .: "database"
        appRoot                   <- o .: "approot"
        appHost                   <- fromString <$> o .: "host"
        appPort                   <- o .: "port"
        appIpFromHeader           <- o .: "ip-from-header"
        -- user-defined
        appRevProx                <- o .: "revprox"
        appTempDir                <- o .: "tempdir"
        appGeQueueName            <- o .: "gequeuename"
        appDataDir                <- o .: "datadir"
        appProgramDir             <- o .: "programdir"
        appDetailedRequestLogging <- o .:? "detailed-logging" .!= defaultDev
        appShouldLogAll           <- o .:? "should-log-all"   .!= defaultDev
        appReloadTemplates        <- o .:? "reload-templates" .!= defaultDev
        appMutableStatic          <- o .:? "mutable-static"   .!= defaultDev
        appSkipCombining          <- o .:? "skip-combining"   .!= defaultDev
        appCopyright              <- o .: "copyright"
        appAnalytics              <- o .:? "analytics"
        return AppSettings {..}
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
--
-- For more information on modifying behavior, see:
--
-- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile
-- Default widget-file settings (see the comment block above for how to
-- override behavior).
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
-- | How static files should be combined.
combineSettings :: CombineSettings
combineSettings = def
-- The rest of this file contains settings which rarely need changing by a
-- user.
-- | Embed a widget file; in development (reload-templates) the template is
-- re-read at runtime, otherwise it is compiled in.
widgetFile :: String -> Q Exp
widgetFile = loader widgetFileSettings
  where
    loader
      | appReloadTemplates compileTimeAppSettings = widgetFileReload
      | otherwise = widgetFileNoReload
-- | Raw bytes at compile time of @config/settings.yml@
-- (embedded via Template Haskell; the file must exist at build time).
configSettingsYmlBS :: ByteString
configSettingsYmlBS = $(embedFile configSettingsYml)
-- | @config/settings.yml@, parsed to a @Value@. A malformed file is a
-- fatal error ('throw').
configSettingsYmlValue :: Value
configSettingsYmlValue =
    case decodeEither' configSettingsYmlBS of
        Left err -> throw err
        Right value -> value
-- | A version of @AppSettings@ parsed at compile time from @config/settings.yml@.
compileTimeAppSettings :: AppSettings
compileTimeAppSettings =
    case fromJSON parsed of
        Error e -> error e
        Success settings -> settings
  where
    parsed = applyEnvValue False mempty configSettingsYmlValue
-- The following two functions can be used to combine multiple CSS or JS files
-- at compile time to decrease the number of http requests.
-- Sample usage (inside a Widget):
--
-- > $(combineStylesheets 'StaticR [style1_css, style2_css])
-- | Combine CSS files at compile time; skipped entirely when
-- skip-combining is set.
combineStylesheets :: Name -> [Route Static] -> Q Exp
combineStylesheets = combineStylesheets'
    (appSkipCombining compileTimeAppSettings)
    combineSettings
-- | Combine JS files at compile time (mirrors 'combineStylesheets').
combineScripts :: Name -> [Route Static] -> Q Exp
combineScripts = combineScripts'
    (appSkipCombining compileTimeAppSettings)
    combineSettings
|
bsarah/TAWS
|
Settings.hs
|
gpl-2.0
| 6,046
| 14
| 14
| 1,739
| 729
| 449
| 280
| -1
| -1
|
{- | Module : $Header$
- Description : Logic specific function implementation for Coalition Logic
- Copyright : (c) Georgel Calin & Lutz Schroeder, DFKI Lab Bremen
- License : GPLv2 or higher, see LICENSE.txt
- Maintainer : g.calin@jacobs-university.de
- Stability : provisional
- Portability : non-portable (various -fglasgow-exts extensions)
-
- Provides the implementation of the logic specific functions of the
- ModalLogic class in the particular case of the Coalition Logic -}
{-# OPTIONS -fglasgow-exts #-}
module GMP.CoalitionL where
import qualified Data.Set as Set
import Text.ParserCombinators.Parsec
import GMP.Lexer
import GMP.ModalLogic
import GMP.GMPAS
-- | Rules for Coalition Logic corresponding to the axiomatized rules
data CLrules = CLNR Int -- one index (NOTE(review): presumably the "N" rule's arity -- confirm)
             | CLPR Int Int -- two indices (presumably the "P" rule -- confirm)
  deriving Show
-- | Indexes of negated & non-negated modal applications
data Coeffs = Coeffs [Set.Set Int] [Set.Set Int]
  deriving (Eq, Ord)
instance ModalLogic CL CLrules where
  {- |
   - -}
  -- Find the maximal agent index used anywhere in the formula, rewrite
  -- every coalition operator to carry that index, then check that each
  -- coalition set fits within it.
  specificPreProcessing f = do
    let getMaxAgents g m =
          case g of
            Mapp (Mop (CL _ i) _) _
              -> if (i/=(-1)) then i else m
            Junctor f1 _ f2
              -> max (getMaxAgents f1 m) (getMaxAgents f2 m)
            Neg ff
              -> getMaxAgents ff m
            _ -> m
        -- Stamp the (single) max-agents index m onto every modal operator;
        -- fails when two operators disagree or no index was found at all.
        resetMaxAgents g m =
          case g of
            Mapp (Mop (CL s i) t) h
              -> if (m==(-1))||((i/=(-1))&&(i/=m))
                  then fail "CoalitionL.getMaxAgents"
                  else return $ Mapp (Mop (CL s m) t) h
            Junctor f1 j f2
              -> do r1 <- resetMaxAgents f1 m
                    r2 <- resetMaxAgents f2 m
                    return $ Junctor r1 j r2
            Neg ff
              -> do r <- resetMaxAgents ff m
                    return $ Neg r
            _ -> do return g
        -- Each coalition must contain agents 1..i only and not exceed i members.
        checkConsistency g =
          case g of
            Mapp (Mop (CL s i) _) _
              -> if (Set.findMax s > i)||(Set.findMin s < 1)||(Set.size s > i)
                  then fail "CoalitionL.checkConsistency"
                  else return g
            Junctor f1 j f2
              -> do r1 <- checkConsistency f1
                    r2 <- checkConsistency f2
                    return $ Junctor r1 j r2
            Neg ff
              -> do r <- checkConsistency ff
                    return $ Neg r
            _-> do return g
        aux = getMaxAgents f (-1)
    tmp <- resetMaxAgents f aux
    checkConsistency tmp
  flagML _ = Sqr
  -- Parse a coalition index: either "{i,j,..}" (with ".." ranges allowed)
  -- optionally followed by the number of agents, or a bare number n,
  -- which stands for the full coalition {1..n}.
  parseIndex = do try(char '{')
                  let stopParser = do try(char ',')
                                      return False
                               <|> do try(char '}')
                                      return True
                               <?> "CoalitionL.parseIndex.stop"
                  -- "n..m" range notation
                  let shortParser = do xx <- natural
                                       let n = fromInteger xx
                                       string ".."
                                       yy <- natural
                                       let m = fromInteger yy
                                       return $ Set.fromList [n..m]
                                <?> "CoalitionL.parseIndex.short"
                  -- accumulate ranges/single agents until '}'
                  let xParser s = do aux <- try(shortParser)
                                     let news = Set.union s aux
                                     q <- stopParser
                                     case q of
                                       False -> xParser news
                                       _ -> return news
                              <|> do n <- natural
                                     let news = Set.insert (fromInteger n) s
                                     q <- stopParser
                                     case q of
                                       False -> xParser news
                                       _ -> return news
                              <?> "CoalitionL.parseIndex.x"
                  let isEmptyParser = do try(char '}')
                                         whiteSpace
                                         return Set.empty
                                  <|> do aux <- xParser Set.empty
                                         return aux
                                  <?> "CoalitionL.parseIndex.isEmpty"
                  -- optional trailing agent count; -1 means "unknown"
                  let maxAgentsParser = do aux <- try(natural)
                                           let n = fromInteger aux
                                           return n
                                    <|> return (-1::Int)
                                    <?> "CoalitionL.parseIndex.maxAgents"
                  res <- isEmptyParser
                  n <- maxAgentsParser
                  return $ CL res n
           <|> do aux <- natural
                  let n = fromInteger aux
                  let res = Set.fromList [1..n]
                  return $ CL res n
           <?> "CoalitionL.parseIndex"
  -- Match a contracted clause against the two rule shapes.
  matchR r = let Coeffs q w = eccContent r
             in if (pairDisjoint q)&&(w/=[])
                then if (allSubsets q (head w))&&(allMaxEq (head w) (tail w))
                     then [CLPR (length q) (-1 + length w)]
                     else []
                else if (pairDisjoint w)&&(q==[])
                     then [CLNR (length w)]
                     else []
  guessClause r =
    case r of
      CLNR n -> [Pimplies [] [1..n]]
      CLPR n m -> [Pimplies [(m+2)..(m+n+1)] [1..(m+1)]]
-- | Returns the extracted content of a contracted clause as Coeffs:
-- the coalition sets of the negated and non-negated modal applications.
eccContent :: ModClause CL -> Coeffs
eccContent (Mimplies n p) = Coeffs (map grade n) (map grade p)
  where
    grade (Mapp (Mop (CL g _) Square) _) = g
    grade _ = error "CoalitionL.getGrade"
-- | True if the list contains pairwise disjoint sets.
-- (Rewritten from nested if/then True/else False chains; disjointness of
-- two sets is an empty intersection, equivalent to the original
-- @Set.difference e r == e@ test.)
pairDisjoint :: [Set.Set Int] -> Bool
pairDisjoint [] = True
pairDisjoint (h:t) =
  all (Set.null . Set.intersection h) t && pairDisjoint t
-- | True if all sets in the 1st list arg. are subsets of the 2nd set arg.
-- (Manual recursion with if/then True/else False replaced by 'all'.)
allSubsets :: (Ord a) => [Set.Set a] -> Set.Set a -> Bool
allSubsets l s = all (`Set.isSubsetOf` s) l
-- | True if all sets in the 2nd list arg. are equal and supersets of the 1st.
-- (Replaces the partial 'head'/'tail' on a list already known non-empty
-- with a direct pattern match, and the if/then True/else False with '&&'.)
allMaxEq :: (Ord a) => Set.Set a -> [Set.Set a] -> Bool
allMaxEq _ [] = True
allMaxEq s (x:xs) = (s `Set.isSubsetOf` x) && all (== x) xs
|
nevrenato/Hets_Fork
|
GMP/versioning/gmp-0.0.1/GMP/CoalitionL.hs
|
gpl-2.0
| 7,114
| 9
| 21
| 3,486
| 1,875
| 910
| 965
| -1
| -1
|
module Test.Continuation
( module Test.Continuation
, module Continuation
) where
import Continuation
import Control.Concurrent
-- | A process that prints every message it receives and echoes it back.
squawker :: IO (ProcessId String String)
squawker = respondWith () squawker'
-- | Handler for 'squawker': print the payload, then reply with it.
squawker' :: () -> Message String String -> IO()
squawker' _ (str, cont) = putStrLn str >> reply cont str >> return ()
-- | A process that replies to every message with the message itself.
echo = respondWith () handler
  where handler _ (m, cont) = reply cont m >> return ()
-- | A responder that ignores the message and returns its state unchanged.
respNull :: o -> Message m r -> IO o
respNull x = const (return x)
|
antarestrader/sapphire
|
test/Continuation.hs
|
gpl-3.0
| 465
| 0
| 9
| 95
| 192
| 97
| 95
| 15
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Deadlink
Description : DeadLink’s runtime
Copyright : (c) Frédéric BISSON, 2016
License : GPL-3
Maintainer : zigazou@free.fr
Stability : experimental
Portability : POSIX
Main DeadLink’s functions.
-}
module Deadlink
( deadlinkLoop
, deadlinkInit
, getCurrentIteration
)
where
import Network.URI.Text (nullURI)
import System.IO (stdout, hFlush)
import Data.Text (Text)
import Database.SQLite3 (Database, open, close)
import Data.Link (Link, makeLink, pertinent)
import Network.LinkChecker (verify, parse, loadLinks)
import Database.LinkSQL ( getUncheckedLinks, getUnparsedHTMLLinks, updateLink
, insertLink, remainingJob, getLastIteration
)
import Control.Monad (liftM)
import Settings (curlCheckOptions, curlLoadOptions)
-- | Open the database just long enough to read the last iteration number.
getCurrentIteration :: Text -> IO Int
getCurrentIteration dbname = do
    db <- open dbname
    lastIteration <- getLastIteration db
    close db
    return lastIteration
-- | Collect the pertinent links of a page, store them, and return the
-- page's link updated by parsing.
-- (Deprecated-style 'liftM' replaced by 'fmap'; behavior unchanged.)
checkPage :: Database -> Int -> Link -> IO Link
checkPage db iteration baseLink = do
    -- Load links from web page, keeping only the pertinent ones
    links <- fmap (filter (pertinent baseLink))
                  (loadLinks curlLoadOptions baseLink)
    -- Insert pertinent links in the database, 50 at a time
    actionPartition 50 links (mapM_ (insertLink db iteration baseLink))
    -- Update current link
    parse baseLink
-- | Initializes the database with the root element.
deadlinkInit :: Text -> Link -> IO ()
deadlinkInit dbname rootLink = do
    database <- open dbname
    _ <- insertLink database 0 (makeLink nullURI) rootLink
    close database
-- | Group execution of actions: feed the list to the action in chunks
-- of the given size.
actionPartition :: Int -> [b] -> ([b] -> IO ()) -> IO ()
actionPartition _ [] _ = return ()
actionPartition nb list action =
    let (chunk, rest) = splitAt nb list
    in action chunk >> actionPartition nb rest action
-- | An iteration consists of links checking followed by pages parsing
deadlinkIteration :: Text -> Int -> Link -> IO ()
deadlinkIteration dbname iteration base = do
    db <- open dbname
    -- Get unchecked links
    (uncheckedCount, uncheckeds) <- getUncheckedLinks db
    putStr $ "Checking " ++ show uncheckedCount ++ " links... "
    hFlush stdout
    -- Update links states. Operates 50 links by 50 links to save memory.
    actionPartition 50 uncheckeds $ \list -> do
        mapM (verify curlCheckOptions) list >>= mapM_ (updateLink db)
        tick '*'
    -- Check every unparsed HTML page
    (unparsedCount, unparseds) <- getUnparsedHTMLLinks db base
    putStr $ "\nParsing " ++ show unparsedCount ++ " pages... "
    hFlush stdout
    -- Update pages states. Operates 50 links by 50 links to save memory.
    actionPartition 50 unparseds $ \list -> do
        mapM (checkPage db iteration) list >>= mapM_ (updateLink db)
        tick '*'
    close db
    putStr "\n"
    -- One '*' per 50-element batch gives coarse progress feedback.
  where tick c = putChar c >> hFlush stdout
-- | Loop again and again till there is no more links to check or page to
-- parse. It is the responsibility of the caller to call `withCurlDo` before
-- calling this function.
-- NOTE(review): iterations are hard-capped at 15 by the first equation
-- (the message says 16, presumably because counting starts at 0 -- confirm).
deadlinkLoop :: Text -> Int -> Link -> IO ()
deadlinkLoop _ 15 _ = putStrLn "16 iterations, I stop here!"
deadlinkLoop dbname iteration baselink = do
    putStrLn $ "Iteration " ++ show iteration
    db <- open dbname
    (pageCount, linkCount) <- remainingJob db baselink
    close db
    if pageCount == 0 && linkCount == 0
    then putStrLn "Finished!"
    else do
        deadlinkIteration dbname iteration baselink
        deadlinkLoop dbname (iteration + 1) baselink
|
Zigazou/deadlink
|
src/Deadlink.hs
|
gpl-3.0
| 3,544
| 0
| 14
| 821
| 885
| 438
| 447
| 67
| 2
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
-- |
-- Module : Aura.Settings
-- Copyright : (c) Colin Woodbury, 2012 - 2020
-- License : GPL3
-- Maintainer: Colin Woodbury <colin@fosskers.ca>
--
-- Definition of the runtime environment.
module Aura.Settings
( Settings(..)
, logFuncOfL
-- * Aura Configuration
, BuildConfig(..), BuildSwitch(..)
, buildPathOfL, buildUserOfL, buildSwitchesOfL, allsourcePathOfL, vcsPathOfL
, switch
, Truncation(..)
, defaultBuildDir
-- * Pacman Interop
, CommonConfig(..), CommonSwitch(..)
, cachePathOfL, logPathOfL
, ColourMode(..)
, shared
-- * Makepkg Interop
, Makepkg(..)
) where
import Aura.Types
import Network.HTTP.Client (Manager)
import RIO
import qualified RIO.Set as S
import qualified RIO.Text as T
---
-- | How @-As@ should truncate its results.
data Truncation = None | Head !Word | Tail !Word deriving (Eq, Show)
-- | CLI flags that will be passed down to @makepkg@ when building packages.
data Makepkg = IgnoreArch | AllSource | SkipInteg | SkipPGP deriving (Eq, Ord, Show)
-- Each constructor maps to exactly one makepkg CLI flag.
instance Flagable Makepkg where
  asFlag IgnoreArch = ["--ignorearch"]
  asFlag AllSource  = ["--allsource"]
  asFlag SkipInteg  = ["--skipinteg"]
  asFlag SkipPGP    = ["--skippgpcheck"]
-- | Flags that are common to both Aura and Pacman.
-- Aura will react to them, but also pass them through to `pacman`
-- calls if necessary.
data CommonConfig = CommonConfig
  { cachePathOf      :: !(Either FilePath FilePath)
  , configPathOf     :: !(Either FilePath FilePath)
  , logPathOf        :: !(Either FilePath FilePath)
  , commonSwitchesOf :: !(Set CommonSwitch) } deriving (Show, Generic)
-- | Lens onto 'cachePathOf'.
cachePathOfL :: Lens' CommonConfig (Either FilePath FilePath)
cachePathOfL f cc = fmap (\newPath -> cc { cachePathOf = newPath }) (f (cachePathOf cc))
-- | Lens onto 'logPathOf'.
logPathOfL :: Lens' CommonConfig (Either FilePath FilePath)
logPathOfL f cc = fmap (\newPath -> cc { logPathOf = newPath }) (f (logPathOf cc))
-- Render each configured path as a flag pair; @Left@ values emit nothing.
instance Flagable CommonConfig where
  asFlag (CommonConfig cap cop lfp cs) =
    pathFlag "--cachedir" cap
      <> pathFlag "--config" cop
      <> pathFlag "--logfile" lfp
      <> asFlag cs
    where
      pathFlag name = either (const []) (\p -> [name, T.pack p])
-- | Yes/No-style switches that are common to both Aura and Pacman.
-- Aura acts on them first, then passes them down to @pacman@ if necessary.
data CommonSwitch = NoConfirm | NeededOnly | Debug | Colour !ColourMode | Overwrite !Text
  deriving (Eq, Ord, Show)
instance Flagable CommonSwitch where
  asFlag NoConfirm     = ["--noconfirm"]
  asFlag NeededOnly    = ["--needed"]
  asFlag Debug         = ["--debug"]
  asFlag (Colour m)    = "--color" : asFlag m
  asFlag (Overwrite t) = "--overwrite" : asFlag t
-- | Matches Pacman's colour options. `Auto` will ensure that text will only be coloured
-- when the output target is a terminal.
data ColourMode = Never | Always | Auto deriving (Eq, Ord, Show)
instance Flagable ColourMode where
  asFlag Never  = ["never"]
  asFlag Always = ["always"]
  asFlag Auto   = ["auto"]
-- | Settings unique to the AUR package building process.
data BuildConfig = BuildConfig
  { makepkgFlagsOf  :: !(Set Makepkg)
  , buildPathOf     :: !(Maybe FilePath)
  , buildUserOf     :: !(Maybe User)
  , allsourcePathOf :: !(Maybe FilePath)
  , vcsPathOf       :: !(Maybe FilePath)
  , truncationOf    :: !Truncation  -- For `-As`
  , buildSwitchesOf :: !(Set BuildSwitch) } deriving (Show)
-- | Lens onto 'buildPathOf'.
buildPathOfL :: Lens' BuildConfig (Maybe FilePath)
buildPathOfL f bc = fmap (\new -> bc { buildPathOf = new }) (f (buildPathOf bc))
-- | Lens onto 'buildUserOf'.
buildUserOfL :: Lens' BuildConfig (Maybe User)
buildUserOfL f bc = fmap (\new -> bc { buildUserOf = new }) (f (buildUserOf bc))
-- | Lens onto 'buildSwitchesOf'.
buildSwitchesOfL :: Lens' BuildConfig (Set BuildSwitch)
buildSwitchesOfL f bc = fmap (\new -> bc { buildSwitchesOf = new }) (f (buildSwitchesOf bc))
-- | Lens onto 'allsourcePathOf'.
allsourcePathOfL :: Lens' BuildConfig (Maybe FilePath)
allsourcePathOfL f bc = fmap (\new -> bc { allsourcePathOf = new }) (f (allsourcePathOf bc))
-- | Lens onto 'vcsPathOf'.
vcsPathOfL :: Lens' BuildConfig (Maybe FilePath)
vcsPathOfL f bc = fmap (\new -> bc { vcsPathOf = new }) (f (vcsPathOf bc))
-- | Extra options for customizing the build process.
data BuildSwitch
  = AsDeps
  | DeleteBuildDir
  | DeleteMakeDeps
  | DiffPkgbuilds
  | DontSuppressMakepkg
  | DryRun
  | ForceBuilding
  | HotEdit
  | LowVerbosity
  | NoPkgbuildCheck
  | RebuildDevel
  | SkipDepCheck
  | SortAlphabetically  -- For `-As`
  deriving (Eq, Ord, Show)
-- | Is some Aura-specific setting turned on for this run?
switch :: Settings -> BuildSwitch -> Bool
switch ss bs = bs `S.member` buildSwitchesOf (buildConfigOf ss)
-- | Is some Aura/Pacman common setting turned on for this run?
shared :: Settings -> CommonSwitch -> Bool
shared ss c = c `S.member` commonSwitchesOf (commonConfigOf ss)
-- | The global settings as set by the user with command-line flags.
data Settings = Settings
  { managerOf      :: !Manager
  , envOf          :: !Environment
  , langOf         :: !Language
  , editorOf       :: !FilePath
  , isTerminal     :: !Bool
  , ignoresOf      :: !(Set PkgName)
  , commonConfigOf :: !CommonConfig
  , buildConfigOf  :: !BuildConfig
  , logLevelOf     :: !LogLevel
  , logFuncOf      :: !LogFunc }
  deriving stock (Generic)
-- | Lens onto 'logFuncOf'.
logFuncOfL :: Lens' Settings LogFunc
logFuncOfL f s = fmap (\newLf -> s { logFuncOf = newLf }) (f (logFuncOf s))
-- | Unless otherwise specified, packages will be built within @/tmp@.
-- ('buildPathOf' in 'BuildConfig' is the per-run override.)
defaultBuildDir :: FilePath
defaultBuildDir = "/tmp"
|
aurapm/aura
|
aura/lib/Aura/Settings.hs
|
gpl-3.0
| 5,518
| 0
| 14
| 1,145
| 1,469
| 821
| 648
| 158
| 1
|
{-|
Module : Azubi
Description : Azubi main class is all you need.
Copyright : (c) Ingolf Wagner, 2017
License : GPL-3
Maintainer : azubi@ingolf-wagner.de
Stability : experimental
Portability : POSIX
Example:
@
import Azubi
main :: IO ()
main = azubiMain $ []
& installed (Ebuild "vim")
& uptodate (Git "git@github.com:mrVanDalo\/azubi.git" "\/dev\/shm\/azubi")
& installed (Git "git@github.com:mrVanDalo\/azubi-config.git" "\/dev\/shm\/azubi-config")
& run (Always "touch" ["/dev/shm/run.test"])
& link "\/dev\/shm\/azubi.link" "\/dev\/shm\/azubi"
@
-}
module Azubi
( State(..)
, Ebuild(..)
, Git(..)
, GitOption(..)
, RunCommand(..)
, RunResult(..)
, installed
, Installable
, uptodate
, Updatable
, run
, link
, folderExists
, content
, requires
, submodule
, (&)
, azubiMain
, Command(..)
, Check(..)
) where
import Azubi.Core.Boot
import Azubi.Core.Model
import Azubi.Module.Installable
import Azubi.Module.Runable
import Azubi.Syntax
|
mrVanDalo/azubi
|
src/Azubi.hs
|
gpl-3.0
| 1,087
| 0
| 5
| 270
| 130
| 90
| 40
| 26
| 0
|
{-# LANGUAGE DoAndIfThenElse #-}
-- Copyright 2013 Gushcha Anton
-- This file is part of PowerCom.
--
-- PowerCom is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- PowerCom is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with PowerCom. If not, see <http://www.gnu.org/licenses/>.
module Parser.GraphLoader(
loadGraphFromSCV
) where
import Data.WeightedGraph
import Text.Parsec
import Text.Parsec.Extra
import Data.Text
import Data.Functor ((<$>))
import Debug.Trace (trace)
-- | Parser over 'String' input with a 'Char' user state.
-- NOTE(review): the state appears unused by the combinators below; the
-- callers seed it with ' '.
type CSVParser a = Parsec String Char a
-- | A whole file: zero or more lines, then end of input.
csv :: CSVParser [(String, String, Int)]
csv = do
    rows <- many line
    eof
    return rows
-- | One record: the three cells followed by an end of line.
line :: CSVParser (String, String, Int)
line = do
    record <- cells
    eol
    return record
-- | Everything up to (and consuming) the first tab.
firstCell :: CSVParser String
firstCell = manyTill anyChar tab
-- | Everything up to (and consuming) the next comma.
secondCell :: CSVParser String
secondCell = manyTill anyChar (char ',')
-- | Parse a cell of the form @name(digits)@, optionally followed by @*@,
-- returning the name and the numeric value.
parseInnerCell :: CSVParser (String, Int)
parseInnerCell = do
    -- Consume characters until a '(' that is immediately followed by a digit.
    body <- manyTill anyChar $ try $ do
        char '('
        notFollowedBy (noneOf "0123456789")
    vals <- many Text.Parsec.digit
    -- NOTE(review): 'read' would error on an empty digit string; the
    -- notFollowedBy guard above is what guarantees at least one digit.
    let val = read vals
    _ <- char ')'
    _ <- optional (char '*')
    return (body, val)
-- | Parse the three cells of a record. Exactly one of the second/third
-- raw cells must be the placeholder "()"; the weighted edge data
-- @name(weight)@ is then extracted from the other one via 'parseInnerCell'.
cells :: CSVParser (String, String, Int)
cells = do
    first <- firstCell
    second <- secondCell
    _ <- tab
    third <- secondCell
    if second == "()" then
        -- ' ' is the (unused) initial user state for the inner parse.
        case runParser parseInnerCell ' ' "" third of
            Right (second', val) -> return (first, second', val)
            Left err -> fail $ show err
    else if third == "()" then
        case runParser parseInnerCell ' ' "" second of
            Right (second', val) -> return (first, second', val)
            Left err -> fail $ show err
    else fail "Unexpected format!"
-- | Load a weighted graph from the given CSV-like file, failing (in IO)
-- with the parse error if the file does not match the expected format.
loadGraphFromSCV :: FilePath -> IO (Graph String Int)
loadGraphFromSCV fileName = do
    parsed <- runParser csv ' ' "" <$> readFile fileName
    case parsed of
        Left err -> fail $ show err
        Right edges -> return $ Prelude.foldl graphAddEdge emptyGraph edges
|
NCrashed/Kaissa
|
source/Parser/GraphLoader.hs
|
gpl-3.0
| 2,457
| 0
| 13
| 561
| 632
| 323
| 309
| 58
| 5
|
{-# LANGUAGE GADTs,
MultiParamTypeClasses #-}
module Scales where
import Music (Scale(..),
AbstractPitch1(..), AbstractInt1(..),
AbstractPitch2(..), AbstractInt2(..),
Interval(..), Pitch(..), Transpose(..),
faInt, faPitch,
Name(..), Number(..), Quality(..), Accidental(..), Ficta(..))
import Shortcuts
import Util (rotate, rotateN)
-- | Existential wrapper hiding a concrete scale type behind the 'Scale' class.
data GenericScale where
  GenericScale :: Scale s p i => s -> GenericScale
-- todo: represent/enforce scale length(s) with type-level Nats.
-- todo: make the basic scale type a list of *intervals* (not pitches). e.g. baseMajor = [M2, M2, m2, M2, M2, M2, m2] etc.
-- | Interpret a ficta marking as an accidental transformation.
ficToAcc fic = case fic of
  Raise -> sharpen
  Neutral -> id
  Lower -> flatten
-- | Extend a 7-note scale to an infinite list of pitches: upward (octave
-- by octave) for non-negative i, downward for negative i. The list @c@ is
-- defined in terms of itself (knot-tying), transposing each previous
-- octave by a compound unison in the appropriate direction; the downward
-- case is additionally reversed and prefixed with the original tonic.
completeScale s i = let c = if i >= 0
                            then scale s ++
                                 map (transpose (AbstractInt2 Perf (Compound Unison))) c
                            else map (transpose (AbstractInt2 Perf (Negative (Compound Unison)))) (reverse (scale s)) ++
                                 map (transpose (AbstractInt2 Perf (Negative (Compound Unison)))) c
                    in if i >= 0
                       then c
                       else (head (scale s)) : c
-- | The infinite ascending extension of a scale.
infiniteScale s = completeScale s 1
-- | Look up a scale degree: index into the (upward or downward) infinite
-- scale by the degree's enum value, then apply its ficta as an accidental.
-- NOTE(review): negative degrees index the downward scale via 'abs'.
scaleDegree s (AbstractPitch1 deg fic) =
  let i = fromEnum deg
      index = abs i
      note = (completeScale s i) !! index
  in (ficToAcc fic) note
-- Ionian | Hypoionian | Aeolian | Hypoaeolian | Dorian | Phrygian | Lydian | Mixolydian | Hypodorian | Hypophrygian | Hypolydian | Hypomixolydian | Locrian | Hypolocrian
-- | Shift every pitch of a scale by the interval taking @base@ to @new@.
transposeScale orig base new = map (transpose (interval base new)) orig
-- Diatonic:
-- The white-note scale starting on C; each church mode below is a
-- rotation of it, transposed so its own tonic lines up.
basicIonian = map (\n -> AbstractPitch2 n Na) [C .. ]
data Ionian = Ionian AbstractPitch2 deriving Show
type Major = Ionian
instance Scale Ionian AbstractPitch1 AbstractInt1 where
  tonic (Ionian t) = t
  scale s = take 7 $ transposeScale basicIonian (AbstractPitch2 C Na) (tonic s)
  applyScale = scaleDegree
data Dorian = Dorian AbstractPitch2 deriving Show
instance Scale Dorian AbstractPitch1 AbstractInt1 where
  tonic (Dorian t) = t
  scale s = take 7 $ transposeScale (rotate basicIonian) (AbstractPitch2 D Na) (tonic s)
  applyScale = scaleDegree
data Phrygian = Phrygian AbstractPitch2 deriving Show
instance Scale Phrygian AbstractPitch1 AbstractInt1 where
  tonic (Phrygian t) = t
  scale s = take 7 $ transposeScale ((rotateN 2) basicIonian) (AbstractPitch2 E Na) (tonic s)
  applyScale = scaleDegree
data Lydian = Lydian AbstractPitch2 deriving Show
instance Scale Lydian AbstractPitch1 AbstractInt1 where
  tonic (Lydian t) = t
  scale s = take 7 $ transposeScale ((rotateN 3) basicIonian) (AbstractPitch2 F Na) (tonic s)
  applyScale = scaleDegree
data Mixolydian = Mixolydian AbstractPitch2 deriving Show
instance Scale Mixolydian AbstractPitch1 AbstractInt1 where
  tonic (Mixolydian t) = t
  scale s = take 7 $ transposeScale ((rotateN 4) basicIonian) (AbstractPitch2 G Na) (tonic s)
  applyScale = scaleDegree
data Aeolian = Aeolian AbstractPitch2 deriving Show
type Minor = Aeolian
instance Scale Aeolian AbstractPitch1 AbstractInt1 where
  tonic (Aeolian t) = t
  scale s = take 7 $ transposeScale ((rotateN 5) basicIonian) (AbstractPitch2 A Na) (tonic s)
  applyScale = scaleDegree
data Locrian = Locrian AbstractPitch2 deriving Show
instance Scale Locrian AbstractPitch1 AbstractInt1 where
  tonic (Locrian t) = t
  scale s = take 7 $ transposeScale ((rotateN 6) basicIonian) (AbstractPitch2 B Na) (tonic s)
  applyScale = scaleDegree
-- Melodic minor scales:
-- Each "basic*" list is a self-referential infinite list: the written
-- octave followed by the same list transposed up a compound unison.
basicMelodicMinor = [AbstractPitch2 C Na,
                     AbstractPitch2 D Na,
                     AbstractPitch2 E flat,
                     AbstractPitch2 F Na,
                     AbstractPitch2 G Na,
                     AbstractPitch2 (Up A) Na,
                     AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicMelodicMinor
data MelodicMinor = MelodicMinor AbstractPitch2 deriving Show
instance Scale MelodicMinor AbstractPitch1 AbstractInt1 where
  tonic (MelodicMinor t) = t
  scale s = take 7 $ transposeScale basicMelodicMinor (AbstractPitch2 C Na) (tonic s)
  applyScale = scaleDegree
-- Harmonic major scales:
basicHarmonicMajor = [AbstractPitch2 C Na,
                      AbstractPitch2 D Na,
                      AbstractPitch2 E Na,
                      AbstractPitch2 F Na,
                      AbstractPitch2 G Na,
                      AbstractPitch2 (Up A) flat,
                      AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicHarmonicMajor
-- Harmonic minor scales:
basicHarmonicMinor = [AbstractPitch2 C Na,
                      AbstractPitch2 D Na,
                      AbstractPitch2 E flat,
                      AbstractPitch2 F Na,
                      AbstractPitch2 G Na,
                      AbstractPitch2 (Up A) flat,
                      AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicHarmonicMinor
-- The 5th rotation of the harmonic minor scale, rooted on G.
data AlteredPhrygian = AlteredPhrygian AbstractPitch2 deriving Show
instance Scale AlteredPhrygian AbstractPitch1 AbstractInt1 where
  tonic (AlteredPhrygian t) = t
  scale s = take 7 $ transposeScale (rotateN 4 basicHarmonicMinor) (AbstractPitch2 G Na) (tonic s)
  applyScale = scaleDegree
data HarmonicMinor = HarmonicMinor AbstractPitch2 deriving Show
instance Scale HarmonicMinor AbstractPitch1 AbstractInt1 where
  tonic (HarmonicMinor t) = t
  scale s = take 7 $ transposeScale basicHarmonicMinor (AbstractPitch2 C Na) (tonic s)
  applyScale = scaleDegree
-- Double harmonic scales:
basicDoubleHarmonic = [AbstractPitch2 C Na,
                       AbstractPitch2 D flat,
                       AbstractPitch2 E Na,
                       AbstractPitch2 F Na,
                       AbstractPitch2 G Na,
                       AbstractPitch2 (Up A) flat,
                       AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicDoubleHarmonic
-- | Smart constructors for the common scales.
major :: AbstractPitch2 -> Major
major = Ionian
-- | Natural minor; the tonic is taken down an octave.
minor :: AbstractPitch2 -> Minor
minor n = Aeolian (n .-^ octave)
harmonicminor :: AbstractPitch2 -> HarmonicMinor
harmonicminor = HarmonicMinor
melodicminor :: AbstractPitch2 -> MelodicMinor
melodicminor = MelodicMinor
-- | Infinite ascending chromatic scale from the given pitch: naturals are
-- sharpened in place, except after B, E, or an already-sharp note, where
-- the letter name advances instead.
-- NOTE(review): relies on 'succ' being defined past B in 'Name''s Enum
-- instance (the 'Up' constructor suggests it wraps) -- confirm before reuse.
chromaticScale p@(AbstractPitch2 n a)
  | (n == B) || (n == E) || (a == sharp) = p:(chromaticScale (AbstractPitch2 (succ n) Na))
  | otherwise = p:(chromaticScale (AbstractPitch2 n sharp))
-- Modal:
-- modeII = [(D, Na), (E, Na), (F, Na), (G, Na), (A, Na), (B, Na), (C, Na)]
-- modeIII
-- modeIV
-- modeV
-- modeVI
-- modeVII
-- modeVIII
-- Messiaen's scales:
-- mode1 = [2,2,2,2,2]
-- mode2 = [1,2, 1,2, 1,2, 1,2]
-- mode3 = [2,1,1, 2,1,1, 2,1,1]
-- mode4 = [1,1,3,1, 1,1,3,1]
-- mode5 = [1,4,1, 1,4,1]
-- mode6 = [2,2,1,1, 2,2,1,1]
-- mode7 = [1,1,1,2,1, 1,1,1,2,1]
-- (measured in semitones)
-- | The six-note hexachord on a pitch: the pitch itself plus the notes a
-- M2, M3, P4, P5 and M6 above it.
hexachord :: AbstractPitch2 -> [AbstractPitch2]
hexachord p = p : map (p .+^) [_M2, _M3, _P4, _P5, _M6]
-- Guidonian Hexachords:
-- Seven singleton scale types differing only in their tonic; each scale is
-- 'hexachord' applied to that tonic. (Unused `tonic s` bindings replaced
-- by `_` to silence -Wall and match the other instances.)
data HexachordPrima = HexachordPrima deriving Show
instance Scale HexachordPrima AbstractPitch1 AbstractInt1 where
  tonic HexachordPrima = g .-^ (3 *^ _P8)
  scale s = hexachord (tonic s)
  applyScale = scaleDegree
data HexachordSecunda = HexachordSecunda deriving Show
instance Scale HexachordSecunda AbstractPitch1 AbstractInt1 where
  tonic _ = c .-^ (2 *^ _P8)
  scale s = hexachord (tonic s)
  applyScale = scaleDegree
data HexachordTertia = HexachordTertia deriving Show
instance Scale HexachordTertia AbstractPitch1 AbstractInt1 where
  tonic _ = f .-^ (2 *^ _P8)
  scale s = hexachord (tonic s)
  applyScale = scaleDegree
data HexachordQuarta = HexachordQuarta deriving Show
instance Scale HexachordQuarta AbstractPitch1 AbstractInt1 where
  tonic _ = g .-^ (2 *^ _P8)
  scale s = hexachord (tonic s)
  applyScale = scaleDegree
data HexachordQuinta = HexachordQuinta deriving Show
instance Scale HexachordQuinta AbstractPitch1 AbstractInt1 where
  tonic _ = c .-^ _P8
  scale s = hexachord (tonic s)
  applyScale = scaleDegree
data HexachordSexta = HexachordSexta deriving Show
instance Scale HexachordSexta AbstractPitch1 AbstractInt1 where
  tonic _ = f .-^ _P8
  scale s = hexachord (tonic s)
  applyScale = scaleDegree
data HexachordSeptima = HexachordSeptima deriving Show
instance Scale HexachordSeptima AbstractPitch1 AbstractInt1 where
  tonic _ = g .-^ _P8
  scale s = hexachord (tonic s)
  applyScale = scaleDegree
|
ejlilley/AbstractMusic
|
Scales.hs
|
gpl-3.0
| 8,668
| 0
| 19
| 2,179
| 2,465
| 1,290
| 1,175
| 161
| 3
|
module Hadolint.Rule.DL3010 (rule) where
import Data.Foldable (toList)
import qualified Data.Text as Text
import Hadolint.Rule
import Language.Docker.Syntax
rule :: Rule args
rule = simpleRule code severity message check
  where
    code = "DL3010"
    severity = DLInfoC
    message = "Use ADD for extracting archives into an image"

    -- A plain-source COPY passes only when no source path ends in a
    -- known archive extension; everything else is fine as-is.
    check (Copy (CopyArgs srcs _ _ _ NoSource)) =
      null
        [ ()
        | SourcePath src <- toList srcs
        , ext <- archiveFileFormatExtensions
        , ext `Text.isSuffixOf` src
        ]
    check _ = True
{-# INLINEABLE rule #-}
|
lukasmartinelli/hadolint
|
src/Hadolint/Rule/DL3010.hs
|
gpl-3.0
| 587
| 0
| 11
| 145
| 159
| 88
| 71
| 16
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionInstanceGroupManagers.SetInstanceTemplate
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Sets the instance template to use when creating new instances or
-- recreating instances in this group. Existing instances are not affected.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionInstanceGroupManagers.setInstanceTemplate@.
module Network.Google.Resource.Compute.RegionInstanceGroupManagers.SetInstanceTemplate
(
-- * REST Resource
RegionInstanceGroupManagersSetInstanceTemplateResource
-- * Creating a Request
, regionInstanceGroupManagersSetInstanceTemplate
, RegionInstanceGroupManagersSetInstanceTemplate
-- * Request Lenses
, rigmsitProject
, rigmsitInstanceGroupManager
, rigmsitPayload
, rigmsitRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionInstanceGroupManagers.setInstanceTemplate@ method which the
-- 'RegionInstanceGroupManagersSetInstanceTemplate' request conforms to.
-- The servant-style layout below spells out the REST endpoint:
-- POST compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate
type RegionInstanceGroupManagersSetInstanceTemplateResource
     =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "regions" :>
               Capture "region" Text :>
                 "instanceGroupManagers" :>
                   Capture "instanceGroupManager" Text :>
                     "setInstanceTemplate" :>
                       QueryParam "alt" AltJSON :>
                         ReqBody '[JSON]
                           RegionInstanceGroupManagersSetTemplateRequest
                           :> Post '[JSON] Operation
-- | Sets the instance template to use when creating new instances or
-- recreating instances in this group. Existing instances are not affected.
--
-- /See:/ 'regionInstanceGroupManagersSetInstanceTemplate' smart constructor.
data RegionInstanceGroupManagersSetInstanceTemplate = RegionInstanceGroupManagersSetInstanceTemplate'
    { _rigmsitProject :: !Text
      -- ^ Project ID for this request.
    , _rigmsitInstanceGroupManager :: !Text
      -- ^ The name of the managed instance group.
    , _rigmsitPayload :: !RegionInstanceGroupManagersSetTemplateRequest
      -- ^ Multipart request metadata.
    , _rigmsitRegion :: !Text
      -- ^ Name of the region scoping this request.
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RegionInstanceGroupManagersSetInstanceTemplate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rigmsitProject'
--
-- * 'rigmsitInstanceGroupManager'
--
-- * 'rigmsitPayload'
--
-- * 'rigmsitRegion'
regionInstanceGroupManagersSetInstanceTemplate
    :: Text -- ^ 'rigmsitProject'
    -> Text -- ^ 'rigmsitInstanceGroupManager'
    -> RegionInstanceGroupManagersSetTemplateRequest -- ^ 'rigmsitPayload'
    -> Text -- ^ 'rigmsitRegion'
    -> RegionInstanceGroupManagersSetInstanceTemplate
regionInstanceGroupManagersSetInstanceTemplate project igm payload region =
    RegionInstanceGroupManagersSetInstanceTemplate'
        { _rigmsitProject = project
        , _rigmsitInstanceGroupManager = igm
        , _rigmsitPayload = payload
        , _rigmsitRegion = region
        }
-- | Project ID for this request.
rigmsitProject :: Lens' RegionInstanceGroupManagersSetInstanceTemplate Text
rigmsitProject = lens _rigmsitProject (\st v -> st {_rigmsitProject = v})

-- | The name of the managed instance group.
rigmsitInstanceGroupManager :: Lens' RegionInstanceGroupManagersSetInstanceTemplate Text
rigmsitInstanceGroupManager =
    lens _rigmsitInstanceGroupManager
         (\st v -> st {_rigmsitInstanceGroupManager = v})

-- | Multipart request metadata.
rigmsitPayload :: Lens' RegionInstanceGroupManagersSetInstanceTemplate RegionInstanceGroupManagersSetTemplateRequest
rigmsitPayload = lens _rigmsitPayload (\st v -> st {_rigmsitPayload = v})

-- | Name of the region scoping this request.
rigmsitRegion :: Lens' RegionInstanceGroupManagersSetInstanceTemplate Text
rigmsitRegion = lens _rigmsitRegion (\st v -> st {_rigmsitRegion = v})
instance GoogleRequest
           RegionInstanceGroupManagersSetInstanceTemplate where
        -- The response body is decoded as an 'Operation'.
        type Rs
               RegionInstanceGroupManagersSetInstanceTemplate
             = Operation
        -- OAuth scopes accepted for this method.
        type Scopes
               RegionInstanceGroupManagersSetInstanceTemplate
             =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute"]
        -- Feed the record's fields to the client builder in the order the
        -- 'RegionInstanceGroupManagersSetInstanceTemplateResource' type expects.
        requestClient
          RegionInstanceGroupManagersSetInstanceTemplate'{..}
          = go _rigmsitProject _rigmsitRegion
              _rigmsitInstanceGroupManager
              (Just AltJSON)
              _rigmsitPayload
              computeService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           RegionInstanceGroupManagersSetInstanceTemplateResource)
                      mempty
|
rueshyna/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/RegionInstanceGroupManagers/SetInstanceTemplate.hs
|
mpl-2.0
| 5,753
| 0
| 18
| 1,295
| 549
| 326
| 223
| 101
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.FhirStores.Fhir.History
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all the versions of a resource (including the current version and
-- deleted versions) from the FHIR store. Implements the per-resource form
-- of the FHIR standard history interaction
-- ([DSTU2](http:\/\/hl7.org\/implement\/standards\/fhir\/DSTU2\/http.html#history),
-- [STU3](http:\/\/hl7.org\/implement\/standards\/fhir\/STU3\/http.html#history),
-- [R4](http:\/\/hl7.org\/implement\/standards\/fhir\/R4\/http.html#history)).
-- On success, the response body contains a JSON-encoded representation of
-- a \`Bundle\` resource of type \`history\`, containing the version
-- history sorted from most recent to oldest versions. Errors generated by
-- the FHIR store contain a JSON-encoded \`OperationOutcome\` resource
-- describing the reason for the error. If the request cannot be mapped to
-- a valid API method on a FHIR store, a generic GCP error might be
-- returned instead. For samples that show how to call \`history\`, see
-- [Listing FHIR resource
-- versions](\/healthcare\/docs\/how-tos\/fhir-resources#listing_fhir_resource_versions).
--
-- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.fhirStores.fhir.history@.
module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.FhirStores.Fhir.History
(
-- * REST Resource
ProjectsLocationsDataSetsFhirStoresFhirHistoryResource
-- * Creating a Request
, projectsLocationsDataSetsFhirStoresFhirHistory
, ProjectsLocationsDataSetsFhirStoresFhirHistory
-- * Request Lenses
, pldsfsfhXgafv
, pldsfsfhCount
, pldsfsfhUploadProtocol
, pldsfsfhAccessToken
, pldsfsfhUploadType
, pldsfsfhName
, pldsfsfhAt
, pldsfsfhSince
, pldsfsfhPageToken
, pldsfsfhCallback
) where
import Network.Google.Healthcare.Types
import Network.Google.Prelude
-- | A resource alias for @healthcare.projects.locations.datasets.fhirStores.fhir.history@ method which the
-- 'ProjectsLocationsDataSetsFhirStoresFhirHistory' request conforms to.
-- GET v1/{name}/_history — the resource name is captured from the path;
-- everything else is an optional query parameter.
type ProjectsLocationsDataSetsFhirStoresFhirHistoryResource
     =
     "v1" :>
       Capture "name" Text :>
         "_history" :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "_count" (Textual Int32) :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "_at" Text :>
                       QueryParam "_since" Text :>
                         QueryParam "_page_token" Text :>
                           QueryParam "callback" Text :>
                             QueryParam "alt" AltJSON :> Get '[JSON] HTTPBody
-- | Lists all the versions of a resource (including the current version and
-- deleted versions) from the FHIR store. Implements the per-resource form
-- of the FHIR standard history interaction
-- ([DSTU2](http:\/\/hl7.org\/implement\/standards\/fhir\/DSTU2\/http.html#history),
-- [STU3](http:\/\/hl7.org\/implement\/standards\/fhir\/STU3\/http.html#history),
-- [R4](http:\/\/hl7.org\/implement\/standards\/fhir\/R4\/http.html#history)).
-- On success, the response body contains a JSON-encoded representation of
-- a \`Bundle\` resource of type \`history\`, containing the version
-- history sorted from most recent to oldest versions. Errors generated by
-- the FHIR store contain a JSON-encoded \`OperationOutcome\` resource
-- describing the reason for the error. If the request cannot be mapped to
-- a valid API method on a FHIR store, a generic GCP error might be
-- returned instead. For samples that show how to call \`history\`, see
-- [Listing FHIR resource
-- versions](\/healthcare\/docs\/how-tos\/fhir-resources#listing_fhir_resource_versions).
--
-- /See:/ 'projectsLocationsDataSetsFhirStoresFhirHistory' smart constructor.
data ProjectsLocationsDataSetsFhirStoresFhirHistory =
  ProjectsLocationsDataSetsFhirStoresFhirHistory'
    { _pldsfsfhXgafv :: !(Maybe Xgafv)
      -- ^ V1 error format.
    , _pldsfsfhCount :: !(Maybe (Textual Int32))
      -- ^ Maximum number of search results per page.
    , _pldsfsfhUploadProtocol :: !(Maybe Text)
      -- ^ Upload protocol for media (e.g. \"raw\", \"multipart\").
    , _pldsfsfhAccessToken :: !(Maybe Text)
      -- ^ OAuth access token.
    , _pldsfsfhUploadType :: !(Maybe Text)
      -- ^ Legacy upload protocol for media.
    , _pldsfsfhName :: !Text
      -- ^ Name of the resource to retrieve (the only required field).
    , _pldsfsfhAt :: !(Maybe Text)
      -- ^ Restrict to versions current during this time period.
    , _pldsfsfhSince :: !(Maybe Text)
      -- ^ Restrict to versions created at or after this instant.
    , _pldsfsfhPageToken :: !(Maybe Text)
      -- ^ Pagination token from a previous response.
    , _pldsfsfhCallback :: !(Maybe Text)
      -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsDataSetsFhirStoresFhirHistory' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldsfsfhXgafv'
--
-- * 'pldsfsfhCount'
--
-- * 'pldsfsfhUploadProtocol'
--
-- * 'pldsfsfhAccessToken'
--
-- * 'pldsfsfhUploadType'
--
-- * 'pldsfsfhName'
--
-- * 'pldsfsfhAt'
--
-- * 'pldsfsfhSince'
--
-- * 'pldsfsfhPageToken'
--
-- * 'pldsfsfhCallback'
projectsLocationsDataSetsFhirStoresFhirHistory
    :: Text -- ^ 'pldsfsfhName'
    -> ProjectsLocationsDataSetsFhirStoresFhirHistory
projectsLocationsDataSetsFhirStoresFhirHistory name =
    ProjectsLocationsDataSetsFhirStoresFhirHistory'
        { _pldsfsfhXgafv = Nothing
        , _pldsfsfhCount = Nothing
        , _pldsfsfhUploadProtocol = Nothing
        , _pldsfsfhAccessToken = Nothing
        , _pldsfsfhUploadType = Nothing
        , _pldsfsfhName = name
        , _pldsfsfhAt = Nothing
        , _pldsfsfhSince = Nothing
        , _pldsfsfhPageToken = Nothing
        , _pldsfsfhCallback = Nothing
        }
-- | V1 error format.
pldsfsfhXgafv :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory (Maybe Xgafv)
pldsfsfhXgafv = lens _pldsfsfhXgafv (\st v -> st {_pldsfsfhXgafv = v})

-- | The maximum number of search results on a page. If not specified, 100 is
-- used. May not be larger than 1000.
pldsfsfhCount :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory (Maybe Int32)
pldsfsfhCount =
    lens _pldsfsfhCount (\st v -> st {_pldsfsfhCount = v}) . mapping _Coerce

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldsfsfhUploadProtocol :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory (Maybe Text)
pldsfsfhUploadProtocol =
    lens _pldsfsfhUploadProtocol (\st v -> st {_pldsfsfhUploadProtocol = v})

-- | OAuth access token.
pldsfsfhAccessToken :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory (Maybe Text)
pldsfsfhAccessToken =
    lens _pldsfsfhAccessToken (\st v -> st {_pldsfsfhAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldsfsfhUploadType :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory (Maybe Text)
pldsfsfhUploadType =
    lens _pldsfsfhUploadType (\st v -> st {_pldsfsfhUploadType = v})

-- | The name of the resource to retrieve.
pldsfsfhName :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory Text
pldsfsfhName = lens _pldsfsfhName (\st v -> st {_pldsfsfhName = v})

-- | Only include resource versions that were current at some point during
-- the time period specified in the date time value. The date parameter
-- format is yyyy-mm-ddThh:mm:ss[Z|(+|-)hh:mm] Clients may specify any of
-- the following: * An entire year: \`_at=2019\` * An entire month:
-- \`_at=2019-01\` * A specific day: \`_at=2019-01-20\` * A specific
-- second: \`_at=2018-12-31T23:59:58Z\`
pldsfsfhAt :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory (Maybe Text)
pldsfsfhAt = lens _pldsfsfhAt (\st v -> st {_pldsfsfhAt = v})

-- | Only include resource versions that were created at or after the given
-- instant in time. The instant in time uses the format
-- YYYY-MM-DDThh:mm:ss.sss+zz:zz (for example 2015-02-07T13:28:17.239+02:00
-- or 2017-01-01T00:00:00Z). The time must be specified to the second and
-- include a time zone.
pldsfsfhSince :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory (Maybe Text)
pldsfsfhSince = lens _pldsfsfhSince (\st v -> st {_pldsfsfhSince = v})

-- | Used to retrieve the first, previous, next, or last page of resource
-- versions when using pagination. Value should be set to the value of
-- \`_page_token\` set in next or previous page links\' URLs. Next and
-- previous page are returned in the response bundle\'s links field, where
-- \`link.relation\` is \"previous\" or \"next\". Omit \`_page_token\` if
-- no previous request has been made.
pldsfsfhPageToken :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory (Maybe Text)
pldsfsfhPageToken =
    lens _pldsfsfhPageToken (\st v -> st {_pldsfsfhPageToken = v})

-- | JSONP
pldsfsfhCallback :: Lens' ProjectsLocationsDataSetsFhirStoresFhirHistory (Maybe Text)
pldsfsfhCallback =
    lens _pldsfsfhCallback (\st v -> st {_pldsfsfhCallback = v})
instance GoogleRequest
           ProjectsLocationsDataSetsFhirStoresFhirHistory
         where
        -- The response is returned verbatim as an 'HTTPBody'
        -- (a JSON-encoded FHIR Bundle per the module docs above).
        type Rs
               ProjectsLocationsDataSetsFhirStoresFhirHistory
             = HTTPBody
        -- OAuth scope accepted for this method.
        type Scopes
               ProjectsLocationsDataSetsFhirStoresFhirHistory
             = '["https://www.googleapis.com/auth/cloud-platform"]
        -- Feed the record's fields to the client builder in the order the
        -- 'ProjectsLocationsDataSetsFhirStoresFhirHistoryResource' type expects.
        requestClient
          ProjectsLocationsDataSetsFhirStoresFhirHistory'{..}
          = go _pldsfsfhName _pldsfsfhXgafv _pldsfsfhCount
              _pldsfsfhUploadProtocol
              _pldsfsfhAccessToken
              _pldsfsfhUploadType
              _pldsfsfhAt
              _pldsfsfhSince
              _pldsfsfhPageToken
              _pldsfsfhCallback
              (Just AltJSON)
              healthcareService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           ProjectsLocationsDataSetsFhirStoresFhirHistoryResource)
                      mempty
|
brendanhay/gogol
|
gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/FhirStores/Fhir/History.hs
|
mpl-2.0
| 10,458
| 0
| 20
| 2,027
| 1,084
| 643
| 441
| 156
| 1
|
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Action.Route
( Method(..)
, ActionRoute
, actionURL
, actionURI
, actionMethod
, action
, multipartAction
, API(..)
, pathHTML
, pathJSON
, pathAPI
) where
import qualified Data.ByteString.Builder as BSB
import qualified Data.Invertible as I
import Network.HTTP.Types (Query)
import Network.URI (URI(..))
import qualified Web.Route.Invertible as R
import Web.Route.Invertible (Method(..))
import HTTP.Request
import HTTP.Route
import HTTP.Path.Parser
import Action.Run
-- | A 'R.RouteAction' (library code) that holds an 'Action' (Databrary code).
-- The type parameter @a@ is the type of the values captured from a route
-- description (e.g. a route like @\/foo\/:int@ would capture an 'Int').
type ActionRoute a = R.RouteAction a Action
-- | Render a route as a URL 'BSB.Builder'.  Only GET routes can be
-- rendered as plain URLs; anything else is a programming error.
actionURL :: Maybe Request -> R.RouteAction r a -> r -> Query -> BSB.Builder
actionURL mreq ra params query
  | R.requestMethod req /= GET = error $ "actionURL: " ++ show req
  | otherwise = routeURL mreq req query
  where req = R.requestActionRoute ra params
-- | Render a route as a 'URI'; like 'actionURL', only GET routes qualify.
actionURI :: Maybe Request -> R.RouteAction r a -> r -> Query -> URI
actionURI mreq ra params query
  | R.requestMethod req /= GET = error $ "actionURI: " ++ show req
  | otherwise = routeURI mreq req query
  where req = R.requestActionRoute ra params
-- | The HTTP method a route responds to.
actionMethod :: R.RouteAction r a -> r -> Method
actionMethod ra params = R.requestMethod (R.requestActionRoute ra params)
-- | A shortcut for specifying route actions.
action
  :: Method -- ^ HTTP method to handle
  -> PathParser r -- ^ Path to handle (r holds the captured elements)
  -> (r -> a) -- ^ Action to build the response (a)
  -> R.RouteAction r a -- ^ The complete, built route/action specifier.
action method path handler =
  R.RouteAction (R.routePath path R.>* R.routeMethod method) handler
-- | Additionally require (or tolerate) a multipart\/form-data body on a route.
multipartAction :: R.RouteAction q a -> R.RouteAction q a
multipartAction (R.RouteAction rt act) =
  R.RouteAction (rt R.>* (R.routeAccept "multipart/form-data" R.>| R.unit)) act
-- | Which representation a route serves: rendered HTML or the JSON API.
data API
  = HTML
  | JSON
  deriving (Eq)
-- | HTML routes carry no extra path prefix.
pathHTML :: PathParser ()
pathHTML = R.unit

-- | JSON routes live under the \"api\" path segment.
pathJSON :: PathParser ()
pathJSON = "api"
-- | Dispatch on the optional \"api\" prefix: 'pathJSON' (the left branch)
-- maps to 'JSON', otherwise 'pathHTML' maps to 'HTML'.
pathAPI :: PathParser API
pathAPI = [I.biCase|Left () <-> JSON ; Right () <-> HTML|] R.>$< (pathJSON R.>|< pathHTML)
|
databrary/databrary
|
src/Action/Route.hs
|
agpl-3.0
| 2,223
| 0
| 11
| 414
| 628
| 341
| 287
| 56
| 1
|
{-#LANGUAGE DeriveDataTypeable#-}
module Data.P440.Domain.BZ where
import Data.P440.Domain.SimpleTypes
import Data.P440.Domain.ComplexTypes
import Data.P440.Domain.ComplexTypesZS
import Data.Typeable (Typeable)
import Data.Text (Text)
-- 3.6 Запрос на повторное получение квитанции
-- (3.6: request to re-receive a receipt/acknowledgement)
-- Top-level message envelope.  Field names are Russian abbreviations;
-- the English glosses below are best-effort translations — verify
-- against the 440-P specification.
data Файл = Файл {
     идЭС :: GUID            -- ^ electronic-document GUID
    ,версПрог :: Text        -- ^ sender software version
    ,телОтпр :: Text         -- ^ sender's telephone
    ,должнОтпр :: Text       -- ^ sender's job title
    ,фамОтпр :: Text         -- ^ sender's surname
    ,запросКвит :: ЗапросКвит -- ^ the receipt re-request payload
} deriving (Eq, Show, Typeable)
-- Receipt re-request body.  English glosses are best-effort
-- translations of the Russian field names — confirm against the spec.
data ЗапросКвит = ЗапросКвит {
     имяФайла :: Text                 -- ^ name of the original file
    ,датаПодп :: Date                 -- ^ signing date
    ,свБанкИлиСвУБР :: СвБанкИлиСвУБР -- ^ bank details or Bank-of-Russia settlement unit details
    ,предБанка :: ПредБ               -- ^ bank representative
} deriving (Eq, Show, Typeable)
|
Macil-dev/440P-old
|
src/Data/P440/Domain/BZ.hs
|
unlicense
| 831
| 14
| 7
| 134
| 198
| 121
| 77
| 21
| 0
|
module Main where
import Kernel
import Prover.ProofState
import Prover.Goal
import Parser.Token
import Parser.PreTypedTerm
import Parser.InputLine
import Parser.Command
import Data.IORef
import Control.Monad.Reader
import System.Console.Haskeline
import Control.Applicative hiding (many)
import Text.Parsec hiding ((<|>))
import System.Console.GetOpt
import System.Environment
-- | Command table for the interactive prover: one parser per command.
-- Each entry parses the command name plus its arguments and yields a
-- 'ProverCommand' whose 'Bool' result tells the REPL whether to quit
-- (only @Quit@ returns 'True').
commands :: [Parsec String () (ProverCommand Bool)]
commands = [ command "Quit" quit True
           , c "Qed" $ return qed
           , c "Lemma" (lemma <$> pIdent <*> pTerm)
           , c "UVar" (uvar <$> pIdent <*> pUniv)
           , c "intro" (intro <$> pIdent)
           , c "intros" (intros <$> many1 pIdent)
           , c "elim_eq" (elim_eq <$> pIdent <*> pTerm)
           , c "exact" (exact <$> pTerm)
           , c "trans" (trans <$> pTerm)
           , c "sym" $ return sym
           , c "f_equal_1" $ return f_equal_1
           , c "f_equal_2" $ return f_equal_2
           ]
  where
    -- Parse @name@, run its argument parser @f@, and require end of
    -- input; 'try' makes a failed name match backtrack so the next
    -- table entry can be attempted.
    command :: String -> Parsec String () (ProverCommand ()) -> Bool -> Parsec String () (ProverCommand Bool)
    command name f isQuit = try $ do spaces >> string name >> whiteSpace
                                     cmd <- f
                                     eof
                                     return $ cmd >> return isQuit
    -- Shorthand for commands that never quit the session.
    c :: String -> Parsec String () (ProverCommand ()) -> Parsec String () (ProverCommand Bool)
    c name f = command name f False
-- | The @Quit@ command: print a farewell message and do nothing else
-- (the REPL stops because 'commands' tags this entry with 'True').
quit :: Parsec String () (ProverCommand ())
quit = return $ do
  -- Fixed typo in the user-facing message ("Quiting" -> "Quitting").
  lift $ putStrLn "Quitting."
  return ()
-- | Entry point: build the initial global state, then run the REPL once
-- per input source — interactive stdin by default, otherwise each file
-- named on the command line.
main :: IO ()
main = do
  g <- initGlobal
  args <- getArgs
  let inputs = case args of
        [] -> [defaultBehavior]
        files -> map useFile files
  forM_ inputs (\b -> runInputTBehavior b defaultSettings (mainLoop g))
-- | The prover read-eval loop.  Input lines are split into commands by
-- 'splitInput'; an unterminated trailing command is carried over to the
-- next line via the @rest@ accumulator.
mainLoop :: IORef GlobalState -> InputT IO ()
mainLoop gs = loop gs ""
  where
    -- @rest@ holds the partial command left over from the previous line.
    loop :: IORef GlobalState -> String -> InputT IO ()
    loop g rest = do
      minput <- getInputLine "ToyPr> "
      case minput of
        Nothing -> return ()
        Just input -> do -- NOTE(review): the two "Input was:" echoes below
                         -- look like leftover debug output — confirm
                         -- before removing.
                         outputStrLn $ "Input was: " ++ input
                         let (cmds, rest') = splitInput (rest ++ input)
                         outputStrLn $ "Input was: " ++ show cmds ++ rest'
                         isquit <- lift $ runCmds g cmds
                         unless isquit $ loop g rest'
    -- Run commands in order, stopping as soon as one requests quitting.
    runCmds _ [] = return False
    runCmds g (c:cs) = do
      isquit <- runCmd g c
      if isquit then return True else runCmds g cs
    -- Parse and execute one command.  NOTE(review): a parse failure
    -- returns True, i.e. any unknown command terminates the whole
    -- session — verify this is intended rather than returning False.
    runCmd g cmd = case runParser p () "" cmd of
      Left err -> do
        putStrLn $ "Unknown command: " ++ show err
        return True
      Right c ->
        runReaderT c g
    p = choice commands
|
kik/ToyPr
|
main/Main.hs
|
apache-2.0
| 2,742
| 0
| 19
| 933
| 937
| 462
| 475
| 70
| 5
|
module Main where
import qualified Network.AWS.Prelude as AwsPrelude
import Turtle
import Aws
import Utils
import Types
import Data.Traversable as Traversable
import Data.Text as Text
-- One-line program description shown by the options parser.
description = "CLI tool for querying amazon EC2 Spot prices."

-- Command-line options: repeatable region, availability-zone,
-- instance-type and consumer flags, each collected as a list of raw
-- 'Text' values to be parsed in 'main'.
spotPricesArgs = (,,,) <$> (many (optText "region" 'r' "Region for spot price lookup. etc: us-east-1 , NorthVirginia"))
                       <*> (many (optText "zone" 'z' "Availability zone for spot price lookup. etc: us-east-1e"))
                       <*> (many (optText "instancetype" 't' "Instance type for spot price lookup."))
                       <*> (many (optText "consumer" 'c' "Consumer of call (values: script, human, scriptall)"))
main :: IO ()
main = do
  (rrs, rzs, rts, rcs) <- options description spotPricesArgs
  -- Parse the raw option strings; each result is Either a parse error
  -- or the decoded values.
  let rs = fmap (fmap readRegion) $ Traversable.traverse eitherRead rrs
  let ts = fmap (fmap readInstanceType) $ Traversable.traverse eitherRead rts
  let cs = Traversable.traverse eitherRead rcs
  -- NOTE(review): region/instance-type parse failures 'die' here with
  -- their raw error, while consumer failures get the friendlier message
  -- below — confirm the asymmetry is intended.
  ps <- either die id (getSpotPrices <$> rs <*> pure rzs <*> ts )
  -- Only the first consumer flag is honoured; anything else falls back
  -- to the human-readable printer.
  case cs of
    Left _ -> die $ Text.pack ("Cannot parse consumer value: " <> show (rcs) <> " value must be \"script\" , \"human\" \"scriptall\"")
    Right (Script: _) -> printSpotPricesScript ps
    Right (ScriptAll: _) -> printSpotPricesScriptAll ps
    _ -> printSpotPrices ps
|
huseyinyilmaz/spotprices
|
src/Main.hs
|
apache-2.0
| 1,294
| 0
| 16
| 250
| 368
| 189
| 179
| 25
| 4
|
-- |
-- Module : Text.Megaparsec.Combinator
-- Copyright : © 2015 Megaparsec contributors
-- © 2007 Paolo Martini
-- © 1999–2001 Daan Leijen
-- License : BSD3
--
-- Maintainer : Mark Karpov <markkarpov@opmbx.org>
-- Stability : experimental
-- Portability : portable
--
-- Commonly used generic combinators. Note that all combinators works with
-- any 'Alternative' instances.
module Text.Megaparsec.Combinator
( between
, choice
, count
, count'
, endBy
, endBy1
, manyTill
, someTill
, option
, sepBy
, sepBy1
, skipMany
, skipSome
-- Deprecated combinators
, chainl
, chainl1
, chainr
, chainr1
, sepEndBy
, sepEndBy1 )
where
import Control.Applicative
import Control.Monad (void)
import Data.Foldable (asum)
-- | @between open close p@ parses @open@, followed by @p@ and @close@.
-- Returns the value returned by @p@.
--
-- > braces = between (symbol "{") (symbol "}")
between :: Applicative m => m open -> m close -> m a -> m a
between o c body = o *> body <* c
{-# INLINE between #-}
-- | @choice ps@ tries to apply the parsers in the list @ps@ in order,
-- until one of them succeeds. Returns the value of the succeeding parser.
choice :: (Foldable f, Alternative m) => f (m a) -> m a
choice = foldr (<|>) empty
{-# INLINE choice #-}
-- | @count n p@ parses @n@ occurrences of @p@. If @n@ is smaller or
-- equal to zero, the parser equals to @return []@. Returns a list of @n@
-- values.
--
-- This parser is defined in terms of 'count'', like this:
--
-- > count n = count' n n
count :: Alternative m => Int -> m a -> m [a]
count n p = count' n n p
{-# INLINE count #-}
-- | @count\' m n p@ parses from @m@ to @n@ occurrences of @p@. If @n@ is
-- not positive or @m > n@, the parser equals to @return []@. Returns a list
-- of parsed values.
--
-- Please note that @m@ /may/ be negative, in this case effect is the same
-- as if it were equal to zero.
count' :: Alternative m => Int -> Int -> m a -> m [a]
count' lo hi p
  | hi <= 0 || lo > hi = pure []
  | lo > 0             = (:) <$> p <*> count' (lo - 1) (hi - 1) p
  | otherwise          = consMaybe <$> optional p <*> count' 0 (hi - 1) p
  where
    -- A missing optional element discards the tail, exactly matching
    -- the original accumulator @maybe [] (:ts) t@.
    consMaybe mx xs = case mx of
      Just x  -> x : xs
      Nothing -> []
-- | @endBy p sep@ parses /zero/ or more occurrences of @p@, separated
-- and ended by @sep@. Returns a list of values returned by @p@.
--
-- > cStatements = cStatement `endBy` semicolon
endBy :: Alternative m => m a -> m sep -> m [a]
endBy item end = many (const <$> item <*> end)
{-# INLINE endBy #-}
-- | @endBy1 p sep@ parses /one/ or more occurrences of @p@, separated
-- and ended by @sep@. Returns a list of values returned by @p@.
endBy1 :: Alternative m => m a -> m sep -> m [a]
endBy1 item end = some (const <$> item <*> end)
{-# INLINE endBy1 #-}
-- | @manyTill p end@ applies parser @p@ /zero/ or more times until
-- parser @end@ succeeds. Returns the list of values returned by @p@. This
-- parser can be used to scan comments:
--
-- > simpleComment = string "<!--" >> manyTill anyChar (try $ string "-->")
--
-- Note that we need to use 'try' since parsers @anyChar@ and @string
-- \"-->\"@ overlap and @string \"-->\"@ could consume input before failing.
manyTill :: Alternative m => m a -> m end -> m [a]
manyTill p end = ([] <$ end) <|> someTill p end
{-# INLINE manyTill #-}
-- | @someTill p end@ works similarly to @manyTill p end@, but @p@ should
-- succeed at least once.
someTill :: Alternative m => m a -> m end -> m [a]
someTill p end = liftA2 (:) p (manyTill p end)
{-# INLINE someTill #-}
-- | @option x p@ tries to apply parser @p@. If @p@ fails without
-- consuming input, it returns the value @x@, otherwise the value returned
-- by @p@.
--
-- > priority = option 0 (digitToInt <$> digitChar)
option :: Alternative m => a -> m a -> m a
option fallback p = p <|> pure fallback
{-# INLINE option #-}
-- | @sepBy p sep@ parses /zero/ or more occurrences of @p@, separated
-- by @sep@. Returns a list of values returned by @p@.
--
-- > commaSep p = p `sepBy` comma
sepBy :: Alternative m => m a -> m sep -> m [a]
sepBy item sep = sepBy1 item sep <|> pure []
{-# INLINE sepBy #-}
-- | @sepBy1 p sep@ parses /one/ or more occurrences of @p@, separated
-- by @sep@. Returns a list of values returned by @p@.
sepBy1 :: Alternative m => m a -> m sep -> m [a]
sepBy1 item sep = liftA2 (:) item (many (sep *> item))
{-# INLINE sepBy1 #-}
-- | @skipMany p@ applies the parser @p@ /zero/ or more times, skipping
-- its result.
--
-- > space = skipMany spaceChar
skipMany :: Alternative m => m a -> m ()
skipMany p = () <$ many p
{-# INLINE skipMany #-}
-- | @skipSome p@ applies the parser @p@ /one/ or more times, skipping
-- its result.
skipSome :: Alternative m => m a -> m ()
skipSome p = () <$ some p
{-# INLINE skipSome #-}
-- Deprecated combinators
-- | @chainl p op x@ parses /zero/ or more occurrences of @p@,
-- separated by @op@. Returns a value obtained by a /left/ associative
-- application of all functions returned by @op@ to the values returned by
-- @p@. If there are zero occurrences of @p@, the value @x@ is returned.
{-# DEPRECATED chainl "Use \"Text.Megaparsec.Expr\" instead." #-}
chainl :: Alternative m => m a -> m (a -> a -> a) -> a -> m a
chainl p op def = chainl1 p op <|> pure def
{-# INLINE chainl #-}
-- | @chainl1 p op@ parses /one/ or more occurrences of @p@,
-- separated by @op@ Returns a value obtained by a /left/ associative
-- application of all functions returned by @op@ to the values returned by
-- @p@. This parser can for example be used to eliminate left recursion
-- which typically occurs in expression grammars.
--
-- Consider using "Text.Megaparsec.Expr" instead.
{-# DEPRECATED chainl1 "Use \"Text.Megaparsec.Expr\" instead." #-}
chainl1 :: Alternative m => m a -> m (a -> a -> a) -> m a
chainl1 p op = start
  where
    -- Parse one item, then thread it leftwards through the continuation
    -- built by 'more'.
    start = flip id <$> p <*> more
    more  = (\g y h x -> h (g x y)) <$> op <*> p <*> more <|> pure id
{-# INLINE chainl1 #-}
-- | @chainr p op x@ parses /zero/ or more occurrences of @p@,
-- separated by @op@ Returns a value obtained by a /right/ associative
-- application of all functions returned by @op@ to the values returned by
-- @p@. If there are no occurrences of @p@, the value @x@ is returned.
--
-- Consider using "Text.Megaparsec.Expr" instead.
{-# DEPRECATED chainr "Use \"Text.Megaparsec.Expr\" instead." #-}
chainr :: Alternative m => m a -> m (a -> a -> a) -> a -> m a
chainr p op def = chainr1 p op <|> pure def
{-# INLINE chainr #-}
-- | @chainr1 p op@ parses /one/ or more occurrences of @p@,
-- separated by @op@. Returns a value obtained by a /right/ associative
-- application of all functions returned by @op@ to the values returned by
-- @p@.
--
-- Consider using "Text.Megaparsec.Expr" instead.
{-# DEPRECATED chainr1 "Use \"Text.Megaparsec.Expr\" instead." #-}
chainr1 :: Alternative m => m a -> m (a -> a -> a) -> m a
chainr1 p op = start
  where
    start = flip id <$> p <*> more
    more  = (flip <$> op <*> start) <|> pure id
{-# INLINE chainr1 #-}
-- | @sepEndBy p sep@ parses /zero/ or more occurrences of @p@,
-- separated and optionally ended by @sep@. Returns a list of values
-- returned by @p@.
{-# DEPRECATED sepEndBy "Use @sepBy p sep <* optional sep@ instead." #-}
sepEndBy :: Alternative m => m a -> m sep -> m [a]
sepEndBy item sep = const <$> sepBy item sep <*> optional sep
{-# INLINE sepEndBy #-}
-- | @sepEndBy1 p sep@ parses /one/ or more occurrences of @p@,
-- separated and optionally ended by @sep@. Returns a list of values
-- returned by @p@.
{-# DEPRECATED sepEndBy1 "Use @sepBy1 p sep <* optional sep@ instead." #-}
sepEndBy1 :: Alternative m => m a -> m sep -> m [a]
sepEndBy1 item sep = const <$> sepBy1 item sep <*> optional sep
{-# INLINE sepEndBy1 #-}
|
neongreen/megaparsec
|
Text/Megaparsec/Combinator.hs
|
bsd-2-clause
| 7,646
| 0
| 15
| 1,678
| 1,437
| 766
| 671
| 94
| 1
|
module Foundation
( App (..)
, Route (..)
, AppMessage (..)
, resourcesApp
, Handler
, Widget
, Form
, maybeAuth
, requireAuth
, module Settings
, module Model
) where
import Prelude
import Yesod
import Yesod.Static
import Yesod.Auth
import Yesod.Auth.BrowserId
import Yesod.Auth.GoogleEmail
import Yesod.Default.Config
import Yesod.Default.Util (addStaticContentExternal)
import Network.HTTP.Conduit (Manager)
import qualified Settings
import qualified Database.Persist.Store
import Settings.StaticFiles
import Data.Text (Text)
import Database.Persist.GenericSql
import Settings (widgetFile, Extra (..))
import Model
import Text.Jasmine (minifym)
import Web.ClientSession (getKey)
import Text.Hamlet (hamletFile)
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
    { settings :: AppConfig DefaultEnv Extra
      -- ^ Application configuration ('AppConfig') with site-specific
      -- 'Extra' values.
    , getStatic :: Static -- ^ Settings for static file serving.
    , connPool :: Database.Persist.Store.PersistConfigPool Settings.PersistConfig -- ^ Database connection pool.
    , httpManager :: Manager
      -- ^ Shared HTTP connection manager.
    , persistConfig :: Settings.PersistConfig
      -- ^ Persistence backend configuration.
    }
-- Set up i18n messages from the @messages@ folder; English ("en") is the
-- default language.
mkMessage "App" "messages" "en"
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/handler
--
-- This function does three things:
--
-- * Creates the route datatype AppRoute. Every valid URL in your
-- application can be represented as a value of this type.
-- * Creates the associated type:
-- type instance Route App = AppRoute
-- * Creates the value resourcesApp which contains information on the
-- resources declared below. This is used in Handler.hs by the call to
-- mkYesodDispatch
--
-- What this function does *not* do is create a YesodSite instance for
-- App. Creating that instance requires all of the handler functions
-- for our application to be in scope. However, the handler functions
-- usually require access to the AppRoute datatype. Therefore, we
-- split these actions into two functions and place them in separate files.
mkYesodData "App" $(parseRoutesFile "config/routes")
-- | Shorthand for a monadic form over this site producing a value of type @x@.
type Form x = Html -> MForm App App (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
    -- Derive the application root from the loaded configuration.
    approot = ApprootMaster $ appRoot . settings
    -- Store session data on the client in encrypted cookies,
    -- default session idle timeout is 120 minutes
    makeSessionBackend _ = do
        key <- getKey "config/client_session_key.aes"
        return . Just $ clientSessionBackend key 120
    -- Assemble the full page around each handler's widget.
    -- NOTE: master/mmsg look unused here, but they are referenced from
    -- inside the spliced templates below.
    defaultLayout widget = do
        master <- getYesod
        mmsg <- getMessage
        -- We break up the default layout into two components:
        -- default-layout is the contents of the body tag, and
        -- default-layout-wrapper is the entire page. Since the final
        -- value passed to hamletToRepHtml cannot be a widget, this allows
        -- you to use normal widget features in default-layout.
        pc <- widgetToPageContent $ do
            $(widgetFile "normalize")
            addStylesheet $ StaticR css_bootstrap_css
            toWidget [lucius|
            body {
                padding-top: 60px;
            }
            |]
            addStylesheet $ StaticR css_bootstrap_responsive_css
            addScript $ StaticR js_jquery_1_7_2_min_js
            addScript $ StaticR js_bootstrap_min_js
            $(widgetFile "default-layout")
        hamletToRepHtml $(hamletFile "templates/default-layout-wrapper.hamlet")
    -- This is done to provide an optimization for serving static files from
    -- a separate domain. Please see the staticRoot setting in Settings.hs
    urlRenderOverride y (StaticR s) =
        Just $ uncurry (joinPath y (Settings.staticRoot $ settings y)) $ renderRoute s
    urlRenderOverride _ _ = Nothing
    -- The page to be redirected to when authentication is required.
    authRoute _ = Just $ AuthR LoginR
    -- messageLogger y loc level msg =
    --     formatLogText (getLogger y) loc level msg >>= logMsg (getLogger y)
    -- This function creates static content files in the static folder
    -- and names them based on a hash of their content. This allows
    -- expiration dates to be set far in the future without worry of
    -- users receiving stale content.
    addStaticContent = addStaticContentExternal minifym base64md5 Settings.staticDir (StaticR . flip StaticRoute [])
    -- Place Javascript at bottom of the body tag so the rest of the page loads first
    jsLoader _ = BottomOfBody
-- How to run database actions.
instance YesodPersist App where
    type YesodPersistBackend App = SqlPersist
    -- Run a database action with this site's persistence settings and
    -- its shared connection pool.
    runDB action = do
        site <- getYesod
        Database.Persist.Store.runPool (persistConfig site) action (connPool site)
instance YesodAuth App where
    type AuthId App = UserId
    -- Where to send a user after successful login
    loginDest _ = HomeR
    -- Where to send a user after logout
    logoutDest _ = HomeR
    -- Map plugin-supplied credentials to a database user id, creating a
    -- 'User' row (identifier only, no extra data) on first login.
    getAuthId creds = runDB $ do
        x <- getBy $ UniqueUser $ credsIdent creds
        case x of
            Just (Entity uid _) -> return $ Just uid
            -- First login with this identifier: insert a fresh user.
            -- (Removed a redundant single-statement 'do' wrapper here.)
            Nothing -> fmap Just $ insert $ User (credsIdent creds) Nothing
    -- You can add other plugins like BrowserID, email or OAuth here
    authPlugins _ = [authBrowserId, authGoogleEmail]
    authHttpManager = httpManager
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
    -- Use the library's stock (English) form-validation messages,
    -- ignoring the requested language list.
    renderMessage _ _ = defaultFormMessage
-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
|
tanakh/hackage-mirror
|
Foundation.hs
|
bsd-2-clause
| 6,285
| 0
| 17
| 1,394
| 859
| 479
| 380
| -1
| -1
|
{-# LANGUAGE TypeSynonymInstances,
FlexibleInstances #-}
--
-- Common.hs
--
-- Some common definitions related to navigating
-- expression trees.
--
-- Gregory Wright, 2 July 2012
--
module Math.Symbolic.Wheeler.Common where
import Data.DList
-- | A single step of navigation context, tagged S, P or T and carrying
-- the child index taken at that step.  (The intended reading of the
-- S\/P\/T tags is not visible here -- confirm against the expression
-- tree definition elsewhere in the package.)
data Cxt
  = Scxt Int
  | Pcxt Int
  | Tcxt Int
  deriving (Eq, Ord, Show)
-- | Test for the 'Scxt' constructor.
isScxt :: Cxt -> Bool
isScxt c = case c of
             Scxt _ -> True
             _      -> False
-- | Test for the 'Pcxt' constructor.
isPcxt :: Cxt -> Bool
isPcxt c = case c of
             Pcxt _ -> True
             _      -> False
-- | Test for the 'Tcxt' constructor.
isTcxt :: Cxt -> Bool
isTcxt c = case c of
             Tcxt _ -> True
             _      -> False
-- | A path of navigation contexts from the root of an expression tree.
type Breadcrumbs = [ Cxt ]
-- | The same path information, backed by a difference list for cheap
-- appends at the end.
type Breadcrumbs' = DList Cxt
-- | Difference lists compare equal exactly when the lists they
-- represent are equal.  (Instance on a type synonym: relies on the
-- TypeSynonymInstances\/FlexibleInstances pragmas at the top of file.)
instance Eq Breadcrumbs' where
    x == y = toList x == toList y
-- | Show a breadcrumb trail as the plain list it represents.
instance Show Breadcrumbs' where
    show bc = show (toList bc)
-- | Classification of a symbol: an ordinary symbol, a pattern, or a
-- placeholder.
data SymbolType
  = Regular
  | Pattern
  | Placeholder
  deriving (Eq, Show)
|
gwright83/Wheeler
|
src/Math/Symbolic/Wheeler/Common.hs
|
bsd-3-clause
| 849
| 0
| 8
| 236
| 247
| 140
| 107
| 25
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
module Main where
import Data.Char (isSpace)
import Data.Data
import Data.Maybe (isJust)
import System.Console.CmdArgs
import System.IO (hFlush, stdout)
import Text.RegexPR (matchRegexPR)
import Trajectory.Private.Config (writeKey)
-- | Parse the command line, prompt for an API key, and store it under
-- the selected profile.
main = do
  opts <- cmdArgs initTjArgDefinition
  apiKey <- getKey
  writeKey (profileName opts) apiKey
  return ()
-- | Keep prompting until a non-blank API key is entered.
getKey = promptWhile isBlank "API key: "
-- | Command-line arguments accepted by @initrj@.
data InitTjArg = InitTjArg
  { profileName :: String -- ^ Profile under which the API key is stored.
  } deriving (Show, Data, Typeable)
-- | cmdArgs mode: a single @--profile@ option defaulting to "default",
-- presented under the program name "initrj".
initTjArgDefinition = InitTjArg {
  profileName = "default"
    &= explicit
    &= name "profile"
    &= help "The profile name to use [default]"
  } &= program "initrj"
-- generally useful functions below; maybe they exist elsewhere:
-- | Repeatedly display @prompt@ and read a line for as long as the
-- predicate @p@ accepts the answer; return the first line @p@ rejects.
promptWhile p prompt = ask
  where
    ask = do
      putStr prompt
      hFlush stdout
      answer <- getLine
      if p answer
        then ask
        else return answer
-- | True when the line is empty or consists solely of whitespace.
-- Replaces the former third-party regex check (@matchRegexPR "^\\s*$"@)
-- with the equivalent standard-library predicate.  'isSpace' also
-- accepts Unicode whitespace, which is harmless for prompt validation.
isBlank :: String -> Bool
isBlank = all isSpace
|
mike-burns/trajectory
|
InitTj.hs
|
bsd-3-clause
| 952
| 0
| 10
| 213
| 253
| 131
| 122
| 32
| 2
|
module Lets.Data (
Locality(..)
, Address(..)
, Person(..)
, IntAnd(..)
, IntOr(..)
, fredLocality
, fredAddress
, fred
, maryLocality
, maryAddress
, mary
, Store(..)
, Const (..)
, Tagged(..)
, Identity(..)
, AlongsideLeft(..)
, AlongsideRight(..)
) where
import Control.Applicative(Applicative(..))
import Data.Monoid(Monoid(..))
-- | A geographic locality: city, state and country, in that order.
data Locality = Locality String String String
  deriving (Eq, Show)

-- | A street address: street, suburb and its 'Locality'.
data Address = Address String String Locality
  deriving (Eq, Show)

-- | A person: age, name and home 'Address'.
data Person = Person Int String Address
  deriving (Eq, Show)

-- | An 'Int' together with a value of type @a@.
data IntAnd a = IntAnd Int a
  deriving (Eq, Show)

-- | Either an 'Int' or a value of type @a@.
data IntOr a
  = IntOrIs Int
  | IntOrIsNot a
  deriving (Eq, Show)
-- | The locality Fred lives in.
fredLocality :: Locality
fredLocality = Locality "Fredmania" "New South Fred" "Fredalia"

-- | Fred's street address.
fredAddress :: Address
fredAddress = Address "15 Fred St" "Fredville" fredLocality

-- | A sample person named Fred.
fred :: Person
fred = Person 24 "Fred" fredAddress
-- | The locality Mary lives in.
maryLocality :: Locality
maryLocality = Locality "Mary Mary" "Western Mary" "Maristan"

-- | Mary's street address.
maryAddress :: Address
maryAddress = Address "83 Mary Ln" "Maryland" maryLocality

-- | A sample person named Mary.
mary :: Person
mary = Person 28 "Mary" maryAddress
----
----
-- | An accessor function @s -> a@ paired with a current @s@ to apply it
-- to (the classic @Store@ shape).
data Store s a = Store (s -> a) s
-- | A constant functor: wraps an @a@ and ignores its second type
-- parameter entirely.
data Const a b = Const { getConst :: a }
  deriving (Eq, Show)

-- | Mapping cannot touch the phantom @b@, so the wrapped value is kept.
instance Functor (Const a) where
  fmap _ (Const x) = Const x

-- | Accumulates the wrapped monoid values; 'pure' contributes 'mempty'.
instance Monoid a => Applicative (Const a) where
  pure _ = Const mempty
  Const x <*> Const y = Const (x `mappend` y)
-- | Wraps a @b@; the first type parameter is purely phantom.
data Tagged a b = Tagged { getTagged :: b }
  deriving (Eq, Show)

instance Functor (Tagged a) where
  fmap g (Tagged x) = Tagged (g x)

-- | Function application under the wrapper.
instance Applicative (Tagged a) where
  pure = Tagged
  Tagged g <*> Tagged x = Tagged (g x)
-- | The identity functor: a bare wrapper around a single value.
data Identity a = Identity { getIdentity :: a }
  deriving (Eq, Show)

instance Functor Identity where
  fmap g (Identity x) = Identity (g x)

instance Applicative Identity where
  pure = Identity
  Identity g <*> Identity x = Identity (g x)
-- | Pairs inside a functor, with 'fmap' acting on the /first/ component.
data AlongsideLeft f b a = AlongsideLeft { getAlongsideLeft :: f (a, b) }

instance Functor f => Functor (AlongsideLeft f b) where
  fmap g (AlongsideLeft v) = AlongsideLeft (fmap (\(l, r) -> (g l, r)) v)
-- | Pairs inside a functor, with 'fmap' acting on the /second/ component.
data AlongsideRight f a b = AlongsideRight { getAlongsideRight :: f (a, b) }

instance Functor f => Functor (AlongsideRight f a) where
  fmap g (AlongsideRight v) = AlongsideRight (fmap (\(l, r) -> (l, g r)) v)
|
NICTA/lets-lens
|
src/Lets/Data.hs
|
bsd-3-clause
| 2,640
| 0
| 12
| 742
| 916
| 511
| 405
| 146
| 1
|
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Config
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : lemmih@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- Utilities for handling saved state such as known packages, known servers and
-- downloaded packages.
-----------------------------------------------------------------------------
module Distribution.Client.Config (
SavedConfig(..),
loadConfig,
getConfigFilePath,
showConfig,
showConfigWithComments,
parseConfig,
defaultCabalDir,
defaultConfigFile,
defaultCacheDir,
defaultCompiler,
defaultLogsDir,
defaultUserInstall,
baseSavedConfig,
commentSavedConfig,
initialSavedConfig,
configFieldDescriptions,
haddockFlagsFields,
installDirsFields,
withProgramsFields,
withProgramOptionsFields,
userConfigDiff,
userConfigUpdate,
createDefaultConfigFile,
remoteRepoFields
) where
import Distribution.Client.Types
( RemoteRepo(..), Username(..), Password(..), emptyRemoteRepo )
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.Setup
( GlobalFlags(..), globalCommand, defaultGlobalFlags
, ConfigExFlags(..), configureExOptions, defaultConfigExFlags
, InstallFlags(..), installOptions, defaultInstallFlags
, UploadFlags(..), uploadCommand
, ReportFlags(..), reportCommand
, showRepo, parseRepo, readRepo )
import Distribution.Utils.NubList
( NubList, fromNubList, toNubList)
import Distribution.Simple.Compiler
( DebugInfoLevel(..), OptimisationLevel(..) )
import Distribution.Simple.Setup
( ConfigFlags(..), configureOptions, defaultConfigFlags
, AllowNewer(..), RelaxDeps(..)
, HaddockFlags(..), haddockOptions, defaultHaddockFlags
, installDirsOptions, optionDistPref
, programConfigurationPaths', programConfigurationOptions
, Flag(..), toFlag, flagToMaybe, fromFlagOrDefault )
import Distribution.Simple.InstallDirs
( InstallDirs(..), defaultInstallDirs
, PathTemplate, toPathTemplate )
import Distribution.ParseUtils
( FieldDescr(..), liftField
, ParseResult(..), PError(..), PWarning(..)
, locatedErrorMsg, showPWarning
, readFields, warning, lineNo
, simpleField, listField, spaceListField
, parseFilePathQ, parseOptCommaList, parseTokenQ )
import Distribution.Client.ParseUtils
( parseFields, ppFields, ppSection )
import Distribution.Client.HttpUtils
( isOldHackageURI )
import qualified Distribution.ParseUtils as ParseUtils
( Field(..) )
import qualified Distribution.Text as Text
( Text(..) )
import Distribution.Simple.Command
( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..)
, viewAsFieldDescr )
import Distribution.Simple.Program
( defaultProgramConfiguration )
import Distribution.Simple.Utils
( die, notice, warn, lowercase, cabalVersion )
import Distribution.Compiler
( CompilerFlavor(..), defaultCompilerFlavor )
import Distribution.Verbosity
( Verbosity, normal )
import Distribution.Solver.Types.ConstraintSource
import Data.List
( partition, find, foldl' )
import Data.Maybe
( fromMaybe )
import Control.Monad
( when, unless, foldM, liftM, liftM2 )
import qualified Distribution.Compat.ReadP as Parse
( (<++), option )
import Distribution.Compat.Semigroup
import qualified Text.PrettyPrint as Disp
( render, text, empty )
import Text.PrettyPrint
( ($+$) )
import Text.PrettyPrint.HughesPJ
( text, Doc )
import System.Directory
( createDirectoryIfMissing, getAppUserDataDirectory, renameFile )
import Network.URI
( URI(..), URIAuth(..), parseURI )
import System.FilePath
( (<.>), (</>), takeDirectory )
import System.IO.Error
( isDoesNotExistError )
import Distribution.Compat.Environment
( getEnvironment )
import Distribution.Compat.Exception
( catchIO )
import qualified Paths_cabal_install
( version )
import Data.Version
( showVersion )
import Data.Char
( isSpace )
import qualified Data.Map as M
import Data.Function
( on )
import Data.List
( nubBy )
import GHC.Generics ( Generic )
--
-- * Configuration saved in the config file
--
-- | Everything that can be stored in the user's config file, grouped by
-- the command-line flag record each section feeds.
data SavedConfig = SavedConfig
  { savedGlobalFlags       :: GlobalFlags
  , savedInstallFlags      :: InstallFlags
  , savedConfigureFlags    :: ConfigFlags
  , savedConfigureExFlags  :: ConfigExFlags
  , savedUserInstallDirs   :: InstallDirs (Flag PathTemplate)
  , savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate)
  , savedUploadFlags       :: UploadFlags
  , savedReportFlags       :: ReportFlags
  , savedHaddockFlags      :: HaddockFlags
  } deriving Generic
instance Monoid SavedConfig where
  -- 'mempty' is built generically from the fields' own monoids;
  -- 'mappend' delegates to the Semigroup instance below.
  mempty = gmempty
  mappend = (<>)
-- | Combine two configs, with @b@ (the right argument) taking
-- precedence over @a@ field by field; see the note below for how list
-- fields are treated.
instance Semigroup SavedConfig where
  a <> b = SavedConfig {
    savedGlobalFlags       = combinedSavedGlobalFlags,
    savedInstallFlags      = combinedSavedInstallFlags,
    savedConfigureFlags    = combinedSavedConfigureFlags,
    savedConfigureExFlags  = combinedSavedConfigureExFlags,
    savedUserInstallDirs   = combinedSavedUserInstallDirs,
    savedGlobalInstallDirs = combinedSavedGlobalInstallDirs,
    savedUploadFlags       = combinedSavedUploadFlags,
    savedReportFlags       = combinedSavedReportFlags,
    savedHaddockFlags      = combinedSavedHaddockFlags
  }
    where
      -- This is ugly, but necessary. If we're mappending two config files, we
      -- want the values of the *non-empty* list fields from the second one to
      -- *override* the corresponding values from the first one. Default
      -- behaviour (concatenation) is confusing and makes some use cases (see
      -- #1884) impossible.
      --
      -- However, we also want to allow specifying multiple values for a list
      -- field in a *single* config file. For example, we want the following to
      -- continue to work:
      --
      -- remote-repo: hackage.haskell.org:http://hackage.haskell.org/
      -- remote-repo: private-collection:http://hackage.local/
      --
      -- So we can't just wrap the list fields inside Flags; we have to do some
      -- special-casing just for SavedConfig.
      -- NB: the signature prevents us from using 'combine' on lists.
      -- 'Flag' fields: Flag's mappend keeps the second value whenever it
      -- is set, so settings from @b@ override those from @a@.
      combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a
      combine' field subfield =
        (subfield . field $ a) `mappend` (subfield . field $ b)
      combineMonoid :: Monoid mon => (SavedConfig -> flags) -> (flags -> mon)
                    -> mon
      combineMonoid field subfield =
        (subfield . field $ a) `mappend` (subfield . field $ b)
      -- Plain list fields: a non-empty list from @b@ *replaces* the list
      -- from @a@ (no concatenation), per the note above.
      lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a]
      lastNonEmpty' field subfield =
        let a' = subfield . field $ a
            b' = subfield . field $ b
        in case b' of [] -> a'
                      _ -> b'
      -- Same replacement rule for 'NubList' fields.
      lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a)
                      -> NubList a
      lastNonEmptyNL' field subfield =
        let a' = subfield . field $ a
            b' = subfield . field $ b
        in case fromNubList b' of [] -> a'
                                  _ -> b'
      combinedSavedGlobalFlags = GlobalFlags {
        globalVersion           = combine globalVersion,
        globalNumericVersion    = combine globalNumericVersion,
        globalConfigFile        = combine globalConfigFile,
        globalSandboxConfigFile = combine globalSandboxConfigFile,
        globalConstraintsFile   = combine globalConstraintsFile,
        globalRemoteRepos       = lastNonEmptyNL globalRemoteRepos,
        globalCacheDir          = combine globalCacheDir,
        globalLocalRepos        = lastNonEmptyNL globalLocalRepos,
        globalLogsDir           = combine globalLogsDir,
        globalWorldFile         = combine globalWorldFile,
        globalRequireSandbox    = combine globalRequireSandbox,
        globalIgnoreSandbox     = combine globalIgnoreSandbox,
        globalIgnoreExpiry      = combine globalIgnoreExpiry,
        globalHttpTransport     = combine globalHttpTransport
        }
        where
          combine        = combine'        savedGlobalFlags
          lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags
      combinedSavedInstallFlags = InstallFlags {
        installDocumentation         = combine installDocumentation,
        installHaddockIndex          = combine installHaddockIndex,
        installDryRun                = combine installDryRun,
        installMaxBackjumps          = combine installMaxBackjumps,
        installReorderGoals          = combine installReorderGoals,
        installCountConflicts        = combine installCountConflicts,
        installIndependentGoals      = combine installIndependentGoals,
        installShadowPkgs            = combine installShadowPkgs,
        installStrongFlags           = combine installStrongFlags,
        installReinstall             = combine installReinstall,
        installAvoidReinstalls       = combine installAvoidReinstalls,
        installOverrideReinstall     = combine installOverrideReinstall,
        installUpgradeDeps           = combine installUpgradeDeps,
        installOnly                  = combine installOnly,
        installOnlyDeps              = combine installOnlyDeps,
        installRootCmd               = combine installRootCmd,
        installSummaryFile           = lastNonEmptyNL installSummaryFile,
        installLogFile               = combine installLogFile,
        installBuildReports          = combine installBuildReports,
        installReportPlanningFailure = combine installReportPlanningFailure,
        installSymlinkBinDir         = combine installSymlinkBinDir,
        installOneShot               = combine installOneShot,
        installNumJobs               = combine installNumJobs,
        installKeepGoing             = combine installKeepGoing,
        installRunTests              = combine installRunTests,
        installOfflineMode           = combine installOfflineMode
        }
        where
          combine        = combine'        savedInstallFlags
          lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags
      combinedSavedConfigureFlags = ConfigFlags {
        -- 'configPrograms_' cannot be merged, so @b@'s value is taken
        -- wholesale.
        configPrograms_     = configPrograms_ . savedConfigureFlags $ b,
        -- TODO: NubListify
        configProgramPaths  = lastNonEmpty configProgramPaths,
        -- TODO: NubListify
        configProgramArgs   = lastNonEmpty configProgramArgs,
        configProgramPathExtra = lastNonEmptyNL configProgramPathExtra,
        configHcFlavor      = combine configHcFlavor,
        configHcPath        = combine configHcPath,
        configHcPkg         = combine configHcPkg,
        configVanillaLib    = combine configVanillaLib,
        configProfLib       = combine configProfLib,
        configProf          = combine configProf,
        configSharedLib     = combine configSharedLib,
        configDynExe        = combine configDynExe,
        configProfExe       = combine configProfExe,
        configProfDetail    = combine configProfDetail,
        configProfLibDetail = combine configProfLibDetail,
        -- TODO: NubListify
        configConfigureArgs = lastNonEmpty configConfigureArgs,
        configOptimization  = combine configOptimization,
        configDebugInfo     = combine configDebugInfo,
        configProgPrefix    = combine configProgPrefix,
        configProgSuffix    = combine configProgSuffix,
        -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
        configInstallDirs   =
          (configInstallDirs . savedConfigureFlags $ a)
          `mappend` (configInstallDirs . savedConfigureFlags $ b),
        configScratchDir    = combine configScratchDir,
        -- TODO: NubListify
        configExtraLibDirs  = lastNonEmpty configExtraLibDirs,
        -- TODO: NubListify
        configExtraFrameworkDirs = lastNonEmpty configExtraFrameworkDirs,
        -- TODO: NubListify
        configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs,
        configIPID          = combine configIPID,
        configDistPref      = combine configDistPref,
        configVerbosity     = combine configVerbosity,
        configUserInstall   = combine configUserInstall,
        -- TODO: NubListify
        configPackageDBs    = lastNonEmpty configPackageDBs,
        configGHCiLib       = combine configGHCiLib,
        configSplitObjs     = combine configSplitObjs,
        configStripExes     = combine configStripExes,
        configStripLibs     = combine configStripLibs,
        -- TODO: NubListify
        configConstraints   = lastNonEmpty configConstraints,
        -- TODO: NubListify
        configDependencies  = lastNonEmpty configDependencies,
        -- TODO: NubListify
        configConfigurationsFlags = lastNonEmpty configConfigurationsFlags,
        configTests         = combine configTests,
        configBenchmarks    = combine configBenchmarks,
        configCoverage      = combine configCoverage,
        configLibCoverage   = combine configLibCoverage,
        configExactConfiguration = combine configExactConfiguration,
        configFlagError     = combine configFlagError,
        configRelocatable   = combine configRelocatable,
        configAllowNewer    = combineMonoid savedConfigureFlags
                              configAllowNewer
        }
        where
          combine        = combine'        savedConfigureFlags
          lastNonEmpty   = lastNonEmpty'   savedConfigureFlags
          lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags
      combinedSavedConfigureExFlags = ConfigExFlags {
        configCabalVersion  = combine configCabalVersion,
        -- TODO: NubListify
        configExConstraints = lastNonEmpty configExConstraints,
        -- TODO: NubListify
        configPreferences   = lastNonEmpty configPreferences,
        configSolver        = combine configSolver
        }
        where
          combine      = combine'      savedConfigureExFlags
          lastNonEmpty = lastNonEmpty' savedConfigureExFlags
      -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
      combinedSavedUserInstallDirs = savedUserInstallDirs a
                                     `mappend` savedUserInstallDirs b
      -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
      combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a
                                       `mappend` savedGlobalInstallDirs b
      combinedSavedUploadFlags = UploadFlags {
        uploadCandidate   = combine uploadCandidate,
        uploadDoc         = combine uploadDoc,
        uploadUsername    = combine uploadUsername,
        uploadPassword    = combine uploadPassword,
        uploadPasswordCmd = combine uploadPasswordCmd,
        uploadVerbosity   = combine uploadVerbosity
        }
        where
          combine = combine' savedUploadFlags
      combinedSavedReportFlags = ReportFlags {
        reportUsername  = combine reportUsername,
        reportPassword  = combine reportPassword,
        reportVerbosity = combine reportVerbosity
        }
        where
          combine = combine' savedReportFlags
      combinedSavedHaddockFlags = HaddockFlags {
        -- TODO: NubListify
        haddockProgramPaths  = lastNonEmpty haddockProgramPaths,
        -- TODO: NubListify
        haddockProgramArgs   = lastNonEmpty haddockProgramArgs,
        haddockHoogle        = combine haddockHoogle,
        haddockHtml          = combine haddockHtml,
        haddockHtmlLocation  = combine haddockHtmlLocation,
        haddockForHackage    = combine haddockForHackage,
        haddockExecutables   = combine haddockExecutables,
        haddockTestSuites    = combine haddockTestSuites,
        haddockBenchmarks    = combine haddockBenchmarks,
        haddockInternal      = combine haddockInternal,
        haddockCss           = combine haddockCss,
        haddockHscolour      = combine haddockHscolour,
        haddockHscolourCss   = combine haddockHscolourCss,
        haddockContents      = combine haddockContents,
        haddockDistPref      = combine haddockDistPref,
        haddockKeepTempFiles = combine haddockKeepTempFiles,
        haddockVerbosity     = combine haddockVerbosity
        }
        where
          combine      = combine'      savedHaddockFlags
          lastNonEmpty = lastNonEmpty' savedHaddockFlags
--
-- * Default config
--
-- | These are the absolute basic defaults. The fields that must be
-- initialised. When we load the config from the file we layer the loaded
-- values over these ones, so any missing fields in the file take their values
-- from here.
--
-- | The absolute-basic defaults; the config loaded from file is layered
-- on top of these, so fields missing from the file fall back to here.
baseSavedConfig :: IO SavedConfig
baseSavedConfig = do
  cabalDir  <- defaultCabalDir
  logsDir   <- defaultLogsDir
  worldFile <- defaultWorldFile
  let configure = mempty
        { configHcFlavor    = toFlag defaultCompiler
        , configUserInstall = toFlag defaultUserInstall
        , configVerbosity   = toFlag normal
        }
      userDirs = mempty { prefix = toFlag (toPathTemplate cabalDir) }
      global = mempty
        { globalLogsDir   = toFlag logsDir
        , globalWorldFile = toFlag worldFile
        }
  return mempty
    { savedConfigureFlags  = configure
    , savedUserInstallDirs = userDirs
    , savedGlobalFlags     = global
    }
-- | This is the initial configuration that we write out to to the config file
-- if the file does not exist (or the config we use if the file cannot be read
-- for some other reason). When the config gets loaded it gets layered on top
-- of 'baseSavedConfig' so we do not need to include it into the initial
-- values we save into the config file.
--
-- | The values written to a freshly created config file (and used when
-- the file cannot be read).  These are layered over 'baseSavedConfig'
-- at load time, so only the extras need to appear here.
initialSavedConfig :: IO SavedConfig
initialSavedConfig = do
  cacheDir  <- defaultCacheDir
  logsDir   <- defaultLogsDir
  worldFile <- defaultWorldFile
  extraPath <- defaultExtraPath
  return mempty
    { savedGlobalFlags = mempty
        { globalCacheDir    = toFlag cacheDir
        , globalRemoteRepos = toNubList [addInfoForKnownRepos defaultRemoteRepo]
        , globalWorldFile   = toFlag worldFile
        }
    , savedConfigureFlags = mempty
        { configProgramPathExtra = toNubList extraPath
        }
    , savedInstallFlags = mempty
        { installSummaryFile  = toNubList [toPathTemplate (logsDir </> "build.log")]
        , installBuildReports = toFlag AnonymousReports
        , installNumJobs      = toFlag Nothing
        }
    }
--TODO: misleading, there's no way to override this default
-- either make it possible or rename to simply getCabalDir.
-- | The per-user cabal directory (e.g. @~/.cabal@ on Unix).
defaultCabalDir :: IO FilePath
defaultCabalDir = getAppUserDataDirectory "cabal"
-- | Default location of the config file: @\<cabal-dir\>\/config@.
defaultConfigFile :: IO FilePath
defaultConfigFile = fmap (</> "config") defaultCabalDir

-- | Default package download cache: @\<cabal-dir\>\/packages@.
defaultCacheDir :: IO FilePath
defaultCacheDir = fmap (</> "packages") defaultCabalDir

-- | Default build-log directory: @\<cabal-dir\>\/logs@.
defaultLogsDir :: IO FilePath
defaultLogsDir = fmap (</> "logs") defaultCabalDir

-- | Default position of the world file: @\<cabal-dir\>\/world@.
defaultWorldFile :: IO FilePath
defaultWorldFile = fmap (</> "world") defaultCabalDir

-- | Extra directories added to the program search path, namely the
-- user's @\<cabal-dir\>\/bin@.
defaultExtraPath :: IO [FilePath]
defaultExtraPath = fmap (\dir -> [dir </> "bin"]) defaultCabalDir
-- | Compiler assumed when none is configured: the Cabal library's
-- default flavour, or GHC if it has none.
defaultCompiler :: CompilerFlavor
defaultCompiler = fromMaybe GHC defaultCompilerFlavor
-- | Whether installs are per-user rather than global by default.
defaultUserInstall :: Bool
defaultUserInstall = True
-- We do per-user installs by default on all platforms. We used to default to
-- global installs on Windows but that no longer works on Windows Vista or 7.
-- | The central Hackage repository, as written into a brand-new config
-- file (see also 'addInfoForKnownRepos' for upgrading old entries).
defaultRemoteRepo :: RemoteRepo
defaultRemoteRepo = RemoteRepo name uri Nothing [] 0 False
  where
    name = "hackage.haskell.org"
    uri  = URI "http:" (Just (URIAuth "" name "")) "/" "" ""
    -- Note that lots of old ~/.cabal/config files will have the old url
    -- http://hackage.haskell.org/packages/archive
    -- but new config files can use the new url (without the /packages/archive)
    -- and avoid having to do a http redirect
-- For the default repo we know extra information, fill this in.
--
-- We need this because the 'defaultRemoteRepo' above is only used for the
-- first time when a config file is made. So for users with older config files
-- we might have only have older info. This lets us fill that in even for old
-- config files.
--
-- TODO: Once we migrate from opt-in to opt-out security for the central
-- Hackage repository, we should enable security and specify keys and threshold
-- for repositories that have their security setting as 'Nothing' (default).
-- | Upgrade a repo entry for the central Hackage server: replace the
-- legacy @/packages/archive@ URI with the current default and mark the
-- repo as one where HTTPS should be attempted.  Other repos pass
-- through unchanged.
addInfoForKnownRepos :: RemoteRepo -> RemoteRepo
addInfoForKnownRepos repo@RemoteRepo{ remoteRepoName = "hackage.haskell.org" } =
  tryHttps
  $ if isOldHackageURI (remoteRepoURI repo) then defaultRemoteRepo else repo
  where
    tryHttps r = r { remoteRepoShouldTryHttps = True }
addInfoForKnownRepos other = other
--
-- * Config file reading
--
-- | Load the saved config, layered over 'baseSavedConfig'.  If the file
-- does not exist yet, a default one is written and loading is retried;
-- a parse failure aborts with 'die'.
loadConfig :: Verbosity -> Flag FilePath -> IO SavedConfig
loadConfig verbosity configFileFlag = addBaseConf $ do
  (source, configFile) <- getConfigFilePathAndSource configFileFlag
  minp <- readConfigFile mempty configFile
  case minp of
    -- First run (or file deleted): create the default file, then
    -- recurse to load what we just wrote.
    Nothing -> do
      notice verbosity $ "Config file path source is " ++ sourceMsg source ++ "."
      notice verbosity $ "Config file " ++ configFile ++ " not found."
      createDefaultConfigFile verbosity configFile
      loadConfig verbosity configFileFlag
    Just (ParseOk ws conf) -> do
      unless (null ws) $ warn verbosity $
        unlines (map (showPWarning configFile) ws)
      return conf
    Just (ParseFailed err) -> do
      let (line, msg) = locatedErrorMsg err
      die $
        "Error parsing config file " ++ configFile
        ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg
  where
    -- Layer whatever the body produced over the baseline defaults.
    addBaseConf body = do
      base <- baseSavedConfig
      extra <- body
      return (base `mappend` extra)
    sourceMsg CommandlineOption = "commandline option"
    sourceMsg EnvironmentVariable = "env var CABAL_CONFIG"
    sourceMsg Default = "default config file"
-- | Which of the three candidate sources supplied the config file path
-- (used only for the "Config file path source is ..." notice).
data ConfigFileSource
  = CommandlineOption
  | EnvironmentVariable
  | Default
-- | Returns the config file path, without checking that the file exists.
-- The order of precedence is: input flag, CABAL_CONFIG, default location.
getConfigFilePath :: Flag FilePath -> IO FilePath
getConfigFilePath = fmap snd . getConfigFilePathAndSource
-- | As 'getConfigFilePath', but also reports which source supplied the
-- path.
getConfigFilePathAndSource :: Flag FilePath -> IO (ConfigFileSource, FilePath)
getConfigFilePathAndSource configFileFlag =
    getSource sources
  where
    -- Candidates in precedence order, each paired with an action that
    -- may yield a path.
    sources =
      [ (CommandlineOption, return . flagToMaybe $ configFileFlag)
      , (EnvironmentVariable, lookup "CABAL_CONFIG" `liftM` getEnvironment)
      , (Default, Just `liftM` defaultConfigFile) ]
    -- The first candidate producing a 'Just' wins; the Default entry
    -- always produces one, so the empty case is unreachable.
    getSource [] = error "no config file path candidate found."
    getSource ((source,action): xs) =
      action >>= maybe (getSource xs) (return . (,) source)
-- | Read and parse the config file, returning 'Nothing' when the file
-- does not exist.  Any other IO error is re-thrown.
readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig))
readConfigFile initial file = handleNotExists $
  fmap (Just . parseConfig (ConstraintSourceMainConfig file) initial)
       (readFile file)
  where
    handleNotExists action = catchIO action $ \ioe ->
      if isDoesNotExistError ioe
        then return Nothing
        else ioError ioe
-- | Write a brand-new config file at @filePath@: the initial values
-- ('initialSavedConfig') with the library defaults shown as comments
-- ('commentSavedConfig').
createDefaultConfigFile :: Verbosity -> FilePath -> IO ()
createDefaultConfigFile verbosity filePath = do
  defaultsForComments <- commentSavedConfig
  firstTimeValues <- initialSavedConfig
  notice verbosity $ "Writing default configuration to " ++ filePath
  writeConfigFile filePath defaultsForComments firstTimeValues
-- | Render and write the config.  The text is written to a @.tmp@ file
-- first and then renamed over the target, so a crash mid-write cannot
-- leave a truncated config behind; the parent directory is created if
-- missing.
writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO ()
writeConfigFile file comments vals = do
  let tmpFile = file <.> "tmp"
  createDirectoryIfMissing True (takeDirectory file)
  writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n"
  renameFile tmpFile file
  where
    -- Header banner prepended to every generated config file.
    explanation = unlines
      ["-- This is the configuration file for the 'cabal' command line tool."
      ,""
      ,"-- The available configuration options are listed below."
      ,"-- Some of them have default values listed."
      ,""
      ,"-- Lines (like this one) beginning with '--' are comments."
      ,"-- Be careful with spaces and indentation because they are"
      ,"-- used to indicate layout for nested sections."
      ,""
      ,"-- Cabal library version: " ++ showVersion cabalVersion
      ,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version
      ,"",""
      ]
-- | These are the default values that get used in Cabal if a no value is
-- given. We use these here to include in comments when we write out the
-- initial config file so that the user can see what default value they are
-- overriding.
--
-- | The library-default values for every section, used only to render
-- the commented-out defaults in a freshly generated config file.
commentSavedConfig :: IO SavedConfig
commentSavedConfig = do
  userInstallDirs <- defaultInstallDirs defaultCompiler True True
  globalInstallDirs <- defaultInstallDirs defaultCompiler False True
  return SavedConfig {
    savedGlobalFlags = defaultGlobalFlags,
    savedInstallFlags = defaultInstallFlags,
    savedConfigureExFlags = defaultConfigExFlags,
    savedConfigureFlags = (defaultConfigFlags defaultProgramConfiguration) {
      configUserInstall = toFlag defaultUserInstall,
      configAllowNewer = Just (AllowNewer RelaxDepsNone)
      },
    savedUserInstallDirs = fmap toFlag userInstallDirs,
    savedGlobalInstallDirs = fmap toFlag globalInstallDirs,
    savedUploadFlags = commandDefaultFlags uploadCommand,
    savedReportFlags = commandDefaultFlags reportCommand,
    savedHaddockFlags = defaultHaddockFlags
    }
-- | All config file fields.
--
configFieldDescriptions :: ConstraintSource -> [FieldDescr SavedConfig]
configFieldDescriptions src =
toSavedConfig liftGlobalFlag
(commandOptions (globalCommand []) ParseArgs)
["version", "numeric-version", "config-file", "sandbox-config-file"] []
++ toSavedConfig liftConfigFlag
(configureOptions ParseArgs)
(["builddir", "constraint", "dependency", "ipid"]
++ map fieldName installDirsFields)
-- This is only here because viewAsFieldDescr gives us a parser
-- that only recognises 'ghc' etc, the case-sensitive flag names, not
-- what the normal case-insensitive parser gives us.
[simpleField "compiler"
(fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse)
configHcFlavor (\v flags -> flags { configHcFlavor = v })
,let showAllowNewer Nothing = mempty
showAllowNewer (Just (AllowNewer RelaxDepsNone)) = Disp.text "False"
showAllowNewer (Just _) = Disp.text "True"
toAllowNewer True = Just (AllowNewer RelaxDepsAll)
toAllowNewer False = Just (AllowNewer RelaxDepsNone)
pkgs = (Just . AllowNewer . RelaxDepsSome) `fmap` parseOptCommaList Text.parse
parseAllowNewer = (toAllowNewer `fmap` Text.parse) Parse.<++ pkgs in
simpleField "allow-newer"
showAllowNewer parseAllowNewer
configAllowNewer (\v flags -> flags { configAllowNewer = v })
-- TODO: The following is a temporary fix. The "optimization"
-- and "debug-info" fields are OptArg, and viewAsFieldDescr
-- fails on that. Instead of a hand-written hackaged parser
-- and printer, we should handle this case properly in the
-- library.
,liftField configOptimization (\v flags ->
flags { configOptimization = v }) $
let name = "optimization" in
FieldDescr name
(\f -> case f of
Flag NoOptimisation -> Disp.text "False"
Flag NormalOptimisation -> Disp.text "True"
Flag MaximumOptimisation -> Disp.text "2"
_ -> Disp.empty)
(\line str _ -> case () of
_ | str == "False" -> ParseOk [] (Flag NoOptimisation)
| str == "True" -> ParseOk [] (Flag NormalOptimisation)
| str == "0" -> ParseOk [] (Flag NoOptimisation)
| str == "1" -> ParseOk [] (Flag NormalOptimisation)
| str == "2" -> ParseOk [] (Flag MaximumOptimisation)
| lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation)
| lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation)
| otherwise -> ParseFailed (NoParse name line)
where
lstr = lowercase str
caseWarning = PWarning $
"The '" ++ name
++ "' field is case sensitive, use 'True' or 'False'.")
,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $
let name = "debug-info" in
FieldDescr name
(\f -> case f of
Flag NoDebugInfo -> Disp.text "False"
Flag MinimalDebugInfo -> Disp.text "1"
Flag NormalDebugInfo -> Disp.text "True"
Flag MaximalDebugInfo -> Disp.text "3"
_ -> Disp.empty)
(\line str _ -> case () of
_ | str == "False" -> ParseOk [] (Flag NoDebugInfo)
| str == "True" -> ParseOk [] (Flag NormalDebugInfo)
| str == "0" -> ParseOk [] (Flag NoDebugInfo)
| str == "1" -> ParseOk [] (Flag MinimalDebugInfo)
| str == "2" -> ParseOk [] (Flag NormalDebugInfo)
| str == "3" -> ParseOk [] (Flag MaximalDebugInfo)
| lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo)
| lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo)
| otherwise -> ParseFailed (NoParse name line)
where
lstr = lowercase str
caseWarning = PWarning $
"The '" ++ name
++ "' field is case sensitive, use 'True' or 'False'.")
]
++ toSavedConfig liftConfigExFlag
(configureExOptions ParseArgs src)
[] []
++ toSavedConfig liftInstallFlag
(installOptions ParseArgs)
["dry-run", "only", "only-dependencies", "dependencies-only"] []
++ toSavedConfig liftUploadFlag
(commandOptions uploadCommand ParseArgs)
["verbose", "check", "documentation", "publish"] []
++ toSavedConfig liftReportFlag
(commandOptions reportCommand ParseArgs)
["verbose", "username", "password"] []
--FIXME: this is a hack, hiding the user name and password.
-- But otherwise it masks the upload ones. Either need to
-- share the options or make then distinct. In any case
-- they should probably be per-server.
++ [ viewAsFieldDescr
$ optionDistPref
(configDistPref . savedConfigureFlags)
(\distPref config ->
config
{ savedConfigureFlags = (savedConfigureFlags config) {
configDistPref = distPref }
, savedHaddockFlags = (savedHaddockFlags config) {
haddockDistPref = distPref }
}
)
ParseArgs
]
where
toSavedConfig lift options exclusions replacements =
[ lift (fromMaybe field replacement)
| opt <- options
, let field = viewAsFieldDescr opt
name = fieldName field
replacement = find ((== name) . fieldName) replacements
, name `notElem` exclusions ]
optional = Parse.option mempty . fmap toFlag
-- TODO: next step, make the deprecated fields elicit a warning.
--
-- | Descriptions of legacy config-file fields that are still accepted
-- for backwards compatibility: @repos@, @cachedir@,
-- @hackage-username@\/@hackage-password@\/@hackage-password-command@,
-- plus the old @user-@\/@global-@ prefixed install-dirs field names.
deprecatedFieldDescriptions :: [FieldDescr SavedConfig]
deprecatedFieldDescriptions =
  [ liftGlobalFlag $
    listField "repos"
      (Disp.text . showRepo) parseRepo
      (fromNubList . globalRemoteRepos)
      (\rs cfg -> cfg { globalRemoteRepos = toNubList rs })
  , liftGlobalFlag $
    simpleField "cachedir"
      (Disp.text . fromFlagOrDefault "") (optional parseFilePathQ)
      globalCacheDir (\d cfg -> cfg { globalCacheDir = d })
  , liftUploadFlag $
    simpleField "hackage-username"
      (Disp.text . fromFlagOrDefault "" . fmap unUsername)
      (optional (fmap Username parseTokenQ))
      uploadUsername (\d cfg -> cfg { uploadUsername = d })
  , liftUploadFlag $
    simpleField "hackage-password"
      (Disp.text . fromFlagOrDefault "" . fmap unPassword)
      (optional (fmap Password parseTokenQ))
      uploadPassword (\d cfg -> cfg { uploadPassword = d })
  , liftUploadFlag $
    spaceListField "hackage-password-command"
      Disp.text parseTokenQ
      (fromFlagOrDefault [] . uploadPasswordCmd)
      (\d cfg -> cfg { uploadPasswordCmd = Flag d })
  ]
  -- the old install-dirs fields carried explicit "user-"/"global-" prefixes
  ++ map (modifyFieldName ("user-"++) . liftUserInstallDirs) installDirsFields
  ++ map (modifyFieldName ("global-"++) . liftGlobalInstallDirs) installDirsFields
  where
    -- a missing value parses to mempty (i.e. NoFlag) rather than failing
    optional = Parse.option mempty . fmap toFlag

    -- rename a field description, used to add the "user-"/"global-" prefixes
    modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a
    modifyFieldName f d = d { fieldName = f (fieldName d) }
-- The following helpers lift a 'FieldDescr' over one component of
-- 'SavedConfig' (install dirs or one of the flag records) so the
-- component's fields can be read and written through the whole config.

liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate))
                    -> FieldDescr SavedConfig
liftUserInstallDirs = liftField
  savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags })

liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate))
                      -> FieldDescr SavedConfig
liftGlobalInstallDirs = liftField
  savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags })

liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig
liftGlobalFlag = liftField
  savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags })

liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig
liftConfigFlag = liftField
  savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags })

liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig
liftConfigExFlag = liftField
  savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags })

liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig
liftInstallFlag = liftField
  savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags })

liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig
liftUploadFlag = liftField
  savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags })

liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig
liftReportFlag = liftField
  savedReportFlags (\flags conf -> conf { savedReportFlags = flags })
-- | Parse the text of a config file into a 'SavedConfig', layered on
-- top of the given @initial@ config. Named sections (@repository@,
-- @haddock@, @install-dirs@, @program-locations@,
-- @program-default-options@) and the legacy @remote-repo@ field are
-- split off and folded separately from the plain field assignments.
parseConfig :: ConstraintSource
            -> SavedConfig
            -> String
            -> ParseResult SavedConfig
parseConfig src initial = \str -> do
  fields <- readFields str
  let (knownSections, others) = partition isKnownSection fields
  config <- parse others
  let user0 = savedUserInstallDirs config
      global0 = savedGlobalInstallDirs config
  (remoteRepoSections0, haddockFlags, user, global, paths, args) <-
    foldM parseSections
          ([], savedHaddockFlags config, user0, global0, [], [])
          knownSections

  -- later sections override earlier ones with the same repo name:
  -- nubBy keeps the first of each name, and the fold above accumulated
  -- sections in reverse order, so reverse restores file order.
  let remoteRepoSections =
          map addInfoForKnownRepos
        . reverse
        . nubBy ((==) `on` remoteRepoName)
        $ remoteRepoSections0

  return config {
    savedGlobalFlags = (savedGlobalFlags config) {
       globalRemoteRepos = toNubList remoteRepoSections
    },
    savedConfigureFlags = (savedConfigureFlags config) {
       configProgramPaths = paths,
       configProgramArgs = args
    },
    savedHaddockFlags = haddockFlags,
    savedUserInstallDirs = user,
    savedGlobalInstallDirs = global
  }

  where
    -- which top-level items are handled by 'parseSections' rather than
    -- by the flat field parser
    isKnownSection (ParseUtils.Section _ "repository" _ _) = True
    isKnownSection (ParseUtils.F _ "remote-repo" _) = True
    isKnownSection (ParseUtils.Section _ "haddock" _ _) = True
    isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True
    isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True
    isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True
    isKnownSection _ = False

    parse = parseFields (configFieldDescriptions src
                      ++ deprecatedFieldDescriptions) initial

    parseSections (rs, h, u, g, p, a)
                  (ParseUtils.Section _ "repository" name fs) = do
      r' <- parseFields remoteRepoFields (emptyRemoteRepo name) fs
      -- sanity-check key settings, but only warn; the repo is still used
      when (remoteRepoKeyThreshold r' > length (remoteRepoRootKeys r')) $
        warning $ "'key-threshold' for repository " ++ show (remoteRepoName r')
               ++ " higher than number of keys"
      when (not (null (remoteRepoRootKeys r'))
            && remoteRepoSecure r' /= Just True) $
        warning $ "'root-keys' for repository " ++ show (remoteRepoName r')
               ++ " non-empty, but 'secure' not set to True."
      return (r':rs, h, u, g, p, a)

    -- legacy single-line "remote-repo: name:url" form
    parseSections (rs, h, u, g, p, a)
                  (ParseUtils.F lno "remote-repo" raw) = do
      let mr' = readRepo raw
      r' <- maybe (ParseFailed $ NoParse "remote-repo" lno) return mr'
      return (r':rs, h, u, g, p, a)

    parseSections accum@(rs, h, u, g, p, a)
                  (ParseUtils.Section _ "haddock" name fs)
      | name == "" = do h' <- parseFields haddockFlagsFields h fs
                        return (rs, h', u, g, p, a)
      | otherwise = do
          warning "The 'haddock' section should be unnamed"
          return accum

    parseSections accum@(rs, h, u, g, p, a)
                  (ParseUtils.Section _ "install-dirs" name fs)
      | name' == "user" = do u' <- parseFields installDirsFields u fs
                             return (rs, h, u', g, p, a)
      | name' == "global" = do g' <- parseFields installDirsFields g fs
                               return (rs, h, u, g', p, a)
      | otherwise = do
          warning "The 'install-paths' section should be for 'user' or 'global'"
          return accum
      where name' = lowercase name

    parseSections accum@(rs, h, u, g, p, a)
                  (ParseUtils.Section _ "program-locations" name fs)
      | name == "" = do p' <- parseFields withProgramsFields p fs
                        return (rs, h, u, g, p', a)
      | otherwise = do
          warning "The 'program-locations' section should be unnamed"
          return accum

    parseSections accum@(rs, h, u, g, p, a)
                  (ParseUtils.Section _ "program-default-options" name fs)
      | name == "" = do a' <- parseFields withProgramOptionsFields a fs
                        return (rs, h, u, g, p, a')
      | otherwise = do
          warning "The 'program-default-options' section should be unnamed"
          return accum

    -- anything else that 'isKnownSection' matched but no equation above
    -- handles (should not happen) is reported and skipped
    parseSections accum f = do
      warning $ "Unrecognized stanza on line " ++ show (lineNo f)
      return accum
-- | Render a 'SavedConfig' as config-file text, with no per-field
-- comments (uses an empty comment config).
showConfig :: SavedConfig -> String
showConfig = showConfigWithComments mempty
-- | Render a 'SavedConfig' as config-file text. The first argument
-- supplies default values which the pretty-printer shows as comments
-- next to the fields of the second (actual) config.
showConfigWithComments :: SavedConfig -> SavedConfig -> String
showConfigWithComments comment vals = Disp.render $
      -- repositories come first, one section each, blank-line separated
      case fmap ppRemoteRepoSection . fromNubList . globalRemoteRepos
           . savedGlobalFlags $ vals of
        [] -> Disp.text ""
        (x:xs) -> foldl' (\ r r' -> r $+$ Disp.text "" $+$ r') x xs
  $+$ Disp.text ""
  $+$ ppFields (skipSomeFields (configFieldDescriptions ConstraintSourceUnknown))
               mcomment vals
  $+$ Disp.text ""
  $+$ ppSection "haddock" "" haddockFlagsFields
                (fmap savedHaddockFlags mcomment) (savedHaddockFlags vals)
  $+$ Disp.text ""
  $+$ installDirsSection "user" savedUserInstallDirs
  $+$ Disp.text ""
  $+$ installDirsSection "global" savedGlobalInstallDirs
  $+$ Disp.text ""
  $+$ configFlagsSection "program-locations" withProgramsFields
                         configProgramPaths
  $+$ Disp.text ""
  $+$ configFlagsSection "program-default-options" withProgramOptionsFields
                         configProgramArgs
  where
    mcomment = Just comment
    installDirsSection name field =
      ppSection "install-dirs" name installDirsFields
                (fmap field mcomment) (field vals)
    configFlagsSection name fields field =
      ppSection name "" fields
                (fmap (field . savedConfigureFlags) mcomment)
                ((field . savedConfigureFlags) vals)
    -- skip fields based on field name. currently only skips "remote-repo",
    -- because that is rendered as a section. (see 'ppRemoteRepoSection'.)
    skipSomeFields = filter ((/= "remote-repo") . fieldName)
-- | Fields for the 'install-dirs' sections, derived from the
-- command-line install-dirs options.
installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))]
installDirsFields = map viewAsFieldDescr installDirsOptions
-- | Pretty-print a single @repository@ section. The default config used
-- for comment generation sets @secure: False@ so that the 'secure'
-- field is always shown explicitly.
ppRemoteRepoSection :: RemoteRepo -> Doc
ppRemoteRepoSection vals = ppSection "repository" (remoteRepoName vals)
    remoteRepoFields def vals
  where
    def = Just (emptyRemoteRepo "ignored") { remoteRepoSecure = Just False }
-- | Fields of a @repository@ section: @url@, @secure@, @root-keys@ and
-- @key-threshold@.
remoteRepoFields :: [FieldDescr RemoteRepo]
remoteRepoFields =
  [ simpleField "url"
      (text . show) (parseTokenQ >>= parseURI')
      remoteRepoURI (\x repo -> repo { remoteRepoURI = x })
  , simpleField "secure"
      showSecure (Just `fmap` Text.parse)
      remoteRepoSecure (\x repo -> repo { remoteRepoSecure = x })
  , listField "root-keys"
      text parseTokenQ
      remoteRepoRootKeys (\x repo -> repo { remoteRepoRootKeys = x })
  , simpleField "key-threshold"
      showThreshold Text.parse
      remoteRepoKeyThreshold (\x repo -> repo { remoteRepoKeyThreshold = x })
  ]
  where
    -- re-parse the quoted token as a URI; fail the field parse otherwise
    parseURI' uriString =
      case parseURI uriString of
        Nothing -> fail $ "remote-repo: no parse on " ++ show uriString
        Just uri -> return uri

    showSecure Nothing = mempty -- default 'secure' setting
    showSecure (Just True) = text "True" -- user explicitly enabled it
    showSecure (Just False) = text "False" -- user explicitly disabled it

    -- If the key-threshold is set to 0, we omit it as this is the default
    -- and it looks odd to have a value for key-threshold but not for 'secure'
    -- (note that an empty list of keys is already omitted by default, since
    -- that is what we do for all list fields)
    showThreshold 0 = mempty
    showThreshold t = text (show t)
-- | Fields for the 'haddock' section, derived from the haddock
-- command-line options, excluding options that make no sense in a
-- config file.
haddockFlagsFields :: [FieldDescr HaddockFlags]
haddockFlagsFields = [ field
                     | opt <- haddockOptions ParseArgs
                     , let field = viewAsFieldDescr opt
                           name = fieldName field
                     , name `notElem` exclusions ]
  where
    exclusions = ["verbose", "builddir", "for-hackage"]
-- | Fields for the 'program-locations' section: one @<prog>-location@
-- field per known program.
withProgramsFields :: [FieldDescr [(String, FilePath)]]
withProgramsFields =
  map viewAsFieldDescr $
  programConfigurationPaths' (++ "-location") defaultProgramConfiguration
                             ParseArgs id (++)
-- | Fields for the 'program-default-options' section: default extra
-- arguments per known program.
withProgramOptionsFields :: [FieldDescr [(String, [String])]]
withProgramOptionsFields =
  map viewAsFieldDescr $
  programConfigurationOptions defaultProgramConfiguration ParseArgs id (++)
-- | Get the differences (as a pseudo code diff) between the user's
-- '~/.cabal/config' and the one that cabal would generate if it didn't exist.
userConfigDiff :: GlobalFlags -> IO [String]
userConfigDiff globalFlags = do
  userConfig <- loadConfig normal (globalConfigFile globalFlags)
  testConfig <- liftM2 mappend baseSavedConfig initialSavedConfig
  -- Render both configs to key/value pairs, join them by key, and turn
  -- each joined entry into "+"/"-" diff lines.
  return $ reverse . foldl' createDiff [] . M.toList
         $ M.unionWith combine
             (M.fromList . map justFst $ filterShow testConfig)
             (M.fromList . map justSnd $ filterShow userConfig)
  where
    -- tag values by which side of the diff they came from
    justFst (a, b) = (a, (Just b, Nothing))
    justSnd (a, b) = (a, (Nothing, Just b))

    -- merging only ever combines one left-tagged and one right-tagged
    -- entry for the same key; any other combination is a logic error
    combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b)
    combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b)
    combine x y = error $ "Can't happen : userConfigDiff "
                       ++ show x ++ " " ++ show y

    createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String]
    createDiff acc (key, (Just a, Just b))
      | a == b = acc
      | otherwise = ("+ " ++ key ++ ": " ++ b)
                  : ("- " ++ key ++ ": " ++ a) : acc
    createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc
    createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc
    createDiff acc (_, (Nothing, Nothing)) = acc

    -- render a config and keep only the "key: value" lines, stripped of
    -- comments and whitespace
    filterShow :: SavedConfig -> [(String, String)]
    filterShow cfg = map keyValueSplit
                   . filter (\s -> not (null s) && any (== ':') s)
                   . map nonComment
                   . lines
                   $ showConfig cfg

    -- truncate a line at the first "--" comment marker
    nonComment [] = []
    nonComment ('-':'-':_) = []
    nonComment (x:xs) = x : nonComment xs

    -- strip leading and trailing whitespace
    topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace

    -- split "key: value" at the first ':'
    keyValueSplit s =
      let (left, right) = break (== ':') s
      in (topAndTail left, topAndTail (drop 1 right))
-- | Update the user's ~/.cabal/config' keeping the user's customizations.
--
-- The existing file is first renamed to @<config>.backup@, then the
-- merged config (fresh defaults overridden by the user's settings) is
-- written back with default-value comments.
userConfigUpdate :: Verbosity -> GlobalFlags -> IO ()
userConfigUpdate verbosity globalFlags = do
  -- Fix: honour the caller-supplied verbosity when loading the config
  -- (previously hard-coded to 'normal', ignoring the parameter).
  userConfig <- loadConfig verbosity (globalConfigFile globalFlags)
  newConfig <- liftM2 mappend baseSavedConfig initialSavedConfig
  commentConf <- commentSavedConfig
  cabalFile <- getConfigFilePath $ globalConfigFile globalFlags
  let backup = cabalFile ++ ".backup"
  notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "."
  renameFile cabalFile backup
  notice verbosity $ "Writing merged config to " ++ cabalFile ++ "."
  writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig)
|
kolmodin/cabal
|
cabal-install/Distribution/Client/Config.hs
|
bsd-3-clause
| 46,598
| 0
| 27
| 12,668
| 9,747
| 5,243
| 4,504
| 827
| 13
|
import Disorder.Core.Main
import qualified Test.Mismi.EC2.Core.Data
-- | Test-suite entry point: run the EC2 core data tests through
-- disorder's test runner.
main :: IO ()
main = disorderMain allTests
  where
    allTests = [Test.Mismi.EC2.Core.Data.tests]
|
ambiata/mismi
|
mismi-ec2-core/test/test.hs
|
bsd-3-clause
| 161
| 0
| 7
| 39
| 48
| 28
| 20
| 6
| 1
|
module QueryArrow.ElasticSearch.ICAT where
-- http://swizec.com/blog/writing-a-rest-client-in-haskell/swizec/6152
import Prelude hiding (lookup)
import Data.Map.Strict hiding (map, elemAt)
import Data.Text (pack)
import QueryArrow.Syntax.Term
import QueryArrow.Syntax.Type
import QueryArrow.DB.GenericDatabase
import QueryArrow.DB.NoConnection
import qualified QueryArrow.ElasticSearch.Query as ESQ
import QueryArrow.ElasticSearch.ESQL
import QueryArrow.ElasticSearch.ElasticSearchConnection
-- | The ES_META object predicate in namespace @ns@, with five
-- parameters: two Int64 (per the column list in
-- 'makeElasticSearchDBAdapter', presumably obj_id and meta_id —
-- confirm against caller) and three Text parameters.
esMetaPred :: String -> Pred
esMetaPred ns = Pred (esMetaPredName ns) (PredType ObjectPred [ParamType True True True False Int64Type, ParamType True True True False Int64Type, ParamType False True True False TextType, ParamType False True True False TextType, ParamType False True True False TextType])
-- | Qualified name of the ES_META predicate within namespace @ns@.
esMetaPredName :: String -> PredName
esMetaPredName ns = QPredName ns [] "ES_META"
-- | Build a connectionless database adapter for ElasticSearch exposing
-- the single ES_META predicate, mapped to the "ES_META" index with
-- columns obj_id, meta_id, attribute, value and unit.
-- NOTE(review): the second and third String arguments are ignored —
-- confirm whether they are reserved for future use.
makeElasticSearchDBAdapter :: String -> String -> String -> ESQ.ElasticSearchConnInfo -> IO (NoConnectionDatabase (GenericDatabase ESTrans ElasticSearchDB))
makeElasticSearchDBAdapter ns _ _ conn = return (NoConnectionDatabase (GenericDatabase (ESTrans (constructPredTypeMap [esMetaPred ns]) (fromList [(esMetaPredName ns, (pack "ES_META", [pack "obj_id", pack "meta_id", pack "attribute", pack "value", pack "unit"]))])) conn ns [esMetaPred ns]))
|
xu-hao/QueryArrow
|
QueryArrow-db-elastic/src/QueryArrow/ElasticSearch/ICAT.hs
|
bsd-3-clause
| 1,333
| 0
| 18
| 143
| 377
| 205
| 172
| 17
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE Rank2Types #-}
--
-- RNG/Peripheral.hs --- Random Number Generator Peripheral Driver
--
-- Copyright (C) 2015, Galois, Inc.
-- All Rights Reserved.
--
module Ivory.BSP.STM32.Peripheral.RNG.Peripheral where
import Ivory.Language
import Ivory.HW
import Ivory.BSP.STM32.Interrupt
import Ivory.BSP.STM32.Peripheral.RNG.Regs
-- Convenience type synonyms
-- | Handle for an STM32 RNG peripheral: its three registers, its
-- interrupt, and actions to enable/disable its clock via RCC.
data RNG = RNG
  { rngRegCR :: BitDataReg RNG_CR                -- ^ control register
  , rngRegSR :: BitDataReg RNG_SR                -- ^ status register
  , rngRegDR :: BitDataReg RNG_DR                -- ^ data register
  , rngInterrupt :: HasSTM32Interrupt            -- ^ RNG interrupt
  , rngRCCEnable :: forall eff . Ivory eff ()    -- ^ enable peripheral clock
  , rngRCCDisable :: forall eff . Ivory eff ()   -- ^ disable peripheral clock
  }
-- | Create an RNG given the base register address.
--
-- Registers are laid out at fixed offsets from the base: CR at 0x00,
-- SR at 0x04, DR at 0x08.
mkRNG :: (STM32Interrupt i)
      => Integer                    -- ^ base register address
      -> (forall eff . Ivory eff ()) -- ^ RCC enable action
      -> (forall eff . Ivory eff ()) -- ^ RCC disable action
      -> i                          -- ^ RNG interrupt
      -> RNG
mkRNG base rccen rccdis interrupt =
  RNG
    { rngRegCR = reg 0x00 "cr"
    , rngRegSR = reg 0x04 "sr"
    , rngRegDR = reg 0x08 "dr"
    , rngInterrupt = HasSTM32Interrupt interrupt
    , rngRCCEnable = rccen
    , rngRCCDisable = rccdis
    }
  where
  -- named register at the given offset from the peripheral base,
  -- labelled "rng-><name>" for debug output
  reg :: (IvoryIOReg (BitDataRep d)) => Integer -> String -> BitDataReg d
  reg offs name = mkBitDataRegNamed (base + offs) ("rng->" ++ name)
|
GaloisInc/ivory-tower-stm32
|
ivory-bsp-stm32/src/Ivory/BSP/STM32/Peripheral/RNG/Peripheral.hs
|
bsd-3-clause
| 1,404
| 0
| 12
| 372
| 318
| 185
| 133
| 32
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.LHC
-- Copyright : Isaac Jones 2003-2007
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is a fairly large module. It contains most of the GHC-specific code for
-- configuring, building and installing packages. It also exports a function
-- for finding out what packages are already installed. Configuring involves
-- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions
-- this version of ghc supports and returning a 'Compiler' value.
--
-- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out
-- what packages are installed.
--
-- Building is somewhat complex as there is quite a bit of information to take
-- into account. We have to build libs and programs, possibly for profiling and
-- shared libs. We have to support building libraries that will be usable by
-- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files
-- using ghc. Linking, especially for @split-objs@ is remarkably complex,
-- partly because there tend to be 1,000's of @.o@ files and this can often be
-- more than we can pass to the @ld@ or @ar@ programs in one go.
--
-- Installing for libs and exes involves finding the right files and copying
-- them to the right places. One of the more tricky things about this module is
-- remembering the layout of files in the build directory (which is not
-- explicitly documented) and thus what search dirs are used for various kinds
-- of files.
module Distribution.Simple.LHC (
configure, getInstalledPackages,
buildLib, buildExe,
installLib, installExe,
registerPackage,
hcPkgInfo,
ghcOptions,
ghcVerbosityOptions
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Types.UnqualComponentName
import Distribution.PackageDescription as PD hiding (Flag)
import Distribution.InstalledPackageInfo
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
import Distribution.Simple.PackageIndex
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Package
import qualified Distribution.ModuleName as ModuleName
import Distribution.Simple.Program
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import Distribution.Simple.Compiler
import Distribution.Version
import Distribution.Verbosity
import Distribution.Text
import Distribution.Compat.Exception
import Distribution.System
import Language.Haskell.Extension
import qualified Data.Map as Map ( empty )
import System.Directory ( removeFile, renameFile,
getDirectoryContents, doesFileExist,
getTemporaryDirectory )
import System.FilePath ( (</>), (<.>), takeExtension,
takeDirectory, replaceExtension )
import System.IO (hClose, hPutStrLn)
-- -----------------------------------------------------------------------------
-- Configuring
-- | Find and probe the @lhc@ and @lhc-pkg@ programs (both must be
-- version >= 0.7 and agree on version), query supported languages and
-- extensions, and configure the gcc/ld toolchain.
configure :: Verbosity -> Maybe FilePath -> Maybe FilePath
          -> ProgramDb -> IO (Compiler, Maybe Platform, ProgramDb)
configure verbosity hcPath hcPkgPath progdb = do
  (lhcProg, lhcVersion, progdb') <-
    requireProgramVersion verbosity lhcProgram
      (orLaterVersion (mkVersion [0,7]))
      (userMaybeSpecifyPath "lhc" hcPath progdb)

  (lhcPkgProg, lhcPkgVersion, progdb'') <-
    requireProgramVersion verbosity lhcPkgProgram
      (orLaterVersion (mkVersion [0,7]))
      (userMaybeSpecifyPath "lhc-pkg" hcPkgPath progdb')

  -- a mismatched lhc/lhc-pkg pair would give inconsistent package info
  when (lhcVersion /= lhcPkgVersion) $ die' verbosity $
       "Version mismatch between lhc and lhc-pkg: "
    ++ programPath lhcProg ++ " is version " ++ display lhcVersion ++ " "
    ++ programPath lhcPkgProg ++ " is version " ++ display lhcPkgVersion

  languages <- getLanguages verbosity lhcProg
  extensions <- getExtensions verbosity lhcProg

  let comp = Compiler {
        compilerId = CompilerId LHC lhcVersion,
        compilerAbiTag = NoAbiTag,
        compilerCompat = [],
        compilerLanguages = languages,
        compilerExtensions = extensions,
        compilerProperties = Map.empty
      }
      progdb''' = configureToolchain lhcProg progdb'' -- configure gcc and ld
      compPlatform = Nothing -- platform detection not implemented for LHC
  return (comp, compPlatform, progdb''')
-- | Adjust the way we find and configure gcc and ld
--
-- On Windows, gcc and ld are looked for in the compiler's own
-- installation tree before falling back to the search path; gcc also
-- gets extra @-B@/@-I@ arguments pointing into that tree, and ld is
-- probed for support of the @-x@ flag.
configureToolchain :: ConfiguredProgram -> ProgramDb
                   -> ProgramDb
configureToolchain lhcProg =
    addKnownProgram gccProgram {
      programFindLocation = findProg gccProgram (baseDir </> "gcc.exe"),
      programPostConf = configureGcc
    }
  . addKnownProgram ldProgram {
      programFindLocation = findProg ldProgram (gccLibDir </> "ld.exe"),
      programPostConf = configureLd
    }
  where
    compilerDir = takeDirectory (programPath lhcProg)
    baseDir = takeDirectory compilerDir
    gccLibDir = baseDir </> "gcc-lib"
    includeDir = baseDir </> "include" </> "mingw"
    isWindows = case buildOS of Windows -> True; _ -> False

    -- on Windows finding and configuring ghc's gcc and ld is a bit special
    findProg :: Program -> FilePath
             -> Verbosity -> ProgramSearchPath
             -> IO (Maybe (FilePath, [FilePath]))
    findProg prog location
      | isWindows = \verbosity searchpath -> do
          exists <- doesFileExist location
          if exists then return (Just (location, []))
            else do warn verbosity ("Couldn't find " ++ programName prog ++ " where I expected it. Trying the search path.")
                    programFindLocation prog verbosity searchpath
      | otherwise = programFindLocation prog

    configureGcc :: Verbosity -> ConfiguredProgram -> NoCallStackIO ConfiguredProgram
    configureGcc
      | isWindows = \_ gccProg -> case programLocation gccProg of
          -- if it's found on system then it means we're using the result
          -- of programFindLocation above rather than a user-supplied path
          -- that means we should add this extra flag to tell ghc's gcc
          -- where it lives and thus where gcc can find its various files:
          FoundOnSystem {} -> return gccProg {
                                programDefaultArgs = ["-B" ++ gccLibDir,
                                                      "-I" ++ includeDir]
                              }
          UserSpecified {} -> return gccProg
      | otherwise = \_ gccProg -> return gccProg

    -- we need to find out if ld supports the -x flag
    configureLd :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram
    configureLd verbosity ldProg = do
      tempDir <- getTemporaryDirectory
      -- compile a trivial object file, then check whether ld accepts
      -- "-x -r" on it; on failure (exception or nonzero exit) assume no
      ldx <- withTempFile tempDir ".c" $ \testcfile testchnd ->
             withTempFile tempDir ".o" $ \testofile testohnd -> do
               hPutStrLn testchnd "int foo() { return 0; }"
               hClose testchnd; hClose testohnd
               runProgram verbosity lhcProg ["-c", testcfile,
                                             "-o", testofile]
               withTempFile tempDir ".o" $ \testofile' testohnd' ->
                 do
                   hClose testohnd'
                   _ <- getProgramOutput verbosity ldProg
                     ["-x", "-r", testofile, "-o", testofile']
                   return True
                 `catchIO` (\_ -> return False)
                 `catchExit` (\_ -> return False)
      if ldx
        then return ldProg { programDefaultArgs = ["-x"] }
        else return ldProg
-- | Languages supported by LHC. Hard-coded to Haskell98 with no
-- language flag; the compiler is not actually queried (see the FIXME
-- below).
getLanguages :: Verbosity -> ConfiguredProgram -> NoCallStackIO [(Language, Flag)]
getLanguages _ _ = return [(Haskell98, "")]
--FIXME: does lhc support -XHaskell98 flag? from what version?
-- | Ask the compiler for its supported extensions (via
-- @--supported-languages@) and pair each with the @-X@ flag that
-- enables it.
getExtensions :: Verbosity -> ConfiguredProgram -> IO [(Extension, Flag)]
getExtensions verbosity lhcProg = do
    exts <- rawSystemStdout verbosity (programPath lhcProg)
              ["--supported-languages"]
    -- GHC has the annoying habit of inverting some of the extensions
    -- so we have to try parsing ("No" ++ ghcExtensionName) first
    let readExtension str = do
          ext <- simpleParse ("No" ++ str)
          case ext of
            UnknownExtension _ -> simpleParse str
            _ -> return ext
    return $ [ (ext, "-X" ++ display ext)
             | Just ext <- map readExtension (lines exts) ]
-- | Build an index of all installed packages across the given package
-- db stack, with any @$topdir@ path prefixes substituted for the real
-- compiler installation directory.
getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramDb
                     -> IO InstalledPackageIndex
getInstalledPackages verbosity packagedbs progdb = do
  checkPackageDbStack verbosity packagedbs
  pkgss <- getInstalledPackages' lhcPkg verbosity packagedbs progdb
  let indexes = [ PackageIndex.fromList (map (substTopDir topDir) pkgs)
                | (_, pkgs) <- pkgss ]
  return $! (mconcat indexes)

  where
    -- On Windows, various fields have $topdir/foo rather than full
    -- paths. We need to substitute the right value in so that when
    -- we, for example, call gcc, we have proper paths to give it
    Just ghcProg = lookupProgram lhcProgram progdb
    Just lhcPkg = lookupProgram lhcPkgProgram progdb
    compilerDir = takeDirectory (programPath ghcProg)
    topDir = takeDirectory compilerDir
-- | Check that the package db stack names the global db first and only
-- once; otherwise abort with an error.
checkPackageDbStack :: Verbosity -> PackageDBStack -> IO ()
checkPackageDbStack _ (GlobalPackageDB:rest)
  | GlobalPackageDB `notElem` rest = return ()
checkPackageDbStack verbosity _ =
  die' verbosity $
       "GHC.getInstalledPackages: the global package db must be "
    ++ "specified first and cannot be specified multiple times"
-- | Get the packages from specific PackageDBs, not cumulative.
--
-- Runs @lhc-pkg dump@ once per db and parses the "---"-separated
-- InstalledPackageInfo records from its output.
getInstalledPackages' :: ConfiguredProgram -> Verbosity
                      -> [PackageDB] -> ProgramDb
                      -> IO [(PackageDB, [InstalledPackageInfo])]
getInstalledPackages' lhcPkg verbosity packagedbs progdb
  =
  sequenceA
    [ do str <- getDbProgramOutput verbosity lhcPkgProgram progdb
                  ["dump", packageDbGhcPkgFlag packagedb]
           `catchExit` \_ -> die' verbosity $ "ghc-pkg dump failed"
         case parsePackages str of
           Left ok -> return (packagedb, ok)
           _ -> die' verbosity "failed to parse output of 'ghc-pkg dump'"
    | packagedb <- packagedbs ]
  where
    -- Left on full success, Right with the error messages otherwise
    parsePackages str =
      let parsed = map parseInstalledPackageInfo (splitPkgs str)
      in case [ msg | ParseFailed msg <- parsed ] of
           [] -> Left [ pkg | ParseOk _ pkg <- parsed ]
           msgs -> Right msgs

    -- split the dump output into one chunk per package, at "---" lines
    splitPkgs :: String -> [String]
    splitPkgs = map unlines . splitWith ("---" ==) . lines
      where
        splitWith :: (a -> Bool) -> [a] -> [[a]]
        splitWith p xs = ys : case zs of
                                [] -> []
                                _:ws -> splitWith p ws
          where (ys,zs) = break p xs

    packageDbGhcPkgFlag GlobalPackageDB = "--global"
    packageDbGhcPkgFlag UserPackageDB = "--user"
    packageDbGhcPkgFlag (SpecificPackageDB path) = "--" ++ packageDbFlag ++ "=" ++ path

    -- the flag name changed in ghc-pkg 7.5
    packageDbFlag
      | programVersion lhcPkg < Just (mkVersion [7,5])
      = "package-conf"
      | otherwise
      = "package-db"
-- | Replace a leading @$topdir@ in every directory/file field of an
-- 'InstalledPackageInfo' with the actual compiler top directory.
substTopDir :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo
substTopDir topDir ipo
  = ipo {
      InstalledPackageInfo.importDirs
        = map f (InstalledPackageInfo.importDirs ipo),
      InstalledPackageInfo.libraryDirs
        = map f (InstalledPackageInfo.libraryDirs ipo),
      InstalledPackageInfo.includeDirs
        = map f (InstalledPackageInfo.includeDirs ipo),
      InstalledPackageInfo.frameworkDirs
        = map f (InstalledPackageInfo.frameworkDirs ipo),
      InstalledPackageInfo.haddockInterfaces
        = map f (InstalledPackageInfo.haddockInterfaces ipo),
      InstalledPackageInfo.haddockHTMLs
        = map f (InstalledPackageInfo.haddockHTMLs ipo)
    }
  -- only a literal "$topdir" prefix is rewritten; all other paths pass
  -- through unchanged
  where f ('$':'t':'o':'p':'d':'i':'r':rest) = topDir ++ rest
        f x = x
-- -----------------------------------------------------------------------------
-- Building
-- | Build a library with LHC.
--
-- Compiles the Haskell modules (vanilla, profiling and shared variants
-- as configured), compiles any C sources, then archives/links the
-- results into the static, profiling, GHCi and shared library files.
buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo
         -> Library -> ComponentLocalBuildInfo -> IO ()
buildLib verbosity pkg_descr lbi lib clbi = do
  let lib_name = componentUnitId clbi
      pref = componentBuildDir lbi clbi
      pkgid = packageId pkg_descr
      runGhcProg = runDbProgram verbosity lhcProgram (withPrograms lbi)
      -- conditional runners keyed off the configured library ways
      ifVanillaLib forceVanilla = when (forceVanilla || withVanillaLib lbi)
      ifProfLib = when (withProfLib lbi)
      ifSharedLib = when (withSharedLib lbi)
      ifGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi)

  libBi <- hackThreadedFlag verbosity
             (compiler lbi) (withProfLib lbi) (libBuildInfo lib)

  let libTargetDir = pref
      forceVanillaLib = EnableExtension TemplateHaskell `elem` allExtensions libBi
      -- TH always needs vanilla libs, even when building for profiling

  createDirectoryIfMissingVerbose verbosity True libTargetDir
  -- TODO: do we need to put hs-boot files into place for mutually recursive modules?
  let ghcArgs =
             ["-package-name", display pkgid ]
          ++ constructGHCCmdLine lbi libBi clbi libTargetDir verbosity
          ++ map display (allLibModules lib clbi)
      -- LHC is driven by wrapping the whole ghc command line
      lhcWrap x = ["--build-library", "--ghc-opts=" ++ unwords x]
      ghcArgsProf = ghcArgs
          ++ ["-prof",
              "-hisuf", "p_hi",
              "-osuf", "p_o"
             ]
          ++ hcProfOptions GHC libBi
      ghcArgsShared = ghcArgs
          ++ ["-dynamic",
              "-hisuf", "dyn_hi",
              "-osuf", "dyn_o", "-fPIC"
             ]
          ++ hcSharedOptions GHC libBi
  unless (null (allLibModules lib clbi)) $
    do ifVanillaLib forceVanillaLib (runGhcProg $ lhcWrap ghcArgs)
       ifProfLib (runGhcProg $ lhcWrap ghcArgsProf)
       ifSharedLib (runGhcProg $ lhcWrap ghcArgsShared)

  -- build any C sources
  unless (null (cSources libBi)) $ do
    info verbosity "Building C Sources..."
    sequence_ [do let (odir,args) = constructCcCmdLine lbi libBi clbi pref
                                      filename verbosity
                  createDirectoryIfMissingVerbose verbosity True odir
                  runGhcProg args
                  ifSharedLib (runGhcProg (args ++ ["-fPIC", "-osuf dyn_o"]))
              | filename <- cSources libBi]

  -- link:
  info verbosity "Linking..."
  let cObjs = map (`replaceExtension` objExtension) (cSources libBi)
      cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) (cSources libBi)
      cid = compilerId (compiler lbi)
      vanillaLibFilePath = libTargetDir </> mkLibName lib_name
      profileLibFilePath = libTargetDir </> mkProfLibName lib_name
      sharedLibFilePath = libTargetDir </> mkSharedLibName cid lib_name
      ghciLibFilePath = libTargetDir </> mkGHCiLibName lib_name

  -- collect the FFI stub object files (per variant) that exist
  stubObjs <- fmap catMaybes $ sequenceA
    [ findFileWithExtension [objExtension] [libTargetDir]
        (ModuleName.toFilePath x ++"_stub")
    | x <- allLibModules lib clbi ]
  stubProfObjs <- fmap catMaybes $ sequenceA
    [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir]
        (ModuleName.toFilePath x ++"_stub")
    | x <- allLibModules lib clbi ]
  stubSharedObjs <- fmap catMaybes $ sequenceA
    [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir]
        (ModuleName.toFilePath x ++"_stub")
    | x <- allLibModules lib clbi ]

  hObjs <- getHaskellObjects lib lbi clbi
             pref objExtension True
  hProfObjs <-
    if (withProfLib lbi)
      then getHaskellObjects lib lbi clbi
             pref ("p_" ++ objExtension) True
      else return []
  hSharedObjs <-
    if (withSharedLib lbi)
      then getHaskellObjects lib lbi clbi
             pref ("dyn_" ++ objExtension) False
      else return []

  unless (null hObjs && null cObjs && null stubObjs) $ do
    -- first remove library files if they exists
    sequence_
      [ removeFile libFilePath `catchIO` \_ -> return ()
      | libFilePath <- [vanillaLibFilePath, profileLibFilePath
                       ,sharedLibFilePath, ghciLibFilePath] ]

    let arVerbosity | verbosity >= deafening = "v"
                    | verbosity >= normal = ""
                    | otherwise = "c"
        arArgs = ["q"++ arVerbosity]
            ++ [vanillaLibFilePath]
        arObjArgs =
               hObjs
            ++ map (pref </>) cObjs
            ++ stubObjs
        arProfArgs = ["q"++ arVerbosity]
            ++ [profileLibFilePath]
        arProfObjArgs =
               hProfObjs
            ++ map (pref </>) cObjs
            ++ stubProfObjs
        ldArgs = ["-r"]
            ++ ["-o", ghciLibFilePath <.> "tmp"]
        ldObjArgs =
               hObjs
            ++ map (pref </>) cObjs
            ++ stubObjs
        ghcSharedObjArgs =
               hSharedObjs
            ++ map (pref </>) cSharedObjs
            ++ stubSharedObjs
        -- After the relocation lib is created we invoke ghc -shared
        -- with the dependencies spelled out as -package arguments
        -- and ghc invokes the linker with the proper library paths
        ghcSharedLinkArgs =
            [ "-no-auto-link-packages",
              "-shared",
              "-dynamic",
              "-o", sharedLibFilePath ]
            ++ ghcSharedObjArgs
            ++ ["-package-name", display pkgid ]
            ++ ghcPackageFlags lbi clbi
            ++ ["-l"++extraLib | extraLib <- extraLibs libBi]
            ++ ["-L"++extraLibDir | extraLibDir <- extraLibDirs libBi]

        runLd ldLibName args = do
          exists <- doesFileExist ldLibName
          -- This method is called iteratively by xargs. The
          -- output goes to <ldLibName>.tmp, and any existing file
          -- named <ldLibName> is included when linking. The
          -- output is renamed to <lib_name>.
          runDbProgram verbosity ldProgram (withPrograms lbi)
            (args ++ if exists then [ldLibName] else [])
          renameFile (ldLibName <.> "tmp") ldLibName

        runAr = runDbProgram verbosity arProgram (withPrograms lbi)

        --TODO: discover this at configure time or runtime on Unix
        -- The value is 32k on Windows and POSIX specifies a minimum of 4k
        -- but all sensible Unixes use more than 4k.
        -- we could use getSysVar ArgumentLimit but that's in the Unix lib
        maxCommandLineSize = 30 * 1024

    -- 'xargs' batches the (potentially thousands of) object files so no
    -- single ar/ld invocation exceeds maxCommandLineSize
    ifVanillaLib False $ xargs maxCommandLineSize
      runAr arArgs arObjArgs

    ifProfLib $ xargs maxCommandLineSize
      runAr arProfArgs arProfObjArgs

    ifGHCiLib $ xargs maxCommandLineSize
      (runLd ghciLibFilePath) ldArgs ldObjArgs

    ifSharedLib $ runGhcProg ghcSharedLinkArgs
-- | Build an executable with LHC.
--
-- Compiles the component's C sources first, then drives LHC over the main
-- module; GHC-style arguments are passed through to LHC wrapped in a single
-- @--ghc-opts"..."@ argument (see 'lhcWrap').  A profiled executable that
-- uses Template Haskell is built twice: vanilla first, then profiled.
buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo
         -> Executable -> ComponentLocalBuildInfo -> IO ()
buildExe verbosity _pkg_descr lbi
  exe@Executable { exeName = exeName', modulePath = modPath } clbi = do
  let exeName'' = unUnqualComponentName exeName'
  let pref = buildDir lbi
      runGhcProg = runDbProgram verbosity lhcProgram (withPrograms lbi)
  -- drop "-threaded" if it would clash with profiling on old ghc
  exeBi <- hackThreadedFlag verbosity
             (compiler lbi) (withProfExe lbi) (buildInfo exe)
  -- exeNameReal, the name that GHC really uses (with .exe on Windows)
  let exeNameReal = exeName'' <.>
                    (if null $ takeExtension exeName'' then exeExtension else "")
  let targetDir = pref </> exeName''
  let exeDir = targetDir </> (exeName'' ++ "-tmp")
  createDirectoryIfMissingVerbose verbosity True targetDir
  createDirectoryIfMissingVerbose verbosity True exeDir
  -- TODO: do we need to put hs-boot files into place for mutually recursive modules?
  -- FIX: what about exeName.hi-boot?
  -- build executables
  unless (null (cSources exeBi)) $ do
   info verbosity "Building C Sources."
   sequence_ [do let (odir,args) = constructCcCmdLine lbi exeBi clbi
                                          exeDir filename verbosity
                 createDirectoryIfMissingVerbose verbosity True odir
                 runGhcProg args
             | filename <- cSources exeBi]
  srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath
  let cObjs = map (`replaceExtension` objExtension) (cSources exeBi)
  -- lhc forwards ghc options as one quoted --ghc-opts"..." argument
  let lhcWrap x = ("--ghc-opts\"":x) ++ ["\""]
  -- binArgs: full argument list; linkExe chooses link vs compile-only,
  -- profExe adds the profiling suffixes and options.
  let binArgs linkExe profExe =
         (if linkExe
            then ["-o", targetDir </> exeNameReal]
            else ["-c"])
      ++ constructGHCCmdLine lbi exeBi clbi exeDir verbosity
      ++ [exeDir </> x | x <- cObjs]
      ++ [srcMainFile]
      ++ ["-optl" ++ opt | opt <- PD.ldOptions exeBi]
      ++ ["-l"++lib | lib <- extraLibs exeBi]
      ++ ["-L"++extraLibDir | extraLibDir <- extraLibDirs exeBi]
      ++ concat [["-framework", f] | f <- PD.frameworks exeBi]
      ++ if profExe
            then ["-prof",
                  "-hisuf", "p_hi",
                  "-osuf", "p_o"
                 ] ++ hcProfOptions GHC exeBi
            else []
  -- For building exe's for profiling that use TH we actually
  -- have to build twice, once without profiling and the again
  -- with profiling. This is because the code that TH needs to
  -- run at compile time needs to be the vanilla ABI so it can
  -- be loaded up and run by the compiler.
  when (withProfExe lbi && EnableExtension TemplateHaskell `elem` allExtensions exeBi)
     (runGhcProg $ lhcWrap (binArgs False False))
  runGhcProg (binArgs True (withProfExe lbi))
-- | Drop the @-threaded@ ghc flag when building a profiled component with
-- ghc-6.8 or older, where the two options are incompatible.  A warning is
-- emitted whenever the flag is removed.
hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo
hackThreadedFlag verbosity comp prof bi =
  if not needsFix
    then return bi
    else do
      warn verbosity $ "The ghc flag '-threaded' is not compatible with "
                    ++ "profiling in ghc-6.8 and older. It will be disabled."
      return bi { options = dropThreaded (options bi) }
  where
    -- Only rewrite when profiling with an affected compiler version and
    -- the flag is actually present.
    needsFix = prof && compilerVersion comp < mkVersion [6, 10]
            && "-threaded" `elem` hcOptions GHC bi
    -- Remove the flag from the GHC option list only; options for other
    -- compilers are left untouched.
    dropThreaded hcoptss =
      [ (hc, if hc == GHC then filter (/= "-threaded") opts else opts)
      | (hc, opts) <- hcoptss ]
-- | Collect the object files for a library's modules.  When @-split-objs@
-- is in effect (and the caller allows it) each module's objects live in a
-- @Module_split@ directory that we have to scan; otherwise there is exactly
-- one object file per module at a predictable path.
getHaskellObjects :: Library -> LocalBuildInfo -> ComponentLocalBuildInfo
                  -> FilePath -> String -> Bool -> NoCallStackIO [FilePath]
getHaskellObjects lib lbi clbi pref wanted_obj_ext allow_split_objs
  | splitObjs lbi && allow_split_objs = do
      let splitDirs =
            [ pref </> (ModuleName.toFilePath m ++ "_split")
            | m <- allLibModules lib clbi ]
      contents <- traverse getDirectoryContents splitDirs
      -- Keep only the entries carrying the wanted extension, qualified by
      -- the directory they were found in.
      return [ dir </> file
             | (dir, files) <- zip splitDirs contents
             , file <- files
             , takeExtension file == '.' : wanted_obj_ext ]
  | otherwise =
      return [ pref </> ModuleName.toFilePath m <.> wanted_obj_ext
             | m <- allLibModules lib clbi ]
-- | Assemble the ghc command line for a @--make@ compilation of the given
-- component: verbosity flags followed by the full option set from
-- 'ghcOptions'.
constructGHCCmdLine
  :: LocalBuildInfo
  -> BuildInfo
  -> ComponentLocalBuildInfo
  -> FilePath
  -> Verbosity
  -> [String]
constructGHCCmdLine lbi bi clbi odir verbosity =
  concat
    [ ["--make"]
    , ghcVerbosityOptions verbosity
      -- Unsupported extensions have already been checked by configure
    , ghcOptions lbi bi clbi odir
    ]
-- | Map cabal's 'Verbosity' onto ghc verbosity flags.
ghcVerbosityOptions :: Verbosity -> [String]
ghcVerbosityOptions v
  | v >= deafening = ["-v"]          -- fully verbose
  | v >= normal    = []              -- ghc's default output
  | otherwise      = ["-w", "-v0"]   -- quiet: no warnings, no progress
-- | The full ghc flag set for compiling a component's Haskell sources:
-- package visibility, module/include search paths, CPP setup, output
-- directories, package flags, optimisation and language/extension flags.
-- The order of the groups is significant and must be preserved.
ghcOptions :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo
           -> FilePath -> [String]
ghcOptions lbi bi clbi odir
     =  ["-hide-all-packages"]
     ++ ghcPackageDbOptions lbi
     ++ (if splitObjs lbi then ["-split-objs"] else [])
     ++ ["-i"]
     ++ ["-i" ++ odir]
     ++ ["-i" ++ l | l <- nub (hsSourceDirs bi)]
     ++ ["-i" ++ autogenComponentModulesDir lbi clbi]
     ++ ["-i" ++ autogenPackageModulesDir lbi]
     ++ ["-I" ++ autogenComponentModulesDir lbi clbi]
     ++ ["-I" ++ autogenPackageModulesDir lbi]
     ++ ["-I" ++ odir]
     ++ ["-I" ++ dir | dir <- PD.includeDirs bi]
     ++ ["-optP" ++ opt | opt <- cppOptions bi]
     ++ [ "-optP-include", "-optP"++ (autogenComponentModulesDir lbi clbi </> cppHeaderName) ]
     ++ [ "-#include \"" ++ inc ++ "\"" | inc <- PD.includes bi ]
     ++ [ "-odir", odir, "-hidir", odir ]
     -- -stubdir only exists from ghc-6.8 onwards
     ++ (if compilerVersion c >= mkVersion [6,8]
           then ["-stubdir", odir] else [])
     ++ ghcPackageFlags lbi clbi
     ++ (case withOptimization lbi of
           NoOptimisation      -> []
           NormalOptimisation  -> ["-O"]
           MaximumOptimisation -> ["-O2"])
     ++ hcOptions GHC bi
     ++ languageToFlags c (defaultLanguage bi)
     ++ extensionsToFlags c (usedExtensions bi)
    where c = compiler lbi
-- | Per-dependency package flags for ghc: @-package-id@ (by installed
-- package id) for ghc-6.11 and later, @-package@ (by source package id)
-- before that.
ghcPackageFlags :: LocalBuildInfo -> ComponentLocalBuildInfo -> [String]
ghcPackageFlags lbi clbi =
    concatMap flagsFor (componentPackageDeps clbi)
  where
    flagsFor (ipkgid, pkgid)
      | ghcVer >= mkVersion [6,11] = ["-package-id", display ipkgid]
      | otherwise                  = ["-package", display pkgid]
    ghcVer = compilerVersion (compiler lbi)
-- | Translate the configured package db stack into ghc flags.  The stack
-- must begin with the global db, optionally followed by the user db and
-- then specific dbs; any other shape is an internal error.
ghcPackageDbOptions :: LocalBuildInfo -> [String]
ghcPackageDbOptions lbi =
    case dbstack of
      (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs
      (GlobalPackageDB:dbs)               ->
        ("-no-user-" ++ packageDbFlag) : concatMap specific dbs
      _                                   -> ierror
  where
    dbstack = withPackageDB lbi
    specific (SpecificPackageDB db) = [ '-':packageDbFlag, db ]
    specific _                      = ierror
    ierror = error ("internal error: unexpected package db stack: " ++ show dbstack)
    -- ghc renamed --package-conf to --package-db in the 7.5 development
    -- series.
    packageDbFlag
      | compilerVersion (compiler lbi) < mkVersion [7,5] = "package-conf"
      | otherwise                                        = "package-db"
-- | Compute the output directory and ghc arguments for compiling one C
-- source file of a component.
constructCcCmdLine :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo
                   -> FilePath -> FilePath -> Verbosity -> (FilePath,[String])
constructCcCmdLine lbi bi clbi pref filename verbosity =
    (odir, args)
  where
    -- ghc 6.4.1 fixed a bug in -odir handling for C compilations, so
    -- older compilers need the source's directory appended.
    odir
      | compilerVersion (compiler lbi) >= mkVersion [6,4,1] = pref
      | otherwise = pref </> takeDirectory filename
    args = ghcCcOptions lbi bi clbi odir
        ++ (if verbosity >= deafening then ["-v"] else [])
        ++ ["-c",filename]
-- | ghc flags for compiling a component's C sources: include paths,
-- package setup, user C options, optimisation and the output directory.
ghcCcOptions :: LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo
             -> FilePath -> [String]
ghcCcOptions lbi bi clbi odir = concat
  [ [ "-I" ++ dir | dir <- PD.includeDirs bi ]
  , ghcPackageDbOptions lbi
  , ghcPackageFlags lbi clbi
  , [ "-optc" ++ opt | opt <- PD.ccOptions bi ]
  , optimisation
  , ["-odir", odir]
  ]
  where
    -- Any optimisation level at all maps to -O2 for the C compiler.
    optimisation = case withOptimization lbi of
      NoOptimisation -> []
      _              -> ["-optc-O2"]
-- | File name of the GHCi library for a component: the HS library name
-- with a plain @.o@ extension.
mkGHCiLibName :: UnitId -> String
mkGHCiLibName uid = getHSLibraryName uid <.> "o"
-- -----------------------------------------------------------------------------
-- Installing
-- | Install an executable: copy the built binary into the destination
-- directory under its prefixed/suffixed name, then strip it if requested.
installExe :: Verbosity
           -> LocalBuildInfo
           -> FilePath -- ^Where to copy the files to
           -> FilePath -- ^Build location
           -> (FilePath, FilePath) -- ^Executable (prefix,suffix)
           -> PackageDescription
           -> Executable
           -> IO ()
installExe verbosity lbi binDir buildPref (progprefix, progsuffix) _pkg exe = do
  createDirectoryIfMissingVerbose verbosity True binDir
  let exeFileName = unUnqualComponentName (exeName exe) <.> exeExtension
      -- the installed name carries the user-requested prefix and suffix
      fixedExeBaseName = progprefix ++ unUnqualComponentName (exeName exe) ++ progsuffix
      installBinary dest = do
        installExecutableFile verbosity
          (buildPref </> unUnqualComponentName (exeName exe) </> exeFileName)
          (dest <.> exeExtension)
        stripExe verbosity lbi exeFileName (dest <.> exeExtension)
  installBinary (binDir </> fixedExeBaseName)
-- | Run the platform @strip@ program over an installed executable when
-- stripping was requested.  A missing @strip@ only produces a warning —
-- and not even that on Windows, where it is normally absent.
stripExe :: Verbosity -> LocalBuildInfo -> FilePath -> FilePath -> IO ()
stripExe verbosity lbi name path =
  when (stripExes lbi) $
    case lookupProgram stripProgram (withPrograms lbi) of
      Just strip -> runProgram verbosity strip args
      Nothing
        -- Don't bother warning on windows, we don't expect them to
        -- have the strip program anyway.
        | buildOS == Windows -> return ()
        | otherwise ->
            warn verbosity $ "Unable to strip executable '" ++ name
                          ++ "' (missing the 'strip' program)"
  where
    args = path : osFlags
    -- By default, stripping the ghc binary on at least some OS X
    -- installations causes:
    --     HSbase-3.0.o: unknown symbol `_environ'"
    -- The -x flag fixes that.
    osFlags = case buildOS of
      OSX -> ["-x"]
      _   -> []
-- |Install for ghc, .hi, .a and, if --with-ghci given, .o
--
-- Copies interface files and the built library variants (vanilla,
-- profiling, GHCi, shared) into place, and runs @lhc --install-library@
-- over any @.hcr@ core files found.  Nothing is copied for a variant
-- that was not requested, or when the library has no modules and no C
-- sources at all (see 'hasLib').
installLib :: Verbosity
           -> LocalBuildInfo
           -> FilePath -- ^install location
           -> FilePath -- ^install location for dynamic libraries
           -> FilePath -- ^Build location
           -> PackageDescription
           -> Library
           -> ComponentLocalBuildInfo
           -> IO ()
installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do
  -- copy .hi files over:
  let copy src dst n = do
        createDirectoryIfMissingVerbose verbosity True dst
        installOrdinaryFile verbosity (src </> n) (dst </> n)
      copyModuleFiles ext =
        findModuleFiles [builtDir] [ext] (allLibModules lib clbi)
        >>= installOrdinaryFiles verbosity targetDir
  ifVanilla $ copyModuleFiles "hi"
  ifProf $ copyModuleFiles "p_hi"
  hcrFiles <- findModuleFiles (builtDir : hsSourceDirs (libBuildInfo lib)) ["hcr"] (allLibModules lib clbi)
  flip traverse_ hcrFiles $ \(srcBase, srcFile) -> runLhc ["--install-library", srcBase </> srcFile]
  -- copy the built library files over:
  ifVanilla $ copy builtDir targetDir vanillaLibName
  ifProf $ copy builtDir targetDir profileLibName
  ifGHCi $ copy builtDir targetDir ghciLibName
  ifShared $ copy builtDir dynlibTargetDir sharedLibName
  where
    cid = compilerId (compiler lbi)
    lib_name = componentUnitId clbi
    vanillaLibName = mkLibName lib_name
    profileLibName = mkProfLibName lib_name
    ghciLibName = mkGHCiLibName lib_name
    sharedLibName = mkSharedLibName cid lib_name
    -- There's no library to install when there are neither modules nor
    -- C sources.
    hasLib = not $ null (allLibModules lib clbi)
               && null (cSources (libBuildInfo lib))
    -- Guards combining "library exists" with the configured variants.
    ifVanilla = when (hasLib && withVanillaLib lbi)
    ifProf = when (hasLib && withProfLib lbi)
    ifGHCi = when (hasLib && withGHCiLib lbi)
    ifShared = when (hasLib && withSharedLib lbi)
    runLhc = runDbProgram verbosity lhcProgram (withPrograms lbi)
-- -----------------------------------------------------------------------------
-- Registering
-- | Register a package with @lhc-pkg@ in the given package db stack.
registerPackage
  :: Verbosity
  -> ProgramDb
  -> PackageDBStack
  -> InstalledPackageInfo
  -> HcPkg.RegisterOptions
  -> IO ()
registerPackage verbosity progdb dbStack pkgInfo opts =
  HcPkg.register (hcPkgInfo progdb) verbosity dbStack pkgInfo opts
-- | Describe the capabilities of this compiler's package tool (lhc-pkg)
-- for the generic 'HcPkg' machinery.
hcPkgInfo :: ProgramDb -> HcPkg.HcPkgInfo
hcPkgInfo progdb = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = lhcPkgProg
                                   , HcPkg.noPkgDbStack = False
                                   , HcPkg.noVerboseFlag = False
                                   , HcPkg.flagPackageConf = False
                                   , HcPkg.supportsDirDbs = True
                                   , HcPkg.requiresDirDbs = True
                                   , HcPkg.nativeMultiInstance = False -- ?
                                   , HcPkg.recacheMultiInstance = False -- ?
                                   , HcPkg.suppressFilesCheck = True
                                   }
  where
    -- Previously an irrefutable 'Just' pattern: if lhc-pkg was never
    -- configured this crashed with an opaque pattern-match failure.
    -- Fail with a clear diagnostic instead.
    lhcPkgProg = case lookupProgram lhcPkgProgram progdb of
      Just prog -> prog
      Nothing   -> error "hcPkgInfo: the lhc-pkg program is not configured"
|
mydaum/cabal
|
Cabal/Distribution/Simple/LHC.hs
|
bsd-3-clause
| 32,924
| 0
| 27
| 9,361
| 7,294
| 3,759
| 3,535
| 577
| 6
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Setup
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : lemmih@gmail.com
-- Stability : provisional
-- Portability : portable
--
--
-----------------------------------------------------------------------------
module Distribution.Client.Setup
( globalCommand, GlobalFlags(..), defaultGlobalFlags
, RepoContext(..), withRepoContext
, configureCommand, ConfigFlags(..), filterConfigureFlags
, configPackageDB', configCompilerAux'
, configureExCommand, ConfigExFlags(..), defaultConfigExFlags
, buildCommand, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..)
, replCommand, testCommand, benchmarkCommand
, configureExOptions, reconfigureCommand
, installCommand, InstallFlags(..), installOptions, defaultInstallFlags
, defaultSolver, defaultMaxBackjumps
, listCommand, ListFlags(..)
, updateCommand, UpdateFlags(..)
, upgradeCommand
, uninstallCommand
, infoCommand, InfoFlags(..)
, fetchCommand, FetchFlags(..)
, freezeCommand, FreezeFlags(..)
, genBoundsCommand
, outdatedCommand, OutdatedFlags(..), IgnoreMajorVersionBumps(..)
, getCommand, unpackCommand, GetFlags(..)
, checkCommand
, formatCommand
, uploadCommand, UploadFlags(..), IsCandidate(..)
, reportCommand, ReportFlags(..)
, runCommand
, initCommand, IT.InitFlags(..)
, sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..)
, win32SelfUpgradeCommand, Win32SelfUpgradeFlags(..)
, actAsSetupCommand, ActAsSetupFlags(..)
, sandboxCommand, defaultSandboxLocation, SandboxFlags(..)
, execCommand, ExecFlags(..)
, userConfigCommand, UserConfigFlags(..)
, manpageCommand
, parsePackageArgs
--TODO: stop exporting these:
, showRepo
, parseRepo
, readRepo
) where
import Prelude ()
import Distribution.Client.Compat.Prelude hiding (get)
import Distribution.Client.Types
( Username(..), Password(..), RemoteRepo(..)
, AllowNewer(..), AllowOlder(..), RelaxDeps(..)
)
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.Dependency.Types
( PreSolver(..) )
import Distribution.Client.IndexUtils.Timestamp
( IndexState(..) )
import qualified Distribution.Client.Init.Types as IT
( InitFlags(..), PackageType(..) )
import Distribution.Client.Targets
( UserConstraint, readUserConstraint )
import Distribution.Utils.NubList
( NubList, toNubList, fromNubList)
import Distribution.Solver.Types.ConstraintSource
import Distribution.Solver.Types.Settings
import Distribution.Simple.Compiler ( Compiler, PackageDB, PackageDBStack )
import Distribution.Simple.Program (ProgramDb, defaultProgramDb)
import Distribution.Simple.Command hiding (boolOpt, boolOpt')
import qualified Distribution.Simple.Command as Command
import Distribution.Simple.Configure
( configCompilerAuxEx, interpretPackageDbFlags, computeEffectiveProfiling )
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Setup
( ConfigFlags(..), BuildFlags(..), ReplFlags
, TestFlags(..), BenchmarkFlags(..)
, SDistFlags(..), HaddockFlags(..)
, readPackageDbList, showPackageDbList
, Flag(..), toFlag, flagToMaybe, flagToList, maybeToFlag
, BooleanFlag(..), optionVerbosity
, boolOpt, boolOpt', trueArg, falseArg
, optionNumJobs )
import Distribution.Simple.InstallDirs
( PathTemplate, InstallDirs(..)
, toPathTemplate, fromPathTemplate, combinePathTemplate )
import Distribution.Version
( Version, mkVersion, nullVersion, anyVersion, thisVersion )
import Distribution.Package
( PackageIdentifier, PackageName, packageName, packageVersion )
import Distribution.Types.Dependency
import Distribution.PackageDescription
( BuildType(..), RepoKind(..) )
import Distribution.System ( Platform )
import Distribution.Text
( Text(..), display )
import Distribution.ReadE
( ReadE(..), readP_to_E, succeedReadE )
import qualified Distribution.Compat.ReadP as Parse
( ReadP, char, munch1, pfail, sepBy1, (+++) )
import Distribution.ParseUtils
( readPToMaybe )
import Distribution.Verbosity
( Verbosity, lessVerbose, normal, verboseNoFlags, verboseNoTimestamp )
import Distribution.Simple.Utils
( wrapText, wrapLine )
import Distribution.Client.GlobalFlags
( GlobalFlags(..), defaultGlobalFlags
, RepoContext(..), withRepoContext
)
import Data.List
( deleteFirstsBy )
import System.FilePath
( (</>) )
import Network.URI
( parseAbsoluteURI, uriToString )
-- | The top-level command-line UI: global flags plus the categorised
-- listing of all sub-commands shown by @--help@.  Commands not named in
-- the manual listing below are appended under an "other" group so new
-- commands never silently disappear from the help text.
globalCommand :: [Command action] -> CommandUI GlobalFlags
globalCommand commands = CommandUI {
    commandName = "",
    commandSynopsis =
      "Command line interface to the Haskell Cabal infrastructure.",
    commandUsage = \pname ->
      "See http://www.haskell.org/cabal/ for more information.\n"
      ++ "\n"
      ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n",
    commandDescription = Just $ \pname ->
      let
        commands' = commands ++ [commandAddAction helpCommandUI undefined]
        cmdDescs = getNormalCommandDescriptions commands'
        -- if new commands are added, we want them to appear even if they
        -- are not included in the custom listing below. Thus, we calculate
        -- the `otherCmds` list and append it under the `other` category.
        -- Alternatively, a new testcase could be added that ensures that
        -- the set of commands listed here is equal to the set of commands
        -- that are actually available.
        otherCmds = deleteFirstsBy (==) (map fst cmdDescs)
          [ "help"
          , "update"
          , "install"
          , "fetch"
          , "list"
          , "info"
          , "user-config"
          , "get"
          , "init"
          , "configure"
          , "reconfigure"
          , "build"
          , "clean"
          , "run"
          , "repl"
          , "test"
          , "bench"
          , "check"
          , "sdist"
          , "upload"
          , "report"
          , "freeze"
          , "gen-bounds"
          , "outdated"
          , "doctest"
          , "haddock"
          , "hscolour"
          , "copy"
          , "register"
          , "sandbox"
          , "exec"
          , "new-build"
          , "new-configure"
          , "new-repl"
          , "new-freeze"
          , "new-run"
          , "new-test"
          , "new-bench"
          , "new-haddock"
          ]
        -- layout helpers: pad names to a column, start a "[group]" header,
        -- and look up each command's one-line description.
        maxlen = maximum $ [length name | (name, _) <- cmdDescs]
        align str = str ++ replicate (maxlen - length str) ' '
        startGroup n = " ["++n++"]"
        par = ""
        addCmd n = case lookup n cmdDescs of
                     Nothing -> ""
                     Just d -> "  " ++ align n ++ "    " ++ d
        addCmdCustom n d = case lookup n cmdDescs of -- make sure that the
                                                     -- command still exists.
                     Nothing -> ""
                     Just _ -> "  " ++ align n ++ "    " ++ d
      in
         "Commands:\n"
      ++ unlines (
        [ startGroup "global"
        , addCmd "update"
        , addCmd "install"
        , par
        , addCmd "help"
        , addCmd "info"
        , addCmd "list"
        , addCmd "fetch"
        , addCmd "user-config"
        , par
        , startGroup "package"
        , addCmd "get"
        , addCmd "init"
        , par
        , addCmd "configure"
        , addCmd "build"
        , addCmd "clean"
        , par
        , addCmd "run"
        , addCmd "repl"
        , addCmd "test"
        , addCmd "bench"
        , par
        , addCmd "check"
        , addCmd "sdist"
        , addCmd "upload"
        , addCmd "report"
        , par
        , addCmd "freeze"
        , addCmd "gen-bounds"
        , addCmd "outdated"
        , addCmd "doctest"
        , addCmd "haddock"
        , addCmd "hscolour"
        , addCmd "copy"
        , addCmd "register"
        , addCmd "reconfigure"
        , par
        , startGroup "sandbox"
        , addCmd "sandbox"
        , addCmd "exec"
        , addCmdCustom "repl" "Open interpreter with access to sandbox packages."
        , par
        , startGroup "new-style projects (beta)"
        , addCmd "new-build"
        , addCmd "new-configure"
        , addCmd "new-repl"
        , addCmd "new-run"
        , addCmd "new-test"
        , addCmd "new-bench"
        , addCmd "new-freeze"
        , addCmd "new-haddock"
        ] ++ if null otherCmds then [] else par
                                           :startGroup "other"
                                           :[addCmd n | n <- otherCmds])
      ++ "\n"
      ++ "For more information about a command use:\n"
      ++ "   " ++ pname ++ " COMMAND --help\n"
      ++ "or " ++ pname ++ " help COMMAND\n"
      ++ "\n"
      ++ "To install Cabal packages from hackage use:\n"
      ++ "  " ++ pname ++ " install foo [--dry-run]\n"
      ++ "\n"
      ++ "Occasionally you need to update the list of available packages:\n"
      ++ "  " ++ pname ++ " update\n",
    commandNotes = Nothing,
    commandDefaultFlags = mempty,
    commandOptions = args
  }
  where
    -- hidden options are accepted when parsing but omitted from --help
    args :: ShowOrParseArgs -> [OptionField GlobalFlags]
    args ShowArgs = argsShown
    args ParseArgs = argsShown ++ argsNotShown
    -- arguments we want to show in the help
    argsShown :: [OptionField GlobalFlags]
    argsShown = [
       option ['V'] ["version"]
         "Print version information"
         globalVersion (\v flags -> flags { globalVersion = v })
         trueArg
      ,option [] ["numeric-version"]
         "Print just the version number"
         globalNumericVersion (\v flags -> flags { globalNumericVersion = v })
         trueArg
      ,option [] ["config-file"]
         "Set an alternate location for the config file"
         globalConfigFile (\v flags -> flags { globalConfigFile = v })
         (reqArgFlag "FILE")
      ,option [] ["sandbox-config-file"]
         "Set an alternate location for the sandbox config file (default: './cabal.sandbox.config')"
         globalSandboxConfigFile (\v flags -> flags { globalSandboxConfigFile = v })
         (reqArgFlag "FILE")
      ,option [] ["default-user-config"]
         "Set a location for a cabal.config file for projects without their own cabal.config freeze file."
         globalConstraintsFile (\v flags -> flags {globalConstraintsFile = v})
         (reqArgFlag "FILE")
      ,option [] ["require-sandbox"]
         "requiring the presence of a sandbox for sandbox-aware commands"
         globalRequireSandbox (\v flags -> flags { globalRequireSandbox = v })
         (boolOpt' ([], ["require-sandbox"]) ([], ["no-require-sandbox"]))
      ,option [] ["ignore-sandbox"]
         "Ignore any existing sandbox"
         globalIgnoreSandbox (\v flags -> flags { globalIgnoreSandbox = v })
         trueArg
      ,option [] ["ignore-expiry"]
         "Ignore expiry dates on signed metadata (use only in exceptional circumstances)"
         globalIgnoreExpiry (\v flags -> flags { globalIgnoreExpiry = v })
         trueArg
      ,option [] ["http-transport"]
         "Set a transport for http(s) requests. Accepts 'curl', 'wget', 'powershell', and 'plain-http'. (default: 'curl')"
         globalHttpTransport (\v flags -> flags { globalHttpTransport = v })
         (reqArgFlag "HttpTransport")
      ,option [] ["nix"]
         "Nix integration: run commands through nix-shell if a 'shell.nix' file exists"
         globalNix (\v flags -> flags { globalNix = v })
         (boolOpt [] [])
      ]
    -- arguments we don't want shown in the help
    argsNotShown :: [OptionField GlobalFlags]
    argsNotShown = [
       option [] ["remote-repo"]
         "The name and url for a remote repository"
         globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v })
         (reqArg' "NAME:URL" (toNubList . maybeToList . readRepo) (map showRepo . fromNubList))
      ,option [] ["remote-repo-cache"]
         "The location where downloads from all remote repos are cached"
         globalCacheDir (\v flags -> flags { globalCacheDir = v })
         (reqArgFlag "DIR")
      ,option [] ["local-repo"]
         "The location of a local repository"
         globalLocalRepos (\v flags -> flags { globalLocalRepos = v })
         (reqArg' "DIR" (\x -> toNubList [x]) fromNubList)
      ,option [] ["logs-dir"]
         "The location to put log files"
         globalLogsDir (\v flags -> flags { globalLogsDir = v })
         (reqArgFlag "DIR")
      ,option [] ["world-file"]
         "The location of the world file"
         globalWorldFile (\v flags -> flags { globalWorldFile = v })
         (reqArgFlag "FILE")
      ]
-- ------------------------------------------------------------
-- * Config flags
-- ------------------------------------------------------------
-- | The @configure@ command: Cabal's own configure command with
-- cabal-install specific example notes appended to the help text.
configureCommand :: CommandUI ConfigFlags
configureCommand =
    base { commandDefaultFlags = mempty
         , commandNotes = Just notes
         }
  where
    base = Cabal.configureCommand defaultProgramDb
    -- keep whatever notes the Cabal command already carries, then append
    -- our usage examples
    notes pname =
         (case commandNotes base of
            Nothing -> ""
            Just n -> n pname ++ "\n")
      ++ "Examples:\n"
      ++ "  " ++ pname ++ " configure\n"
      ++ "    Configure with defaults;\n"
      ++ "  " ++ pname ++ " configure --enable-tests -fcustomflag\n"
      ++ "    Configure building package including tests,\n"
      ++ "    with some package-specific flag.\n"
-- | The option fields of the @configure@ command, as used by
-- 'configureExCommand' below.
configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags]
configureOptions = commandOptions configureCommand
-- | Given some 'ConfigFlags' for the version of Cabal that
-- cabal-install was built with, and a target older 'Version' of
-- Cabal that we want to pass these flags to, convert the
-- flags into a form that will be accepted by the older
-- Setup script. Generally speaking, this just means filtering
-- out flags that the old Cabal library doesn't understand, but
-- in some cases it may also mean "emulating" a feature using
-- some more legacy flags.
filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags
filterConfigureFlags flags cabalLibVersion
  -- NB: we expect the latest version to be the most common case,
  -- so test it first.
  | cabalLibVersion >= mkVersion [2,1,0] = flags_latest
  -- The naming convention is that flags_version gives flags with
  -- all flags *introduced* in version eliminated.
  -- It is NOT the latest version of Cabal library that
  -- these flags work for; version of introduction is a more
  -- natural metric.
  | cabalLibVersion < mkVersion [1,3,10] = flags_1_3_10
  | cabalLibVersion < mkVersion [1,10,0] = flags_1_10_0
  | cabalLibVersion < mkVersion [1,12,0] = flags_1_12_0
  | cabalLibVersion < mkVersion [1,14,0] = flags_1_14_0
  | cabalLibVersion < mkVersion [1,18,0] = flags_1_18_0
  | cabalLibVersion < mkVersion [1,19,1] = flags_1_19_1
  | cabalLibVersion < mkVersion [1,19,2] = flags_1_19_2
  | cabalLibVersion < mkVersion [1,21,1] = flags_1_21_1
  | cabalLibVersion < mkVersion [1,22,0] = flags_1_22_0
  | cabalLibVersion < mkVersion [1,23,0] = flags_1_23_0
  | cabalLibVersion < mkVersion [1,25,0] = flags_1_25_0
  | cabalLibVersion < mkVersion [2,1,0] = flags_2_1_0
  | otherwise = flags_latest
  where
    -- Note that each flags_X_Y_Z builds on the next-newer set, so the
    -- eliminations accumulate as we go further back in time.
    flags_latest = flags {
      -- Cabal >= 1.19.1 uses '--dependency' and does not need '--constraint'.
      configConstraints = []
      }
    flags_2_1_0 = flags_latest {
      -- Cabal < 2.1 doesn't know about -v +timestamp modifier
      configVerbosity = fmap verboseNoTimestamp (configVerbosity flags_latest)
      }
    flags_1_25_0 = flags_2_1_0 {
      -- Cabal < 1.25.0 doesn't know about --dynlibdir.
      configInstallDirs = configInstallDirs_1_25_0,
      -- Cabal < 1.25 doesn't have extended verbosity syntax
      configVerbosity = fmap verboseNoFlags (configVerbosity flags_2_1_0),
      -- Cabal < 1.25 doesn't support --deterministic
      configDeterministic = mempty
      }
    -- Emulate the dynlibdir/libexecsubdir install dirs by folding the
    -- subdir into libexecdir for older Cabal.
    configInstallDirs_1_25_0 = let dirs = configInstallDirs flags in
                               dirs { dynlibdir = NoFlag
                                    , libexecsubdir = NoFlag
                                    , libexecdir = maybeToFlag $
                                        combinePathTemplate <$> flagToMaybe (libexecdir dirs)
                                                            <*> flagToMaybe (libexecsubdir dirs)
                                    }
    -- Cabal < 1.23 doesn't know about '--profiling-detail'.
    -- Cabal < 1.23 has a hacked up version of 'enable-profiling'
    -- which we shouldn't use.
    (tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling flags
    flags_1_23_0 = flags_1_25_0 { configProfDetail = NoFlag
                                , configProfLibDetail = NoFlag
                                , configIPID = NoFlag
                                , configProf = NoFlag
                                , configProfExe = Flag tryExeProfiling
                                , configProfLib = Flag tryLibProfiling
                                }
    -- Cabal < 1.22 doesn't know about '--disable-debug-info'.
    flags_1_22_0 = flags_1_23_0 { configDebugInfo = NoFlag }
    -- Cabal < 1.21.1 doesn't know about 'disable-relocatable'
    -- Cabal < 1.21.1 doesn't know about 'enable-profiling'
    -- (but we already dealt with it in flags_1_23_0)
    flags_1_21_1 =
      flags_1_22_0 { configRelocatable = NoFlag
                   , configCoverage = NoFlag
                   , configLibCoverage = configCoverage flags
                   }
    -- Cabal < 1.19.2 doesn't know about '--exact-configuration' and
    -- '--enable-library-stripping'.
    flags_1_19_2 = flags_1_21_1 { configExactConfiguration = NoFlag
                                , configStripLibs = NoFlag }
    -- Cabal < 1.19.1 uses '--constraint' instead of '--dependency'.
    flags_1_19_1 = flags_1_19_2 { configDependencies = []
                                , configConstraints = configConstraints flags }
    -- Cabal < 1.18.0 doesn't know about --extra-prog-path and --sysconfdir.
    flags_1_18_0 = flags_1_19_1 { configProgramPathExtra = toNubList []
                                , configInstallDirs = configInstallDirs_1_18_0}
    configInstallDirs_1_18_0 = (configInstallDirs flags_1_19_1) { sysconfdir = NoFlag }
    -- Cabal < 1.14.0 doesn't know about '--disable-benchmarks'.
    flags_1_14_0 = flags_1_18_0 { configBenchmarks = NoFlag }
    -- Cabal < 1.12.0 doesn't know about '--enable/disable-executable-dynamic'
    -- and '--enable/disable-library-coverage'.
    flags_1_12_0 = flags_1_14_0 { configLibCoverage = NoFlag
                                , configDynExe = NoFlag }
    -- Cabal < 1.10.0 doesn't know about '--disable-tests'.
    flags_1_10_0 = flags_1_12_0 { configTests = NoFlag }
    -- Cabal < 1.3.10 does not grok the '--constraints' flag.
    flags_1_3_10 = flags_1_10_0 { configConstraints = [] }
-- | Get the package database settings from 'ConfigFlags', accounting for
-- @--package-db@ and @--user@ flags.
configPackageDB' :: ConfigFlags -> PackageDBStack
configPackageDB' cfg =
  let -- a user install is the default unless explicitly disabled
      defaultToUser = Cabal.fromFlagOrDefault True (configUserInstall cfg)
  in interpretPackageDbFlags defaultToUser (configPackageDBs cfg)
-- | Configure the compiler, but reduce verbosity during this step.
configCompilerAux' :: ConfigFlags -> IO (Compiler, Platform, ProgramDb)
configCompilerAux' configFlags = configCompilerAuxEx quieterFlags
  where
    --FIXME: make configCompilerAux use a sensible verbosity
    quieterFlags = configFlags
      { configVerbosity = fmap lessVerbose (configVerbosity configFlags) }
-- ------------------------------------------------------------
-- * Config extra flags
-- ------------------------------------------------------------
-- | cabal configure takes some extra flags beyond runghc Setup configure
--
data ConfigExFlags = ConfigExFlags {
    -- | Which version of the Cabal library to build packages with.
    configCabalVersion :: Flag Version,
    -- | User-supplied constraints together with where each came from.
    configExConstraints:: [(UserConstraint, ConstraintSource)],
    -- | Soft version preferences passed to the solver.
    configPreferences :: [Dependency],
    -- | Which dependency solver to use.
    configSolver :: Flag PreSolver,
    -- | Relaxation of upper bounds (@--allow-newer@), if any.
    configAllowNewer :: Maybe AllowNewer,
    -- | Relaxation of lower bounds (@--allow-older@), if any.
    configAllowOlder :: Maybe AllowOlder
  }
  deriving (Eq, Generic)
-- | Default extra configure flags: everything empty except the solver,
-- which is pre-selected.
defaultConfigExFlags :: ConfigExFlags
defaultConfigExFlags = mempty { configSolver = Flag defaultSolver }
-- | The cabal-install @configure@ command: Cabal's configure options paired
-- with the extra 'ConfigExFlags' options.  A few of Cabal's own flags are
-- filtered out, presumably because the ex-options provide replacements —
-- NOTE(review): confirm against 'configureExOptions'.
configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags)
configureExCommand = configureCommand {
    commandDefaultFlags = (mempty, defaultConfigExFlags),
    commandOptions = \showOrParseArgs ->
         liftOptions fst setFst
         (filter ((`notElem` ["constraint", "dependency", "exact-configuration"])
                  . optionName) $ configureOptions showOrParseArgs)
      ++ liftOptions snd setSnd
         (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag)
  }
  where
    -- lens-style setters for the two halves of the flag pair
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)
-- | The option fields for the extra configure flags.  @src@ records where
-- any @--constraint@ flags came from.
configureExOptions :: ShowOrParseArgs
                   -> ConstraintSource
                   -> [OptionField ConfigExFlags]
configureExOptions _showOrParseArgs src =
  [ option [] ["cabal-lib-version"]
      ("Select which version of the Cabal lib to use to build packages "
      ++ "(useful for testing).")
      configCabalVersion (\v flags -> flags { configCabalVersion = v })
      (reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++)
                                    (fmap toFlag parse))
                        (map display . flagToList))
  , option [] ["constraint"]
      "Specify constraints on a package (version, installed/source, flags)"
      configExConstraints (\v flags -> flags { configExConstraints = v })
      (reqArg "CONSTRAINT"
              ((\x -> [(x, src)]) `fmap` ReadE readUserConstraint)
              (map $ display . fst))
  , option [] ["preference"]
      "Specify preferences (soft constraints) on the version of a package"
      configPreferences (\v flags -> flags { configPreferences = v })
      (reqArg "CONSTRAINT"
              (readP_to_E (const "dependency expected")
                          (fmap (\x -> [x]) parse))
              (map display))
  , optionSolver configSolver (\v flags -> flags { configSolver = v })
  , option [] ["allow-older"]
      -- Fixed help text: --allow-older relaxes *lower* bounds (the old
      -- text wrongly said "upper bounds", duplicating --allow-newer).
      ("Ignore lower bounds in all dependencies or DEPS")
      (fmap unAllowOlder . configAllowOlder)
      (\v flags -> flags { configAllowOlder = fmap AllowOlder v})
      (optArg "DEPS"
        (readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser)
        (Just RelaxDepsAll) relaxDepsPrinter)
  , option [] ["allow-newer"]
      ("Ignore upper bounds in all dependencies or DEPS")
      (fmap unAllowNewer . configAllowNewer)
      (\v flags -> flags { configAllowNewer = fmap AllowNewer v})
      (optArg "DEPS"
        (readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser)
        (Just RelaxDepsAll) relaxDepsPrinter)
  ]
-- | Parse a non-empty comma-separated package list into a 'RelaxDeps'
-- value, as used by the optional argument of
-- @--allow-newer@/@--allow-older@.
relaxDepsParser :: Parse.ReadP r (Maybe RelaxDeps)
relaxDepsParser =
  fmap (Just . RelaxDepsSome) (Parse.sepBy1 parse (Parse.char ','))

-- | Render a 'RelaxDeps' value back to command-line arguments.
-- A 'Nothing' element stands for the bare, argument-less flag form.
relaxDepsPrinter :: (Maybe RelaxDeps) -> [Maybe String]
relaxDepsPrinter md = case md of
  Nothing                   -> []
  Just RelaxDepsNone        -> []
  Just RelaxDepsAll         -> [Nothing]
  Just (RelaxDepsSome pkgs) -> map (Just . display) pkgs
-- Field-wise combination via GHC.Generics ('gmempty'/'gmappend').
instance Monoid ConfigExFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup ConfigExFlags where
  (<>) = gmappend
-- | @reconfigure@: re-run @configure@ with the most recently used flags,
-- optionally appending new FLAGS.  Reuses 'configureExCommand' with the
-- user-facing text swapped out.
reconfigureCommand :: CommandUI (ConfigFlags, ConfigExFlags)
reconfigureCommand
  = configureExCommand
    { commandName = "reconfigure"
    , commandSynopsis = "Reconfigure the package if necessary."
    , commandDescription = Just $ \pname -> wrapText $
        "Run `configure` with the most recently used flags, or append FLAGS "
        ++ "to the most recently used configuration. "
        ++ "Accepts the same flags as `" ++ pname ++ " configure'. "
        ++ "If the package has never been configured, the default flags are "
        ++ "used."
    , commandNotes = Just $ \pname ->
        "Examples:\n"
        ++ " " ++ pname ++ " reconfigure\n"
        ++ " Configure with the most recently used flags.\n"
        ++ " " ++ pname ++ " reconfigure -w PATH\n"
        ++ " Reconfigure with the most recently used flags,\n"
        ++ " but use the compiler at PATH.\n\n"
    , commandUsage = usageAlternatives "reconfigure" [ "[FLAGS]" ]
    , commandDefaultFlags = mempty
    }
-- ------------------------------------------------------------
-- * Build flags
-- ------------------------------------------------------------
-- | Whether to skip reinstalling add-source dependencies before a build
-- (see the @--only@ flag in 'buildExOptions').
data SkipAddSourceDepsCheck =
  SkipAddSourceDepsCheck | DontSkipAddSourceDepsCheck
  deriving Eq

-- | cabal-install specific extras for the @build@ family of commands.
data BuildExFlags = BuildExFlags {
  buildOnly :: Flag SkipAddSourceDepsCheck  -- ^ set by @--only@ (sandbox-only)
} deriving Generic
-- | The cabal-install specific options added to @build@-like commands.
-- Currently just @--only@, which skips the add-source dependency check.
buildExOptions :: ShowOrParseArgs -> [OptionField BuildExFlags]
buildExOptions _showOrParseArgs =
  [ option [] ["only"]
      "Don't reinstall add-source dependencies (sandbox-only)"
      buildOnly (\v flags -> flags { buildOnly = v })
      (noArg (Flag SkipAddSourceDepsCheck))
  ]
-- | The @build@ command: Cabal's own build command with the extra
-- cabal-install options ('buildExOptions') appended.
buildCommand :: CommandUI (BuildFlags, BuildExFlags)
buildCommand = parent {
    commandDefaultFlags = (commandDefaultFlags parent, mempty),
    commandOptions = \showOrParseArgs ->
         liftOptions fst putFst (commandOptions parent showOrParseArgs)
      ++ liftOptions snd putSnd (buildExOptions showOrParseArgs)
  }
  where
    putFst a (_,b) = (a,b)
    putSnd b (a,_) = (a,b)
    parent = Cabal.buildCommand defaultProgramDb
-- Field-wise combination via GHC.Generics ('gmempty'/'gmappend').
instance Monoid BuildExFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup BuildExFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Repl command
-- ------------------------------------------------------------
-- | The @repl@ command: Cabal's repl command plus 'buildExOptions',
-- lifted into a flag pair (same plumbing pattern as 'buildCommand').
replCommand :: CommandUI (ReplFlags, BuildExFlags)
replCommand = parent {
    commandDefaultFlags = (commandDefaultFlags parent, mempty),
    commandOptions =
      \showOrParseArgs -> liftOptions fst setFst
                          (commandOptions parent showOrParseArgs)
                          ++
                          liftOptions snd setSnd (buildExOptions showOrParseArgs)
  }
  where
    -- lens-like setters into the flag pair
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)
    parent = Cabal.replCommand defaultProgramDb
-- ------------------------------------------------------------
-- * Test command
-- ------------------------------------------------------------
-- | The @test@ command: Cabal's test command combined with build options
-- and 'buildExOptions', lifted into a flag triple.
testCommand :: CommandUI (TestFlags, BuildFlags, BuildExFlags)
testCommand = parent {
  commandDefaultFlags = (commandDefaultFlags parent,
                         Cabal.defaultBuildFlags, mempty),
  commandOptions =
    \showOrParseArgs -> liftOptions get1 set1
                        (commandOptions parent showOrParseArgs)
                        ++
                        liftOptions get2 set2
                        (Cabal.buildOptions progDb showOrParseArgs)
                        ++
                        liftOptions get3 set3 (buildExOptions showOrParseArgs)
  }
  where
    -- lens-like getters/setters into the flag triple
    get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c)
    get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c)
    get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c)
    parent = Cabal.testCommand
    progDb = defaultProgramDb
-- ------------------------------------------------------------
-- * Bench command
-- ------------------------------------------------------------
-- | The @bench@ command: same flag-triple plumbing as 'testCommand',
-- but built on Cabal's benchmark command.
benchmarkCommand :: CommandUI (BenchmarkFlags, BuildFlags, BuildExFlags)
benchmarkCommand = parent {
  commandDefaultFlags = (commandDefaultFlags parent,
                         Cabal.defaultBuildFlags, mempty),
  commandOptions =
    \showOrParseArgs -> liftOptions get1 set1
                        (commandOptions parent showOrParseArgs)
                        ++
                        liftOptions get2 set2
                        (Cabal.buildOptions progDb showOrParseArgs)
                        ++
                        liftOptions get3 set3 (buildExOptions showOrParseArgs)
  }
  where
    -- lens-like getters/setters into the flag triple
    get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c)
    get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c)
    get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c)
    parent = Cabal.benchmarkCommand
    progDb = defaultProgramDb
-- ------------------------------------------------------------
-- * Fetch command
-- ------------------------------------------------------------
-- | Flags for the @fetch@ command.  Most fields mirror the solver
-- configuration, since fetching dependencies requires solving first.
data FetchFlags = FetchFlags {
--    fetchOutput :: Flag FilePath,
    fetchDeps :: Flag Bool,      -- ^ also fetch dependencies (default True)
    fetchDryRun :: Flag Bool,    -- ^ only print what would be fetched
    fetchSolver :: Flag PreSolver,
    fetchMaxBackjumps :: Flag Int,
    fetchReorderGoals :: Flag ReorderGoals,
    fetchCountConflicts :: Flag CountConflicts,
    fetchIndependentGoals :: Flag IndependentGoals,
    fetchShadowPkgs :: Flag ShadowPkgs,
    fetchStrongFlags :: Flag StrongFlags,
    fetchAllowBootLibInstalls :: Flag AllowBootLibInstalls,
    fetchVerbosity :: Flag Verbosity
  }

-- | Defaults for @fetch@: resolve dependencies, no dry run, standard
-- solver settings, normal verbosity.
defaultFetchFlags :: FetchFlags
defaultFetchFlags = FetchFlags {
--  fetchOutput = mempty,
    fetchDeps = toFlag True,
    fetchDryRun = toFlag False,
    fetchSolver = Flag defaultSolver,
    fetchMaxBackjumps = Flag defaultMaxBackjumps,
    fetchReorderGoals = Flag (ReorderGoals False),
    fetchCountConflicts = Flag (CountConflicts True),
    fetchIndependentGoals = Flag (IndependentGoals False),
    fetchShadowPkgs = Flag (ShadowPkgs False),
    fetchStrongFlags = Flag (StrongFlags False),
    fetchAllowBootLibInstalls = Flag (AllowBootLibInstalls False),
    fetchVerbosity = toFlag normal
  }
-- | The @fetch@ command: download package tarballs (and optionally those
-- of their dependencies) for later installation.
fetchCommand :: CommandUI FetchFlags
fetchCommand = CommandUI {
    commandName = "fetch",
    commandSynopsis = "Downloads packages for later installation.",
    commandUsage = usageAlternatives "fetch" [ "[FLAGS] PACKAGES"
                                             ],
    commandDescription = Just $ \_ ->
      "Note that it currently is not possible to fetch the dependencies for a\n"
      ++ "package in the current directory.\n",
    commandNotes = Nothing,
    commandDefaultFlags = defaultFetchFlags,
    commandOptions = \ showOrParseArgs -> [
         optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v })
--     , option "o" ["output"]
--         "Put the package(s) somewhere specific rather than the usual cache."
--         fetchOutput (\v flags -> flags { fetchOutput = v })
--         (reqArgFlag "PATH")
       -- --dependencies/--no-dependencies toggle the same field
       , option [] ["dependencies", "deps"]
           "Resolve and fetch dependencies (default)"
           fetchDeps (\v flags -> flags { fetchDeps = v })
           trueArg
       , option [] ["no-dependencies", "no-deps"]
           "Ignore dependencies"
           fetchDeps (\v flags -> flags { fetchDeps = v })
           falseArg
       , option [] ["dry-run"]
           "Do not install anything, only print what would be installed."
           fetchDryRun (\v flags -> flags { fetchDryRun = v })
           trueArg
       ] ++
       -- shared solver options, wired to the fetch-specific fields
       optionSolver fetchSolver (\v flags -> flags { fetchSolver = v }) :
       optionSolverFlags showOrParseArgs
         fetchMaxBackjumps (\v flags -> flags { fetchMaxBackjumps = v })
         fetchReorderGoals (\v flags -> flags { fetchReorderGoals = v })
         fetchCountConflicts (\v flags -> flags { fetchCountConflicts = v })
         fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v })
         fetchShadowPkgs (\v flags -> flags { fetchShadowPkgs = v })
         fetchStrongFlags (\v flags -> flags { fetchStrongFlags = v })
         fetchAllowBootLibInstalls (\v flags -> flags { fetchAllowBootLibInstalls = v })
  }
-- ------------------------------------------------------------
-- * Freeze command
-- ------------------------------------------------------------
-- | Flags for the @freeze@ command.  As with 'FetchFlags', most fields
-- configure the dependency solver used to compute the frozen plan.
data FreezeFlags = FreezeFlags {
    freezeDryRun :: Flag Bool,       -- ^ only print, do not write cabal.config
    freezeTests :: Flag Bool,        -- ^ include test-suite dependencies
    freezeBenchmarks :: Flag Bool,   -- ^ include benchmark dependencies
    freezeSolver :: Flag PreSolver,
    freezeMaxBackjumps :: Flag Int,
    freezeReorderGoals :: Flag ReorderGoals,
    freezeCountConflicts :: Flag CountConflicts,
    freezeIndependentGoals :: Flag IndependentGoals,
    freezeShadowPkgs :: Flag ShadowPkgs,
    freezeStrongFlags :: Flag StrongFlags,
    freezeAllowBootLibInstalls :: Flag AllowBootLibInstalls,
    freezeVerbosity :: Flag Verbosity
  }

-- | Defaults for @freeze@: no dry run, exclude test and benchmark
-- dependencies, standard solver settings, normal verbosity.
defaultFreezeFlags :: FreezeFlags
defaultFreezeFlags = FreezeFlags {
    freezeDryRun = toFlag False,
    freezeTests = toFlag False,
    freezeBenchmarks = toFlag False,
    freezeSolver = Flag defaultSolver,
    freezeMaxBackjumps = Flag defaultMaxBackjumps,
    freezeReorderGoals = Flag (ReorderGoals False),
    freezeCountConflicts = Flag (CountConflicts True),
    freezeIndependentGoals = Flag (IndependentGoals False),
    freezeShadowPkgs = Flag (ShadowPkgs False),
    freezeStrongFlags = Flag (StrongFlags False),
    freezeAllowBootLibInstalls = Flag (AllowBootLibInstalls False),
    freezeVerbosity = toFlag normal
  }
-- | The @freeze@ command: compute a consistent install plan and pin the
-- chosen versions in @cabal.config@.
freezeCommand :: CommandUI FreezeFlags
freezeCommand = CommandUI {
    commandName = "freeze",
    commandSynopsis = "Freeze dependencies.",
    commandDescription = Just $ \_ -> wrapText $
      "Calculates a valid set of dependencies and their exact versions. "
      ++ "If successful, saves the result to the file `cabal.config`.\n"
      ++ "\n"
      ++ "The package versions specified in `cabal.config` will be used for "
      ++ "any future installs.\n"
      ++ "\n"
      ++ "An existing `cabal.config` is ignored and overwritten.\n",
    commandNotes = Nothing,
    commandUsage = usageFlags "freeze",
    commandDefaultFlags = defaultFreezeFlags,
    commandOptions = \ showOrParseArgs -> [
         optionVerbosity freezeVerbosity
         (\v flags -> flags { freezeVerbosity = v })
       , option [] ["dry-run"]
           "Do not freeze anything, only print what would be frozen"
           freezeDryRun (\v flags -> flags { freezeDryRun = v })
           trueArg
       -- FIX: help text said "tests suites" / "benchmarks suites";
       -- corrected the number agreement in both descriptions.
       , option [] ["tests"]
           ("freezing of the dependencies of any test suites "
            ++ "in the package description file.")
           freezeTests (\v flags -> flags { freezeTests = v })
           (boolOpt [] [])
       , option [] ["benchmarks"]
           ("freezing of the dependencies of any benchmark suites "
            ++ "in the package description file.")
           freezeBenchmarks (\v flags -> flags { freezeBenchmarks = v })
           (boolOpt [] [])
       ] ++
       -- shared solver options, wired to the freeze-specific fields
       optionSolver
         freezeSolver (\v flags -> flags { freezeSolver = v }):
       optionSolverFlags showOrParseArgs
         freezeMaxBackjumps (\v flags -> flags { freezeMaxBackjumps = v })
         freezeReorderGoals (\v flags -> flags { freezeReorderGoals = v })
         freezeCountConflicts (\v flags -> flags { freezeCountConflicts = v })
         freezeIndependentGoals (\v flags -> flags { freezeIndependentGoals = v })
         freezeShadowPkgs (\v flags -> flags { freezeShadowPkgs = v })
         freezeStrongFlags (\v flags -> flags { freezeStrongFlags = v })
         freezeAllowBootLibInstalls (\v flags -> flags { freezeAllowBootLibInstalls = v })
  }
-- ------------------------------------------------------------
-- * 'gen-bounds' command
-- ------------------------------------------------------------
-- | The @gen-bounds@ command: print suggested version bounds for
-- dependencies that lack them.  Reuses 'FreezeFlags' (only verbosity is
-- exposed as an option here).
genBoundsCommand :: CommandUI FreezeFlags
genBoundsCommand = CommandUI {
    commandName = "gen-bounds",
    commandSynopsis = "Generate dependency bounds.",
    commandDescription = Just $ \_ -> wrapText $
      "Generates bounds for all dependencies that do not currently have them. "
      ++ "Generated bounds are printed to stdout. "
      ++ "You can then paste them into your .cabal file.\n"
      ++ "\n",
    commandNotes = Nothing,
    commandUsage = usageFlags "gen-bounds",
    commandDefaultFlags = defaultFreezeFlags,
    commandOptions = \ _ -> [
        optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v })
      ]
  }
-- ------------------------------------------------------------
-- * 'outdated' command
-- ------------------------------------------------------------
-- | Which packages' major version bumps the @outdated@ command should
-- ignore: none, all, or an explicit list.
data IgnoreMajorVersionBumps = IgnoreMajorVersionBumpsNone
                             | IgnoreMajorVersionBumpsAll
                             | IgnoreMajorVersionBumpsSome [PackageName]

instance Monoid IgnoreMajorVersionBumps where
  mempty = IgnoreMajorVersionBumpsNone
  mappend = (<>)

-- Combination rules: None is the identity, All absorbs (a later All
-- also wins over an earlier Some), and two Some lists concatenate.
-- NOTE: the equation order is significant; do not reorder.
instance Semigroup IgnoreMajorVersionBumps where
  IgnoreMajorVersionBumpsNone <> r = r
  l@IgnoreMajorVersionBumpsAll <> _ = l
  l@(IgnoreMajorVersionBumpsSome _) <> IgnoreMajorVersionBumpsNone = l
  (IgnoreMajorVersionBumpsSome _) <> r@IgnoreMajorVersionBumpsAll = r
  (IgnoreMajorVersionBumpsSome a) <> (IgnoreMajorVersionBumpsSome b) =
    IgnoreMajorVersionBumpsSome (a ++ b)
-- | Flags for the @outdated@ command.
data OutdatedFlags = OutdatedFlags {
    outdatedVerbosity :: Flag Verbosity,
    outdatedFreezeFile :: Flag Bool,      -- ^ check the freeze file instead of the .cabal file
    outdatedNewFreezeFile :: Flag Bool,   -- ^ check the new-style freeze file
    outdatedSimpleOutput :: Flag Bool,    -- ^ one package name per line
    outdatedExitCode :: Flag Bool,        -- ^ non-zero exit when outdated deps exist
    outdatedQuiet :: Flag Bool,           -- ^ no output; implies --exit-code and -v0
    outdatedIgnore :: [PackageName],      -- ^ packages to skip entirely
    outdatedMinor :: Maybe IgnoreMajorVersionBumps  -- ^ ignore major bumps for these
  }

-- | Defaults for @outdated@: normal verbosity, everything else unset.
defaultOutdatedFlags :: OutdatedFlags
defaultOutdatedFlags = OutdatedFlags {
    outdatedVerbosity = toFlag normal,
    outdatedFreezeFile = mempty,
    outdatedNewFreezeFile = mempty,
    outdatedSimpleOutput = mempty,
    outdatedExitCode = mempty,
    outdatedQuiet = mempty,
    outdatedIgnore = mempty,
    outdatedMinor = mempty
  }
-- | The @outdated@ command: report dependencies (from the .cabal file or
-- a freeze file) for which newer versions exist.
outdatedCommand :: CommandUI OutdatedFlags
outdatedCommand = CommandUI {
  commandName = "outdated",
  commandSynopsis = "Check for outdated dependencies",
  commandDescription = Just $ \_ -> wrapText $
    "Checks for outdated dependencies in the package description file "
    ++ "or freeze file",
  commandNotes = Nothing,
  commandUsage = usageFlags "outdated",
  commandDefaultFlags = defaultOutdatedFlags,
  commandOptions = \ _ -> [
    optionVerbosity outdatedVerbosity
      (\v flags -> flags { outdatedVerbosity = v })
    ,option [] ["freeze-file"]
     "Act on the freeze file"
     outdatedFreezeFile (\v flags -> flags { outdatedFreezeFile = v })
     trueArg
    ,option [] ["new-freeze-file"]
     "Act on the new-style freeze file"
     outdatedNewFreezeFile (\v flags -> flags { outdatedNewFreezeFile = v })
     trueArg
    ,option [] ["simple-output"]
     "Only print names of outdated dependencies, one per line"
     outdatedSimpleOutput (\v flags -> flags { outdatedSimpleOutput = v })
     trueArg
    ,option [] ["exit-code"]
     "Exit with non-zero when there are outdated dependencies"
     outdatedExitCode (\v flags -> flags { outdatedExitCode = v })
     trueArg
    ,option ['q'] ["quiet"]
     "Don't print any output. Implies '--exit-code' and '-v0'"
     outdatedQuiet (\v flags -> flags { outdatedQuiet = v })
     trueArg
    ,option [] ["ignore"]
     "Packages to ignore"
     outdatedIgnore (\v flags -> flags { outdatedIgnore = v })
     (reqArg "PKGS" pkgNameListParser (map display))
    ,option [] ["minor"]
     "Ignore major version bumps for these packages"
     outdatedMinor (\v flags -> flags { outdatedMinor = v })
     -- bare --minor (no argument) means "ignore major bumps for all"
     (optArg "PKGS" ignoreMajorVersionBumpsParser
      (Just IgnoreMajorVersionBumpsAll) ignoreMajorVersionBumpsPrinter)
    ]
  }
  where
    -- Render an 'IgnoreMajorVersionBumps' back to command-line form;
    -- 'Nothing' entries stand for the bare (argument-less) flag.
    ignoreMajorVersionBumpsPrinter :: (Maybe IgnoreMajorVersionBumps)
                                      -> [Maybe String]
    ignoreMajorVersionBumpsPrinter Nothing = []
    ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsNone)= []
    ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsAll) = [Nothing]
    ignoreMajorVersionBumpsPrinter (Just (IgnoreMajorVersionBumpsSome pkgs)) =
      map (Just . display) $ pkgs

    -- Parse a comma-separated package list for --minor=PKGS.
    ignoreMajorVersionBumpsParser  =
      (Just . IgnoreMajorVersionBumpsSome) `fmap` pkgNameListParser

    -- Parse a non-empty comma-separated list of package names.
    pkgNameListParser = readP_to_E
      ("Couldn't parse the list of package names: " ++)
      (Parse.sepBy1 parse (Parse.char ','))
-- ------------------------------------------------------------
-- * Update command
-- ------------------------------------------------------------
-- | Flags for the @update@ command.
data UpdateFlags
    = UpdateFlags {
    updateVerbosity :: Flag Verbosity,
    updateIndexState :: Flag IndexState  -- ^ target index state (--index-state)
  } deriving Generic

-- | Defaults for @update@: normal verbosity, index state HEAD.
defaultUpdateFlags :: UpdateFlags
defaultUpdateFlags
    = UpdateFlags {
    updateVerbosity = toFlag normal,
    updateIndexState = toFlag IndexStateHead
  }
-- | The @update@ command: download the package list from each known
-- remote repository, optionally pinned to a past index state.
updateCommand :: CommandUI UpdateFlags
updateCommand = CommandUI {
    commandName = "update",
    commandSynopsis = "Updates list of known packages.",
    commandDescription = Just $ \_ ->
      "For all known remote repositories, download the package list.\n",
    commandNotes = Just $ \_ ->
      relevantConfigValuesText ["remote-repo"
                               ,"remote-repo-cache"
                               ,"local-repo"],
    commandUsage = usageFlags "update",
    commandDefaultFlags = defaultUpdateFlags,
    commandOptions = \_ -> [
      optionVerbosity updateVerbosity (\v flags -> flags { updateVerbosity = v }),
      option [] ["index-state"]
        ("Update the source package index to its state as it existed at a previous time. " ++
         "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++
         "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD').")
        updateIndexState (\v flags -> flags { updateIndexState = v })
        -- FIX: error message grammar — was "must be a unix-timestamps"
        -- and "a ISO8601 UTC timestamp".
        (reqArg "STATE" (readP_to_E (const $ "index-state must be a " ++
                                             "unix-timestamp (e.g. '@1474732068'), " ++
                                             "an ISO8601 UTC timestamp " ++
                                             "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'")
                                    (toFlag `fmap` parse))
                        (flagToList . fmap display))
    ]
  }
-- ------------------------------------------------------------
-- * Other commands
-- ------------------------------------------------------------
-- | The @upgrade@ command is disabled; it only exists to print a
-- redirect to @install@, hence the placeholder synopsis.
upgradeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
upgradeCommand = configureCommand {
    commandName = "upgrade",
    commandSynopsis = "(command disabled, use install instead)",
    commandDescription = Nothing,
    commandUsage = usageFlagsOrPackages "upgrade",
    commandDefaultFlags = (commandDefaultFlags configureCommand,
                           defaultConfigExFlags,
                           defaultInstallFlags,
                           Cabal.defaultHaddockFlags),
    commandOptions = commandOptions installCommand
  }
{-
cleanCommand :: CommandUI ()
cleanCommand = makeCommand name shortDesc longDesc emptyFlags options
where
name = "clean"
shortDesc = "Removes downloaded files"
longDesc = Nothing
emptyFlags = ()
options _ = []
-}
-- | The @check@ command: lint the .cabal file in the current directory
-- against Hackage's requirements.  Takes only a verbosity flag.
checkCommand :: CommandUI (Flag Verbosity)
checkCommand = CommandUI {
    commandName = "check",
    commandSynopsis = "Check the package for common mistakes.",
    commandDescription = Just $ \_ -> wrapText $
      "Expects a .cabal package file in the current directory.\n"
      ++ "\n"
      ++ "The checks correspond to the requirements to packages on Hackage. "
      ++ "If no errors and warnings are reported, Hackage will accept this "
      ++ "package.\n",
    commandNotes = Nothing,
    commandUsage = \pname -> "Usage: " ++ pname ++ " check\n",
    commandDefaultFlags = toFlag normal,
    commandOptions = \_ -> []
  }
-- | The @format@ command: rewrite the .cabal file in the standard style.
formatCommand :: CommandUI (Flag Verbosity)
formatCommand = CommandUI {
    commandName = "format",
    commandSynopsis = "Reformat the .cabal file using the standard style.",
    commandDescription = Nothing,
    commandNotes = Nothing,
    commandUsage = usageAlternatives "format" ["[FILE]"],
    commandDefaultFlags = toFlag normal,
    commandOptions = \_ -> []
  }
-- | The @uninstall@ command exists only to tell the user that
-- uninstalling is not implemented (see the synopsis).
uninstallCommand :: CommandUI (Flag Verbosity)
uninstallCommand = CommandUI {
    commandName = "uninstall",
    commandSynopsis = "Warn about 'uninstall' not being implemented.",
    commandDescription = Nothing,
    commandNotes = Nothing,
    commandUsage = usageAlternatives "uninstall" ["PACKAGES"],
    commandDefaultFlags = toFlag normal,
    commandOptions = \_ -> []
  }
-- | The @manpage@ command: write manpage source to stdout.  The flag
-- state is just the verbosity itself, hence the @id@/@const@ accessors.
manpageCommand :: CommandUI (Flag Verbosity)
manpageCommand = CommandUI {
    commandName = "manpage",
    commandSynopsis = "Outputs manpage source.",
    commandDescription = Just $ \_ ->
      "Output manpage source to STDOUT.\n",
    commandNotes = Nothing,
    commandUsage = usageFlags "manpage",
    commandDefaultFlags = toFlag normal,
    commandOptions = \_ -> [optionVerbosity id const]
  }
-- | The @run@ command: build an executable and run it.  Options are
-- Cabal's build options plus 'buildExOptions'.
runCommand :: CommandUI (BuildFlags, BuildExFlags)
runCommand = CommandUI {
    commandName = "run",
    commandSynopsis = "Builds and runs an executable.",
    commandDescription = Just $ \pname -> wrapText $
      "Builds and then runs the specified executable. If no executable is "
      ++ "specified, but the package contains just one executable, that one "
      ++ "is built and executed.\n"
      ++ "\n"
      -- NOTE(review): this paragraph is about the `test` command, which
      -- looks copy-pasted — confirm it is intended in `run`'s help text.
      ++ "Use `" ++ pname ++ " test --show-details=streaming` to run a "
      ++ "test-suite and get its full output.\n",
    commandNotes = Just $ \pname ->
      "Examples:\n"
      ++ " " ++ pname ++ " run\n"
      ++ " Run the only executable in the current package;\n"
      ++ " " ++ pname ++ " run foo -- --fooflag\n"
      ++ " Works similar to `./foo --fooflag`.\n",
    commandUsage = usageAlternatives "run"
      ["[FLAGS] [EXECUTABLE] [-- EXECUTABLE_FLAGS]"],
    commandDefaultFlags = mempty,
    commandOptions =
      \showOrParseArgs -> liftOptions fst setFst
                          (commandOptions parent showOrParseArgs)
                          ++
                          liftOptions snd setSnd
                          (buildExOptions showOrParseArgs)
  }
  where
    -- lens-like setters into the flag pair
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)
    parent = Cabal.buildCommand defaultProgramDb
-- ------------------------------------------------------------
-- * Report flags
-- ------------------------------------------------------------
-- | Flags for the @report@ command (uploading build reports).
data ReportFlags = ReportFlags {
    reportUsername :: Flag Username,  -- ^ Hackage username
    reportPassword :: Flag Password,  -- ^ Hackage password
    reportVerbosity :: Flag Verbosity
  } deriving Generic

-- | Defaults for @report@: no credentials, normal verbosity.
defaultReportFlags :: ReportFlags
defaultReportFlags = ReportFlags {
    reportUsername = mempty,
    reportPassword = mempty,
    reportVerbosity = toFlag normal
  }
-- | The @report@ command: upload build reports to a remote server,
-- authenticating with Hackage credentials.
reportCommand :: CommandUI ReportFlags
reportCommand = CommandUI {
    commandName = "report",
    commandSynopsis = "Upload build reports to a remote server.",
    commandDescription = Nothing,
    commandNotes = Just $ \_ ->
      "You can store your Hackage login in the ~/.cabal/config file\n",
    commandUsage = usageAlternatives "report" ["[FLAGS]"],
    commandDefaultFlags = defaultReportFlags,
    commandOptions = \_ ->
      [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v })

      ,option ['u'] ["username"]
        "Hackage username."
        reportUsername (\v flags -> flags { reportUsername = v })
        (reqArg' "USERNAME" (toFlag . Username)
                            (flagToList . fmap unUsername))

      ,option ['p'] ["password"]
        "Hackage password."
        reportPassword (\v flags -> flags { reportPassword = v })
        (reqArg' "PASSWORD" (toFlag . Password)
                            (flagToList . fmap unPassword))
      ]
  }

-- Field-wise combination via GHC.Generics ('gmempty'/'gmappend').
instance Monoid ReportFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup ReportFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Get flags
-- ------------------------------------------------------------
-- | Flags for the @get@ command (and its deprecated alias @unpack@).
data GetFlags = GetFlags {
    getDestDir :: Flag FilePath,             -- ^ where to place the source (--destdir)
    getPristine :: Flag Bool,                -- ^ original tarball, no .cabal revision update
    getIndexState :: Flag IndexState,        -- ^ which index state to resolve against
    getSourceRepository :: Flag (Maybe RepoKind),  -- ^ clone the source repo instead of the tarball
    getVerbosity :: Flag Verbosity
  } deriving Generic

-- | Defaults for @get@: everything unset, normal verbosity.
defaultGetFlags :: GetFlags
defaultGetFlags = GetFlags {
    getDestDir = mempty,
    getPristine = mempty,
    getIndexState = mempty,
    getSourceRepository = mempty,
    getVerbosity = toFlag normal
  }
-- | The @get@ command: download and unpack a package's source tarball,
-- or clone its source repository with @-s@.
getCommand :: CommandUI GetFlags
getCommand = CommandUI {
    commandName = "get",
    commandSynopsis = "Download/Extract a package's source code (repository).",
    commandDescription = Just $ \_ -> wrapText $
      "Creates a local copy of a package's source code. By default it gets "
      ++ "the source\ntarball and unpacks it in a local subdirectory. "
      ++ "Alternatively, with -s it will\nget the code from the source "
      ++ "repository specified by the package.\n",
    commandNotes = Just $ \pname ->
      "Examples:\n"
      ++ " " ++ pname ++ " get hlint\n"
      ++ " Download the latest stable version of hlint;\n"
      ++ " " ++ pname ++ " get lens --source-repository=head\n"
      ++ " Download the source repository (i.e. git clone from github).\n",
    commandUsage = usagePackages "get",
    commandDefaultFlags = defaultGetFlags,
    commandOptions = \_ -> [
        optionVerbosity getVerbosity (\v flags -> flags { getVerbosity = v })

       ,option "d" ["destdir"]
         "Where to place the package source, defaults to the current directory."
         getDestDir (\v flags -> flags { getDestDir = v })
         (reqArgFlag "PATH")

       ,option "s" ["source-repository"]
         "Copy the package's source repository (ie git clone, darcs get, etc as appropriate)."
         getSourceRepository (\v flags -> flags { getSourceRepository = v })
        (optArg "[head|this|...]" (readP_to_E (const "invalid source-repository")
                                              (fmap (toFlag . Just) parse))
                                  (Flag Nothing)
                                  (map (fmap show) . flagToList))

       -- FIX: description grammar — added the missing "which" before
       -- ".cabal file revision"; error message grammar — was
       -- "must be a unix-timestamps" and "a ISO8601 UTC timestamp".
       , option [] ["index-state"]
         ("Use source package index state as it existed at a previous time. " ++
          "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++
          "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD'). " ++
          "This determines which package versions are available as well as " ++
          "which .cabal file revision is selected (unless --pristine is used).")
         getIndexState (\v flags -> flags { getIndexState = v })
         (reqArg "STATE" (readP_to_E (const $ "index-state must be a " ++
                                              "unix-timestamp (e.g. '@1474732068'), " ++
                                              "an ISO8601 UTC timestamp " ++
                                              "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'")
                                     (toFlag `fmap` parse))
                         (flagToList . fmap display))

       , option [] ["pristine"]
           ("Unpack the original pristine tarball, rather than updating the "
           ++ ".cabal file with the latest revision from the package archive.")
           getPristine (\v flags -> flags { getPristine = v })
           trueArg
       ]
  }
-- 'cabal unpack' is a deprecated alias for 'cabal get'.
-- | Deprecated alias for 'getCommand'; differs only in name and usage text.
unpackCommand :: CommandUI GetFlags
unpackCommand = getCommand {
    commandName = "unpack",
    commandUsage = usagePackages "unpack"
  }

-- Field-wise combination via GHC.Generics ('gmempty'/'gmappend').
instance Monoid GetFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup GetFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * List flags
-- ------------------------------------------------------------
-- | Flags for the @list@ command.
data ListFlags = ListFlags {
    listInstalled :: Flag Bool,      -- ^ only show installed packages
    listSimpleOutput :: Flag Bool,   -- ^ machine-readable output
    listVerbosity :: Flag Verbosity,
    listPackageDBs :: [Maybe PackageDB]  -- ^ extra package dbs; 'Nothing' means "clear"
  } deriving Generic

-- | Defaults for @list@: show everything, human-readable, normal verbosity.
defaultListFlags :: ListFlags
defaultListFlags = ListFlags {
    listInstalled = Flag False,
    listSimpleOutput = Flag False,
    listVerbosity = toFlag normal,
    listPackageDBs = []
  }
-- | The @list@ command: search the package index (and/or the installed
-- package databases) by name.
listCommand  :: CommandUI ListFlags
listCommand = CommandUI {
    commandName = "list",
    commandSynopsis = "List packages matching a search string.",
    commandDescription = Just $ \_ -> wrapText $
      "List all packages, or all packages matching one of the search"
      ++ " strings.\n"
      ++ "\n"
      ++ "If there is a sandbox in the current directory and "
      ++ "config:ignore-sandbox is False, use the sandbox package database. "
      ++ "Otherwise, use the package database specified with --package-db. "
      ++ "If not specified, use the user package database.\n",
    commandNotes = Just $ \pname ->
      "Examples:\n"
      ++ " " ++ pname ++ " list pandoc\n"
      ++ " Will find pandoc, pandoc-citeproc, pandoc-lens, ...\n",
    commandUsage = usageAlternatives "list" [ "[FLAGS]"
                                            , "[FLAGS] STRINGS"],
    commandDefaultFlags = defaultListFlags,
    commandOptions = \_ -> [
        optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v })

        , option [] ["installed"]
            "Only print installed packages"
            listInstalled (\v flags -> flags { listInstalled = v })
            trueArg

        , option [] ["simple-output"]
            "Print in a easy-to-parse format"
            listSimpleOutput (\v flags -> flags { listSimpleOutput = v })
            trueArg

        , option "" ["package-db"]
          (   "Append the given package database to the list of package"
           ++ " databases used (to satisfy dependencies and register into)."
           ++ " May be a specific file, 'global' or 'user'. The initial list"
           ++ " is ['global'], ['global', 'user'], or ['global', $sandbox],"
           ++ " depending on context. Use 'clear' to reset the list to empty."
           ++ " See the user guide for details.")
          listPackageDBs (\v flags -> flags { listPackageDBs = v })
          (reqArg' "DB" readPackageDbList showPackageDbList)

        ]
  }

-- Field-wise combination via GHC.Generics ('gmempty'/'gmappend').
instance Monoid ListFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup ListFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Info flags
-- ------------------------------------------------------------
-- | Flags for the @info@ command.
data InfoFlags = InfoFlags {
    infoVerbosity :: Flag Verbosity,
    infoPackageDBs :: [Maybe PackageDB]  -- ^ extra package dbs; 'Nothing' means "clear"
  } deriving Generic

-- | Defaults for @info@: normal verbosity, no extra package dbs.
defaultInfoFlags :: InfoFlags
defaultInfoFlags = InfoFlags {
    infoVerbosity = toFlag normal,
    infoPackageDBs = []
  }
-- | The @info@ command: show detailed information about one package.
infoCommand  :: CommandUI InfoFlags
infoCommand = CommandUI {
    commandName = "info",
    commandSynopsis = "Display detailed information about a particular package.",
    commandDescription = Just $ \_ -> wrapText $
      "If there is a sandbox in the current directory and "
      ++ "config:ignore-sandbox is False, use the sandbox package database. "
      ++ "Otherwise, use the package database specified with --package-db. "
      ++ "If not specified, use the user package database.\n",
    commandNotes = Nothing,
    commandUsage = usageAlternatives "info" ["[FLAGS] PACKAGES"],
    commandDefaultFlags = defaultInfoFlags,
    commandOptions = \_ -> [
        optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v })

        , option "" ["package-db"]
          (   "Append the given package database to the list of package"
           ++ " databases used (to satisfy dependencies and register into)."
           ++ " May be a specific file, 'global' or 'user'. The initial list"
           ++ " is ['global'], ['global', 'user'], or ['global', $sandbox],"
           ++ " depending on context. Use 'clear' to reset the list to empty."
           ++ " See the user guide for details.")
          infoPackageDBs (\v flags -> flags { infoPackageDBs = v })
          (reqArg' "DB" readPackageDbList showPackageDbList)

        ]
  }

-- Field-wise combination via GHC.Generics ('gmempty'/'gmappend').
instance Monoid InfoFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup InfoFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Install flags
-- ------------------------------------------------------------
-- | Install takes the same flags as configure along with a few extras.
--
data InstallFlags = InstallFlags {
    installDocumentation :: Flag Bool,
    installHaddockIndex :: Flag PathTemplate,
    installDryRun :: Flag Bool,
    -- solver configuration
    installMaxBackjumps :: Flag Int,
    installReorderGoals :: Flag ReorderGoals,
    installCountConflicts :: Flag CountConflicts,
    installIndependentGoals :: Flag IndependentGoals,
    installShadowPkgs :: Flag ShadowPkgs,
    installStrongFlags :: Flag StrongFlags,
    installAllowBootLibInstalls :: Flag AllowBootLibInstalls,
    -- reinstall policy
    installReinstall :: Flag Bool,
    installAvoidReinstalls :: Flag AvoidReinstalls,
    installOverrideReinstall :: Flag Bool,
    installUpgradeDeps :: Flag Bool,
    installOnly :: Flag Bool,
    installOnlyDeps :: Flag Bool,
    installIndexState :: Flag IndexState,
    installRootCmd :: Flag String,
    -- reporting and logging
    installSummaryFile :: NubList PathTemplate,
    installLogFile :: Flag PathTemplate,
    installBuildReports :: Flag ReportLevel,
    installReportPlanningFailure :: Flag Bool,
    installSymlinkBinDir :: Flag FilePath,
    installPerComponent :: Flag Bool,
    installOneShot :: Flag Bool,
    installNumJobs :: Flag (Maybe Int),
    installKeepGoing :: Flag Bool,
    installRunTests :: Flag Bool,
    installOfflineMode :: Flag Bool,
    -- | The cabal project file name; defaults to @cabal.project@.
    -- The name itself denotes the cabal project file name, but it also
    -- is the base of auxiliary project files, such as
    -- @cabal.project.local@ and @cabal.project.freeze@ which are also
    -- read and written out in some cases. If the path is not found
    -- in the current working directory, we will successively probe
    -- relative to parent directories until this name is found.
    installProjectFileName :: Flag FilePath
  }
  deriving (Eq, Generic)

instance Binary InstallFlags
-- | Defaults for @install@: no docs, no dry run, standard solver
-- settings, per-component builds on, reports off.
defaultInstallFlags :: InstallFlags
defaultInstallFlags = InstallFlags {
    installDocumentation = Flag False,
    installHaddockIndex = Flag docIndexFile,
    installDryRun = Flag False,
    installMaxBackjumps = Flag defaultMaxBackjumps,
    installReorderGoals = Flag (ReorderGoals False),
    installCountConflicts = Flag (CountConflicts True),
    installIndependentGoals= Flag (IndependentGoals False),
    installShadowPkgs = Flag (ShadowPkgs False),
    installStrongFlags = Flag (StrongFlags False),
    installAllowBootLibInstalls = Flag (AllowBootLibInstalls False),
    installReinstall = Flag False,
    installAvoidReinstalls = Flag (AvoidReinstalls False),
    installOverrideReinstall = Flag False,
    installUpgradeDeps = Flag False,
    installOnly = Flag False,
    installOnlyDeps = Flag False,
    installIndexState = mempty,
    installRootCmd = mempty,
    installSummaryFile = mempty,
    installLogFile = mempty,
    installBuildReports = Flag NoReports,
    installReportPlanningFailure = Flag False,
    installSymlinkBinDir = mempty,
    installPerComponent = Flag True,
    installOneShot = Flag False,
    installNumJobs = mempty,
    installKeepGoing = Flag False,
    installRunTests = mempty,
    installOfflineMode = Flag False,
    installProjectFileName = mempty
  }
  where
    -- default location of the generated haddock contents/index page
    docIndexFile = toPathTemplate ("$datadir" </> "doc"
                                   </> "$arch-$os-$compiler" </> "index.html")
-- | Default backjump limit for the dependency solver, used by
-- 'defaultInstallFlags' and quoted in the @--max-backjumps@ help text.
defaultMaxBackjumps :: Int
defaultMaxBackjumps = 2000
-- | Solver used when none is selected on the command line; quoted in
-- the @--solver@ help text of 'optionSolver'.
defaultSolver :: PreSolver
defaultSolver = AlwaysModular
-- | Comma-separated, human-readable list of every 'PreSolver' choice,
-- used in @--solver@ help and error text.
allSolvers :: String
allSolvers =
    intercalate ", "
      [ display solver | solver <- ([minBound .. maxBound] :: [PreSolver]) ]
-- | Command-line interface for @cabal install@.  Its flags are a
-- 4-tuple combining Cabal's configure options (minus \"constraint\",
-- \"dependency\" and \"exact-configuration\", which are handled
-- separately), the configure-ex options, the install options and the
-- @haddock-@-prefixed haddock options; the get/set pairs in the
-- @where@ clause lift each option group onto its tuple component.
installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
installCommand = CommandUI {
  commandName = "install",
  commandSynopsis = "Install packages.",
  commandUsage = usageAlternatives "install" [ "[FLAGS]"
                                             , "[FLAGS] PACKAGES"
                                             ],
  commandDescription = Just $ \_ -> wrapText $
        "Installs one or more packages. By default, the installed package"
     ++ " will be registered in the user's package database or, if a sandbox"
     ++ " is present in the current directory, inside the sandbox.\n"
     ++ "\n"
     ++ "If PACKAGES are specified, downloads and installs those packages."
     ++ " Otherwise, install the package in the current directory (and/or its"
     ++ " dependencies) (there must be exactly one .cabal file in the current"
     ++ " directory).\n"
     ++ "\n"
     ++ "When using a sandbox, the flags for `install` only affect the"
     ++ " current command and have no effect on future commands. (To achieve"
     ++ " that, `configure` must be used.)\n"
     ++ " In contrast, without a sandbox, the flags to `install` are saved and"
     ++ " affect future commands such as `build` and `repl`. See the help for"
     ++ " `configure` for a list of commands being affected.\n"
     ++ "\n"
     ++ "Installed executables will by default (and without a sandbox)"
     ++ " be put into `~/.cabal/bin/`."
     ++ " If you want installed executable to be available globally, make"
     ++ " sure that the PATH environment variable contains that directory.\n"
     ++ "When using a sandbox, executables will be put into"
     ++ " `$SANDBOX/bin/` (by default: `./.cabal-sandbox/bin/`).\n"
     ++ "\n"
     ++ "When specifying --bindir, consider also specifying --datadir;"
     ++ " this way the sandbox can be deleted and the executable should"
     ++ " continue working as long as bindir and datadir are left untouched.",
  commandNotes = Just $ \pname ->
        -- Reuse the notes of Cabal's configure command, if it has any.
        ( case commandNotes
               $ Cabal.configureCommand defaultProgramDb
          of Just desc -> desc pname ++ "\n"
             Nothing -> ""
        )
     ++ "Examples:\n"
     ++ " " ++ pname ++ " install "
     ++ " Package in the current directory\n"
     ++ " " ++ pname ++ " install foo "
     ++ " Package from the hackage server\n"
     ++ " " ++ pname ++ " install foo-1.0 "
     ++ " Specific version of a package\n"
     ++ " " ++ pname ++ " install 'foo < 2' "
     ++ " Constrained package version\n"
     ++ " " ++ pname ++ " install haddock --bindir=$HOME/hask-bin/ --datadir=$HOME/hask-data/\n"
     ++ " " ++ (map (const ' ') pname)
     ++ " "
     ++ " Change installation destination\n",
  commandDefaultFlags = (commandDefaultFlags configureCommand,
                         defaultConfigExFlags,
                         defaultInstallFlags,
                         Cabal.defaultHaddockFlags),
  commandOptions = \showOrParseArgs ->
       -- Configure options, minus the few that install handles itself.
       liftOptions get1 set1
         (filter ((`notElem` ["constraint", "dependency"
                             , "exact-configuration"])
                  . optionName) $
                  configureOptions showOrParseArgs)
    ++ liftOptions get2 set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag)
    ++ liftOptions get3 set3 (installOptions showOrParseArgs)
    ++ liftOptions get4 set4 (haddockOptions showOrParseArgs)
  }
  where
    -- Getters/setters projecting each option group onto its tuple slot.
    get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d)
    get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d)
    get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d)
    get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d)
-- | Lift a whitelisted subset of Cabal's @haddock@ command options
-- into this command set, renaming each long flag with a @haddock-@
-- prefix (e.g. @--haddock-hoogle@).  The renaming lambda also drops
-- any short flags, so only the prefixed long forms remain.
haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags]
haddockOptions showOrParseArgs
  = [ opt { optionName = "haddock-" ++ name,
            optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr
                          | descr <- optionDescr opt] }
    | opt <- commandOptions Cabal.haddockCommand showOrParseArgs
    , let name = optionName opt
    , name `elem` ["hoogle", "html", "html-location"
                  ,"executables", "tests", "benchmarks", "all", "internal", "css"
                  ,"hyperlink-source", "hscolour-css"
                  ,"contents-location", "for-hackage"]
    ]
  where
    -- Apply a renaming function to the flag names inside each
    -- 'OptDescr' constructor, leaving every other field untouched.
    fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a
    fmapOptFlags modify (ReqArg d f p r w) = ReqArg d (modify f) p r w
    fmapOptFlags modify (OptArg d f p r i w) = OptArg d (modify f) p r i w
    fmapOptFlags modify (ChoiceOpt xs) = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs]
    fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w
-- | All options specific to @cabal install@: one 'OptionField' per
-- 'InstallFlags' field, with the solver-related ones generated by
-- 'optionSolverFlags'.  The @--only@ flag is only registered in
-- 'ParseArgs' mode, so it is accepted but hidden from @--help@.
installOptions :: ShowOrParseArgs -> [OptionField InstallFlags]
installOptions showOrParseArgs =
  [ option "" ["documentation"]
      "building of documentation"
      installDocumentation (\v flags -> flags { installDocumentation = v })
      (boolOpt [] [])
  , option [] ["doc-index-file"]
      "A central index of haddock API documentation (template cannot use $pkgid)"
      installHaddockIndex (\v flags -> flags { installHaddockIndex = v })
      (reqArg' "TEMPLATE" (toFlag.toPathTemplate)
               (flagToList . fmap fromPathTemplate))
  , option [] ["dry-run"]
      "Do not install anything, only print what would be installed."
      installDryRun (\v flags -> flags { installDryRun = v })
      trueArg
  ] ++
  -- Shared solver options (--max-backjumps, --reorder-goals, ...).
  optionSolverFlags showOrParseArgs
    installMaxBackjumps (\v flags -> flags { installMaxBackjumps = v })
    installReorderGoals (\v flags -> flags { installReorderGoals = v })
    installCountConflicts (\v flags -> flags { installCountConflicts = v })
    installIndependentGoals (\v flags -> flags { installIndependentGoals = v })
    installShadowPkgs (\v flags -> flags { installShadowPkgs = v })
    installStrongFlags (\v flags -> flags { installStrongFlags = v })
    installAllowBootLibInstalls (\v flags -> flags { installAllowBootLibInstalls = v }) ++
  [ option [] ["reinstall"]
      "Install even if it means installing the same version again."
      installReinstall (\v flags -> flags { installReinstall = v })
      (yesNoOpt showOrParseArgs)
  , option [] ["avoid-reinstalls"]
      "Do not select versions that would destructively overwrite installed packages."
      (fmap asBool . installAvoidReinstalls)
      (\v flags -> flags { installAvoidReinstalls = fmap AvoidReinstalls v })
      (yesNoOpt showOrParseArgs)
  , option [] ["force-reinstalls"]
      "Reinstall packages even if they will most likely break other installed packages."
      installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v })
      (yesNoOpt showOrParseArgs)
  , option [] ["upgrade-dependencies"]
      "Pick the latest version for all dependencies, rather than trying to pick an installed version."
      installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v })
      (yesNoOpt showOrParseArgs)
  , option [] ["only-dependencies"]
      "Install only the dependencies necessary to build the given packages"
      installOnlyDeps (\v flags -> flags { installOnlyDeps = v })
      (yesNoOpt showOrParseArgs)
    -- Alias: --dependencies-only sets the same field as --only-dependencies.
  , option [] ["dependencies-only"]
      "A synonym for --only-dependencies"
      installOnlyDeps (\v flags -> flags { installOnlyDeps = v })
      (yesNoOpt showOrParseArgs)
  , option [] ["index-state"]
      ("Use source package index state as it existed at a previous time. " ++
       "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++
       "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD').")
      installIndexState (\v flags -> flags { installIndexState = v })
      (reqArg "STATE" (readP_to_E (const $ "index-state must be a " ++
                                   "unix-timestamps (e.g. '@1474732068'), " ++
                                   "a ISO8601 UTC timestamp " ++
                                   "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'")
                                  (toFlag `fmap` parse))
              (flagToList . fmap display))
  , option [] ["root-cmd"]
      "(No longer supported, do not use.)"
      installRootCmd (\v flags -> flags { installRootCmd = v })
      (reqArg' "COMMAND" toFlag flagToList)
  , option [] ["symlink-bindir"]
      "Add symlinks to installed executables into this directory."
      installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v })
      (reqArgFlag "DIR")
  , option [] ["build-summary"]
      "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)"
      installSummaryFile (\v flags -> flags { installSummaryFile = v })
      (reqArg' "TEMPLATE" (\x -> toNubList [toPathTemplate x]) (map fromPathTemplate . fromNubList))
  , option [] ["build-log"]
      "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)"
      installLogFile (\v flags -> flags { installLogFile = v })
      (reqArg' "TEMPLATE" (toFlag.toPathTemplate)
               (flagToList . fmap fromPathTemplate))
  , option [] ["remote-build-reporting"]
      "Generate build reports to send to a remote server (none, anonymous or detailed)."
      installBuildReports (\v flags -> flags { installBuildReports = v })
      (reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', "
                                           ++ "'anonymous' or 'detailed'")
                                  (toFlag `fmap` parse))
              (flagToList . fmap display))
  , option [] ["report-planning-failure"]
      "Generate build reports when the dependency solver fails. This is used by the Hackage build bot."
      installReportPlanningFailure (\v flags -> flags { installReportPlanningFailure = v })
      trueArg
  , option "" ["per-component"]
      "Per-component builds when possible"
      installPerComponent (\v flags -> flags { installPerComponent = v })
      (boolOpt [] [])
  , option [] ["one-shot"]
      "Do not record the packages in the world file."
      installOneShot (\v flags -> flags { installOneShot = v })
      (yesNoOpt showOrParseArgs)
  , option [] ["run-tests"]
      "Run package test suites during installation."
      installRunTests (\v flags -> flags { installRunTests = v })
      trueArg
  , optionNumJobs
      installNumJobs (\v flags -> flags { installNumJobs = v })
  , option [] ["keep-going"]
      "After a build failure, continue to build other unaffected packages."
      installKeepGoing (\v flags -> flags { installKeepGoing = v })
      trueArg
  , option [] ["offline"]
      "Don't download packages from the Internet."
      installOfflineMode (\v flags -> flags { installOfflineMode = v })
      (yesNoOpt showOrParseArgs)
  , option [] ["project-file"]
      "Set the name of the cabal.project file to search for in parent directories"
      installProjectFileName (\v flags -> flags {installProjectFileName = v})
      (reqArgFlag "FILE")
  ] ++ case showOrParseArgs of      -- TODO: remove when "cabal install"
                                    -- avoids
         ParseArgs ->
           [ option [] ["only"]
               "Only installs the package in the current directory."
               installOnly (\v flags -> flags { installOnly = v })
               trueArg ]
         _ -> []
-- Field-wise flag combination; gmempty/gmappend suggest the methods
-- are generically derived from the record's 'Generic' instance.
instance Monoid InstallFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup InstallFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Upload flags
-- ------------------------------------------------------------
-- | Is this a candidate package or a package to be published?
data IsCandidate = IsCandidate  -- ^ Upload as an unpublished candidate
                 | IsPublished  -- ^ Publish the package directly
  deriving Eq
-- | Flags for the @cabal upload@ command.
data UploadFlags = UploadFlags {
    uploadCandidate   :: Flag IsCandidate,  -- ^ candidate vs. published upload
    uploadDoc         :: Flag Bool,         -- ^ upload documentation instead of a source tarball
    uploadUsername    :: Flag Username,
    uploadPassword    :: Flag Password,
    uploadPasswordCmd :: Flag [String],     -- ^ external command run to obtain the password
    uploadVerbosity   :: Flag Verbosity
  } deriving Generic
-- | Defaults for 'UploadFlags': upload as a candidate, sources (not
-- docs), normal verbosity, and no stored credentials.
defaultUploadFlags :: UploadFlags
defaultUploadFlags = UploadFlags {
    uploadCandidate = toFlag IsCandidate,
    uploadDoc = toFlag False,
    uploadUsername = mempty,
    uploadPassword = mempty,
    uploadPasswordCmd = mempty,
    uploadVerbosity = toFlag normal
  }
-- | Command-line interface for @cabal upload@: one option per
-- 'UploadFlags' field.
uploadCommand :: CommandUI UploadFlags
uploadCommand = CommandUI {
    commandName = "upload",
    commandSynopsis = "Uploads source packages or documentation to Hackage.",
    commandDescription = Nothing,
    commandNotes = Just $ \_ ->
         "You can store your Hackage login in the ~/.cabal/config file\n"
      ++ relevantConfigValuesText ["username", "password"],
    commandUsage = \pname ->
         "Usage: " ++ pname ++ " upload [FLAGS] TARFILES\n",
    commandDefaultFlags = defaultUploadFlags,
    commandOptions = \_ ->
      [optionVerbosity uploadVerbosity
       (\v flags -> flags { uploadVerbosity = v })
      ,option [] ["publish"]
        "Publish the package instead of uploading it as a candidate."
        uploadCandidate (\v flags -> flags { uploadCandidate = v })
        (noArg (Flag IsPublished))
      ,option ['d'] ["documentation"]
        ("Upload documentation instead of a source package. "
        ++ "By default, this uploads documentation for a package candidate. "
        ++ "To upload documentation for "
        ++ "a published package, combine with --publish.")
        uploadDoc (\v flags -> flags { uploadDoc = v })
        trueArg
      ,option ['u'] ["username"]
        "Hackage username."
        uploadUsername (\v flags -> flags { uploadUsername = v })
        (reqArg' "USERNAME" (toFlag . Username)
                            (flagToList . fmap unUsername))
      ,option ['p'] ["password"]
        "Hackage password."
        uploadPassword (\v flags -> flags { uploadPassword = v })
        (reqArg' "PASSWORD" (toFlag . Password)
                            (flagToList . fmap unPassword))
      ,option ['P'] ["password-command"]
        "Command to get Hackage password."
        -- The command line is split on whitespace into argv words.
        uploadPasswordCmd (\v flags -> flags { uploadPasswordCmd = v })
        (reqArg' "PASSWORD" (Flag . words) (fromMaybe [] . flagToMaybe))
      ]
  }
-- Field-wise flag combination via the generic helpers.
instance Monoid UploadFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup UploadFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Init flags
-- ------------------------------------------------------------
-- | A completely unset 'IT.InitFlags' value.
emptyInitFlags :: IT.InitFlags
emptyInitFlags = mempty
-- | Defaults for @cabal init@: everything unset except verbosity,
-- which starts at 'normal'.
defaultInitFlags :: IT.InitFlags
defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal }
-- | Command-line interface for @cabal init@, exposing the
-- 'IT.InitFlags' fields as options.  With no arguments it runs
-- interactively (per the description text); @-n@ disables that.
initCommand :: CommandUI IT.InitFlags
initCommand = CommandUI {
    commandName = "init",
    commandSynopsis = "Create a new .cabal package file (interactively).",
    commandDescription = Just $ \_ -> wrapText $
         "Cabalise a project by creating a .cabal, Setup.hs, and "
      ++ "optionally a LICENSE file.\n"
      ++ "\n"
      ++ "Calling init with no arguments (recommended) uses an "
      ++ "interactive mode, which will try to guess as much as "
      ++ "possible and prompt you for the rest. Command-line "
      ++ "arguments are provided for scripting purposes. "
      ++ "If you don't want interactive mode, be sure to pass "
      ++ "the -n flag.\n",
    commandNotes = Nothing,
    commandUsage = \pname ->
         "Usage: " ++ pname ++ " init [FLAGS]\n",
    commandDefaultFlags = defaultInitFlags,
    commandOptions = \_ ->
      [ option ['n'] ["non-interactive"]
          "Non-interactive mode."
          IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v })
          trueArg
      , option ['q'] ["quiet"]
          "Do not generate log messages to stdout."
          IT.quiet (\v flags -> flags { IT.quiet = v })
          trueArg
      , option [] ["no-comments"]
          "Do not generate explanatory comments in the .cabal file."
          IT.noComments (\v flags -> flags { IT.noComments = v })
          trueArg
      , option ['m'] ["minimal"]
          "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments."
          IT.minimal (\v flags -> flags { IT.minimal = v })
          trueArg
      , option [] ["overwrite"]
          "Overwrite any existing .cabal, LICENSE, or Setup.hs files without warning."
          IT.overwrite (\v flags -> flags { IT.overwrite = v })
          trueArg
      , option [] ["package-dir"]
          "Root directory of the package (default = current directory)."
          IT.packageDir (\v flags -> flags { IT.packageDir = v })
          (reqArgFlag "DIRECTORY")
      , option ['p'] ["package-name"]
          "Name of the Cabal package to create."
          IT.packageName (\v flags -> flags { IT.packageName = v })
          (reqArg "PACKAGE" (readP_to_E ("Cannot parse package name: "++)
                                        (toFlag `fmap` parse))
                  (flagToList . fmap display))
      , option [] ["version"]
          "Initial version of the package."
          IT.version (\v flags -> flags { IT.version = v })
          (reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++)
                                        (toFlag `fmap` parse))
                  (flagToList . fmap display))
      , option [] ["cabal-version"]
          "Required version of the Cabal library."
          IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v })
          (reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal version range: "++)
                                              (toFlag `fmap` parse))
                  (flagToList . fmap display))
      , option ['l'] ["license"]
          "Project license."
          IT.license (\v flags -> flags { IT.license = v })
          (reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++)
                                        (toFlag `fmap` parse))
                  (flagToList . fmap display))
      , option ['a'] ["author"]
          "Name of the project's author."
          IT.author (\v flags -> flags { IT.author = v })
          (reqArgFlag "NAME")
      , option ['e'] ["email"]
          "Email address of the maintainer."
          IT.email (\v flags -> flags { IT.email = v })
          (reqArgFlag "EMAIL")
      , option ['u'] ["homepage"]
          "Project homepage and/or repository."
          IT.homepage (\v flags -> flags { IT.homepage = v })
          (reqArgFlag "URL")
      , option ['s'] ["synopsis"]
          "Short project synopsis."
          IT.synopsis (\v flags -> flags { IT.synopsis = v })
          (reqArgFlag "TEXT")
      , option ['c'] ["category"]
          "Project category."
          IT.category (\v flags -> flags { IT.category = v })
          -- A category is either a known one (parsed via readMaybe,
          -- Right) or free-form text (kept verbatim, Left).
          (reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s))
                   (flagToList . fmap (either id show)))
      , option ['x'] ["extra-source-file"]
          "Extra source file to be distributed with tarball."
          IT.extraSrc (\v flags -> flags { IT.extraSrc = v })
          (reqArg' "FILE" (Just . (:[]))
                   (fromMaybe []))
      , option [] ["is-library"]
          "Build a library."
          IT.packageType (\v flags -> flags { IT.packageType = v })
          (noArg (Flag IT.Library))
      , option [] ["is-executable"]
          "Build an executable."
          IT.packageType
          (\v flags -> flags { IT.packageType = v })
          (noArg (Flag IT.Executable))
      , option [] ["main-is"]
          "Specify the main module."
          IT.mainIs
          (\v flags -> flags { IT.mainIs = v })
          (reqArgFlag "FILE")
      , option [] ["language"]
          "Specify the default language."
          IT.language
          (\v flags -> flags { IT.language = v })
          (reqArg "LANGUAGE" (readP_to_E ("Cannot parse language: "++)
                                         (toFlag `fmap` parse))
                  (flagToList . fmap display))
      , option ['o'] ["expose-module"]
          "Export a module from the package."
          IT.exposedModules
          (\v flags -> flags { IT.exposedModules = v })
          (reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++)
                                       ((Just . (:[])) `fmap` parse))
                  (maybe [] (fmap display)))
      , option [] ["extension"]
          "Use a LANGUAGE extension (in the other-extensions field)."
          IT.otherExts
          (\v flags -> flags { IT.otherExts = v })
          (reqArg "EXTENSION" (readP_to_E ("Cannot parse extension: "++)
                                          ((Just . (:[])) `fmap` parse))
                  (maybe [] (fmap display)))
      , option ['d'] ["dependency"]
          "Package dependency."
          IT.dependencies (\v flags -> flags { IT.dependencies = v })
          (reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++)
                                        ((Just . (:[])) `fmap` parse))
                  (maybe [] (fmap display)))
      , option [] ["source-dir"]
          "Directory containing package source."
          IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v })
          (reqArg' "DIR" (Just . (:[]))
                   (fromMaybe []))
      , option [] ["build-tool"]
          "Required external build tool."
          IT.buildTools (\v flags -> flags { IT.buildTools = v })
          (reqArg' "TOOL" (Just . (:[]))
                   (fromMaybe []))
      , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v })
      ]
  }
-- ------------------------------------------------------------
-- * SDist flags
-- ------------------------------------------------------------
-- | Extra flags to @sdist@ beyond runghc Setup sdist
--
data SDistExFlags = SDistExFlags {
    sDistFormat :: Flag ArchiveFormat  -- ^ archive format for the produced tarball
  }
  deriving (Show, Generic)
-- | Supported sdist archive formats.
data ArchiveFormat = TargzFormat | ZipFormat -- ...
  deriving (Show, Eq)
-- | Default extra sdist flags: produce a @.tar.gz@ archive.
defaultSDistExFlags :: SDistExFlags
defaultSDistExFlags = SDistExFlags {
    sDistFormat = Flag TargzFormat
  }
-- | @cabal sdist@: Cabal's own sdist command, extended with the
-- archive-format option from 'SDistExFlags'; the two option groups
-- are lifted onto the components of a pair of flag records.
sdistCommand :: CommandUI (SDistFlags, SDistExFlags)
sdistCommand = Cabal.sdistCommand {
    commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags),
    commandOptions = \showOrParseArgs ->
         liftOptions fst setFst (commandOptions Cabal.sdistCommand showOrParseArgs)
      ++ liftOptions snd setSnd sdistExOptions
  }
  where
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)
    sdistExOptions =
      [option [] ["archive-format"] "archive-format"
         sDistFormat (\v flags -> flags { sDistFormat = v })
         (choiceOpt
            [ (Flag TargzFormat, ([], ["targz"]),
                 "Produce a '.tar.gz' format archive (default and required for uploading to hackage)")
            , (Flag ZipFormat, ([], ["zip"]),
                 "Produce a '.zip' format archive")
            ])
      ]
-- Field-wise flag combination via the generic helpers.
instance Monoid SDistExFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup SDistExFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Win32SelfUpgrade flags
-- ------------------------------------------------------------
-- | Flags for the (internal) @win32selfupgrade@ command.
data Win32SelfUpgradeFlags = Win32SelfUpgradeFlags {
    win32SelfUpgradeVerbosity :: Flag Verbosity
  } deriving Generic
-- | Defaults for 'Win32SelfUpgradeFlags': normal verbosity.
defaultWin32SelfUpgradeFlags :: Win32SelfUpgradeFlags
defaultWin32SelfUpgradeFlags = Win32SelfUpgradeFlags {
    win32SelfUpgradeVerbosity = toFlag normal
  }
-- | Command-line interface for @win32selfupgrade@; only a verbosity
-- option, the PID and PATH arrive as positional arguments.
win32SelfUpgradeCommand :: CommandUI Win32SelfUpgradeFlags
win32SelfUpgradeCommand = CommandUI {
    commandName = "win32selfupgrade",
    commandSynopsis = "Self-upgrade the executable on Windows",
    commandDescription = Nothing,
    commandNotes = Nothing,
    commandUsage = \pname ->
      "Usage: " ++ pname ++ " win32selfupgrade PID PATH\n",
    commandDefaultFlags = defaultWin32SelfUpgradeFlags,
    commandOptions = \_ ->
      [optionVerbosity win32SelfUpgradeVerbosity
       (\v flags -> flags { win32SelfUpgradeVerbosity = v})
      ]
  }
-- Field-wise flag combination via the generic helpers.
instance Monoid Win32SelfUpgradeFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup Win32SelfUpgradeFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * ActAsSetup flags
-- ------------------------------------------------------------
-- | Flags for the @act-as-setup@ command.
data ActAsSetupFlags = ActAsSetupFlags {
    actAsSetupBuildType :: Flag BuildType  -- ^ build type to emulate
  } deriving Generic
-- | Defaults for 'ActAsSetupFlags': the 'Simple' build type.
defaultActAsSetupFlags :: ActAsSetupFlags
defaultActAsSetupFlags = ActAsSetupFlags {
    actAsSetupBuildType = toFlag Simple
  }
-- | Command-line interface for @act-as-setup@: run cabal as if it
-- were a Setup.hs of the given @--build-type@.
actAsSetupCommand :: CommandUI ActAsSetupFlags
actAsSetupCommand = CommandUI {
    commandName = "act-as-setup",
    commandSynopsis = "Run as-if this was a Setup.hs",
    commandDescription = Nothing,
    commandNotes = Nothing,
    commandUsage = \pname ->
      "Usage: " ++ pname ++ " act-as-setup\n",
    commandDefaultFlags = defaultActAsSetupFlags,
    commandOptions = \_ ->
      [option "" ["build-type"]
         "Use the given build type."
         actAsSetupBuildType (\v flags -> flags { actAsSetupBuildType = v })
         (reqArg "BUILD-TYPE" (readP_to_E ("Cannot parse build type: "++)
                                          (fmap toFlag parse))
                 (map display . flagToList))
      ]
  }
-- Field-wise flag combination via the generic helpers.
instance Monoid ActAsSetupFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup ActAsSetupFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Sandbox-related flags
-- ------------------------------------------------------------
-- | Flags for the @cabal sandbox@ command (shared by all of its
-- subcommands).
data SandboxFlags = SandboxFlags {
    sandboxVerbosity :: Flag Verbosity,
    sandboxSnapshot  :: Flag Bool, -- FIXME: this should be an 'add-source'-only
                                   -- flag.
    sandboxLocation  :: Flag FilePath  -- ^ where the sandbox directory lives
  } deriving Generic
-- | Directory (relative to the project) where a sandbox is created by
-- default.
defaultSandboxLocation :: FilePath
defaultSandboxLocation = ".cabal-sandbox"
-- | Defaults for 'SandboxFlags': normal verbosity, no snapshot,
-- sandbox at 'defaultSandboxLocation'.
defaultSandboxFlags :: SandboxFlags
defaultSandboxFlags = SandboxFlags {
    sandboxVerbosity = toFlag normal,
    sandboxSnapshot = toFlag False,
    sandboxLocation = toFlag defaultSandboxLocation
  }
-- | Command-line interface for @cabal sandbox@ and its subcommands
-- (init, delete, add-source, delete-source, list-sources, hc-pkg);
-- the subcommand itself is a positional argument, only the shared
-- flags are declared here.
sandboxCommand :: CommandUI SandboxFlags
sandboxCommand = CommandUI {
    commandName = "sandbox",
    commandSynopsis = "Create/modify/delete a sandbox.",
    commandDescription = Just $ \pname -> concat
      [ paragraph $ "Sandboxes are isolated package databases that can be used"
        ++ " to prevent dependency conflicts that arise when many different"
        ++ " packages are installed in the same database (i.e. the user's"
        ++ " database in the home directory)."
      , paragraph $ "A sandbox in the current directory (created by"
        ++ " `sandbox init`) will be used instead of the user's database for"
        ++ " commands such as `install` and `build`. Note that (a directly"
        ++ " invoked) GHC will not automatically be aware of sandboxes;"
        ++ " only if called via appropriate " ++ pname
        ++ " commands, e.g. `repl`, `build`, `exec`."
      , paragraph $ "Currently, " ++ pname ++ " will not search for a sandbox"
        ++ " in folders above the current one, so cabal will not see the sandbox"
        ++ " if you are in a subfolder of a sandbox."
      , paragraph "Subcommands:"
      , headLine "init:"
      , indentParagraph $ "Initialize a sandbox in the current directory."
        ++ " An existing package database will not be modified, but settings"
        ++ " (such as the location of the database) can be modified this way."
      , headLine "delete:"
      , indentParagraph $ "Remove the sandbox; deleting all the packages"
        ++ " installed inside."
      , headLine "add-source:"
      , indentParagraph $ "Make one or more local packages available in the"
        ++ " sandbox. PATHS may be relative or absolute."
        ++ " Typical usecase is when you need"
        ++ " to make a (temporary) modification to a dependency: You download"
        ++ " the package into a different directory, make the modification,"
        ++ " and add that directory to the sandbox with `add-source`."
      , indentParagraph $ "Unless given `--snapshot`, any add-source'd"
        ++ " dependency that was modified since the last build will be"
        ++ " re-installed automatically."
      , headLine "delete-source:"
      , indentParagraph $ "Remove an add-source dependency; however, this will"
        ++ " not delete the package(s) that have been installed in the sandbox"
        ++ " from this dependency. You can either unregister the package(s) via"
        ++ " `" ++ pname ++ " sandbox hc-pkg unregister` or re-create the"
        ++ " sandbox (`sandbox delete; sandbox init`)."
      , headLine "list-sources:"
      , indentParagraph $ "List the directories of local packages made"
        ++ " available via `" ++ pname ++ " add-source`."
      , headLine "hc-pkg:"
      , indentParagraph $ "Similar to `ghc-pkg`, but for the sandbox package"
        ++ " database. Can be used to list specific/all packages that are"
        ++ " installed in the sandbox. For subcommands, see the help for"
        ++ " ghc-pkg. Affected by the compiler version specified by `configure`."
      ],
    commandNotes = Just $ \pname ->
         relevantConfigValuesText ["require-sandbox"
                                  ,"ignore-sandbox"]
      ++ "\n"
      ++ "Examples:\n"
      ++ " Set up a sandbox with one local dependency, located at ../foo:\n"
      ++ " " ++ pname ++ " sandbox init\n"
      ++ " " ++ pname ++ " sandbox add-source ../foo\n"
      ++ " " ++ pname ++ " install --only-dependencies\n"
      ++ " Reset the sandbox:\n"
      ++ " " ++ pname ++ " sandbox delete\n"
      ++ " " ++ pname ++ " sandbox init\n"
      ++ " " ++ pname ++ " install --only-dependencies\n"
      ++ " List the packages in the sandbox:\n"
      ++ " " ++ pname ++ " sandbox hc-pkg list\n"
      ++ " Unregister the `broken` package from the sandbox:\n"
      ++ " " ++ pname ++ " sandbox hc-pkg -- --force unregister broken\n",
    commandUsage = usageAlternatives "sandbox"
      [ "init [FLAGS]"
      , "delete [FLAGS]"
      , "add-source [FLAGS] PATHS"
      , "delete-source [FLAGS] PATHS"
      , "list-sources [FLAGS]"
      , "hc-pkg [FLAGS] [--] COMMAND [--] [ARGS]"
      ],
    commandDefaultFlags = defaultSandboxFlags,
    commandOptions = \_ ->
      [ optionVerbosity sandboxVerbosity
        (\v flags -> flags { sandboxVerbosity = v })
      , option [] ["snapshot"]
        "Take a snapshot instead of creating a link (only applies to 'add-source')"
        sandboxSnapshot (\v flags -> flags { sandboxSnapshot = v })
        trueArg
      , option [] ["sandbox"]
        "Sandbox location (default: './.cabal-sandbox')."
        sandboxLocation (\v flags -> flags { sandboxLocation = v })
        (reqArgFlag "DIR")
      ]
  }
-- Field-wise flag combination via the generic helpers.
instance Monoid SandboxFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup SandboxFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Exec Flags
-- ------------------------------------------------------------
-- | Flags for the @cabal exec@ command.
data ExecFlags = ExecFlags {
    execVerbosity :: Flag Verbosity,
    execDistPref  :: Flag FilePath  -- ^ dist directory preference
  } deriving Generic
-- | Defaults for 'ExecFlags': normal verbosity, no dist-dir override.
defaultExecFlags :: ExecFlags
defaultExecFlags = ExecFlags {
    execVerbosity = toFlag normal,
    execDistPref = NoFlag
  }
-- | Command-line interface for @cabal exec@: run COMMAND in an
-- environment that exposes the sandbox package database (the command
-- and its arguments are positional).
execCommand :: CommandUI ExecFlags
execCommand = CommandUI {
    commandName = "exec",
    commandSynopsis = "Give a command access to the sandbox package repository.",
    commandDescription = Just $ \pname -> wrapText $
         -- TODO: this is too GHC-focused for my liking..
         "A directly invoked GHC will not automatically be aware of any"
      ++ " sandboxes: the GHC_PACKAGE_PATH environment variable controls what"
      ++ " GHC uses. `" ++ pname ++ " exec` can be used to modify this variable:"
      ++ " COMMAND will be executed in a modified environment and thereby uses"
      ++ " the sandbox package database.\n"
      ++ "\n"
      ++ "If there is no sandbox, behaves as identity (executing COMMAND).\n"
      ++ "\n"
      ++ "Note that other " ++ pname ++ " commands change the environment"
      ++ " variable appropriately already, so there is no need to wrap those"
      ++ " in `" ++ pname ++ " exec`. But with `" ++ pname ++ " exec`, the user"
      ++ " has more control and can, for example, execute custom scripts which"
      ++ " indirectly execute GHC.\n"
      ++ "\n"
      ++ "Note that `" ++ pname ++ " repl` is different from `" ++ pname
      ++ " exec -- ghci` as the latter will not forward any additional flags"
      ++ " being defined in the local package to ghci.\n"
      ++ "\n"
      ++ "See `" ++ pname ++ " sandbox`.\n",
    commandNotes = Just $ \pname ->
         "Examples:\n"
      ++ " " ++ pname ++ " exec -- ghci -Wall\n"
      ++ " Start a repl session with sandbox packages and all warnings;\n"
      ++ " " ++ pname ++ " exec gitit -- -f gitit.cnf\n"
      ++ " Give gitit access to the sandbox packages, and pass it a flag;\n"
      ++ " " ++ pname ++ " exec runghc Foo.hs\n"
      ++ " Execute runghc on Foo.hs with runghc configured to use the\n"
      ++ " sandbox package database (if a sandbox is being used).\n",
    commandUsage = \pname ->
         "Usage: " ++ pname ++ " exec [FLAGS] [--] COMMAND [--] [ARGS]\n",
    commandDefaultFlags = defaultExecFlags,
    commandOptions = \showOrParseArgs ->
      [ optionVerbosity execVerbosity
        (\v flags -> flags { execVerbosity = v })
      , Cabal.optionDistPref
        execDistPref (\d flags -> flags { execDistPref = d })
        showOrParseArgs
      ]
  }
-- Field-wise flag combination via the generic helpers.
instance Monoid ExecFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup ExecFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * UserConfig flags
-- ------------------------------------------------------------
-- | Flags for the @cabal user-config@ command.
data UserConfigFlags = UserConfigFlags {
    userConfigVerbosity :: Flag Verbosity,
    userConfigForce     :: Flag Bool  -- ^ overwrite an existing config file
  } deriving Generic
instance Monoid UserConfigFlags where
  -- NOTE(review): unlike the other flag monoids in this module, this
  -- 'mempty' carries concrete defaults (normal verbosity, no --force)
  -- instead of 'gmempty', so it is not necessarily a strict identity
  -- for '(<>)' -- confirm this asymmetry is intended.
  mempty = UserConfigFlags {
      userConfigVerbosity = toFlag normal,
      userConfigForce = toFlag False
    }
  mappend = (<>)
instance Semigroup UserConfigFlags where
  (<>) = gmappend
-- | Command-line interface for @cabal user-config@ (subcommands
-- @init@, @diff@, @update@ are positional; only verbosity and
-- @--force@ are flags).
userConfigCommand :: CommandUI UserConfigFlags
userConfigCommand = CommandUI {
    commandName = "user-config",
    commandSynopsis = "Display and update the user's global cabal configuration.",
    commandDescription = Just $ \_ -> wrapText $
         "When upgrading cabal, the set of configuration keys and their default"
      ++ " values may change. This command provides means to merge the existing"
      ++ " config in ~/.cabal/config"
      ++ " (i.e. all bindings that are actually defined and not commented out)"
      ++ " and the default config of the new version.\n"
      ++ "\n"
      ++ "init: Creates a new config file at either ~/.cabal/config or as"
      ++ " specified by --config-file, if given. An existing file won't be "
      ++ " overwritten unless -f or --force is given.\n"
      ++ "diff: Shows a pseudo-diff of the user's ~/.cabal/config file and"
      ++ " the default configuration that would be created by cabal if the"
      ++ " config file did not exist.\n"
      ++ "update: Applies the pseudo-diff to the configuration that would be"
      ++ " created by default, and write the result back to ~/.cabal/config.",
    commandNotes = Nothing,
    commandUsage = usageAlternatives "user-config" ["init", "diff", "update"],
    commandDefaultFlags = mempty,
    commandOptions = \ _ -> [
        optionVerbosity userConfigVerbosity (\v flags -> flags { userConfigVerbosity = v })
      , option ['f'] ["force"]
        "Overwrite the config file if it already exists."
        userConfigForce (\v flags -> flags { userConfigForce = v })
        trueArg
      ]
  }
-- ------------------------------------------------------------
-- * GetOpt Utils
-- ------------------------------------------------------------
-- | Build an option descriptor for a required @String@ argument whose
-- value is stored in a 'Flag' field: parsing always succeeds (the raw
-- string is wrapped in 'Flag'), and pretty-printing unwraps it via
-- 'flagToList'.
reqArgFlag :: ArgPlaceHolder ->
              MkOptDescr (b -> Flag String) (Flag String -> b -> b) b
reqArgFlag placeHolder = reqArg placeHolder (succeedReadE Flag) flagToList
-- | Lift every option in a list from flag record @a@ to a larger flag
-- record @b@, given a projection and an update function.
liftOptions :: (b -> a) -> (a -> b -> b)
            -> [OptionField a] -> [OptionField b]
liftOptions project update opts =
    [ liftOption project update opt | opt <- opts ]
-- | A boolean option that, when parsing real arguments, also accepts
-- a @--no-@-prefixed negative form for each long flag; when merely
-- showing help ('ShowArgs') it degrades to a plain 'trueArg' so only
-- the positive form is listed.
yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b
yesNoOpt mode sf lf =
  case mode of
    ShowArgs -> trueArg sf lf
    _        -> Command.boolOpt' flagToMaybe Flag (sf, lf) ([], map ("no-" ++) lf) sf lf
-- | Build the @--solver@ option for any flags record, given a getter
-- and setter for its @Flag PreSolver@ field; help and error text
-- quote 'defaultSolver' and 'allSolvers'.
optionSolver :: (flags -> Flag PreSolver)
             -> (Flag PreSolver -> flags -> flags)
             -> OptionField flags
optionSolver get set =
  option [] ["solver"]
    ("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ".")
    get set
    (reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers)
                                 (toFlag `fmap` parse))
            (flagToList . fmap display))
-- | The bundle of solver-tuning options shared by several commands.  Each
-- getter/setter pair threads one solver setting through the command's flags
-- record; the Bool-like settings go through 'yesNoOpt' so both the positive
-- and the @--no-*@ negative spelling are accepted when parsing.
optionSolverFlags :: ShowOrParseArgs
                  -> (flags -> Flag Int   ) -> (Flag Int    -> flags -> flags)
                  -> (flags -> Flag ReorderGoals)     -> (Flag ReorderGoals     -> flags -> flags)
                  -> (flags -> Flag CountConflicts)   -> (Flag CountConflicts   -> flags -> flags)
                  -> (flags -> Flag IndependentGoals) -> (Flag IndependentGoals -> flags -> flags)
                  -> (flags -> Flag ShadowPkgs)       -> (Flag ShadowPkgs       -> flags -> flags)
                  -> (flags -> Flag StrongFlags)      -> (Flag StrongFlags      -> flags -> flags)
                  -> (flags -> Flag AllowBootLibInstalls) -> (Flag AllowBootLibInstalls -> flags -> flags)
                  -> [OptionField flags]
optionSolverFlags showOrParseArgs getmbj setmbj getrg setrg getcc setcc getig setig
                  getsip setsip getstrfl setstrfl getib setib =
  [ -- numeric option; negative means unlimited, 0 disables backtracking
    option [] ["max-backjumps"]
      ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.")
      getmbj setmbj
      (reqArg "NUM" (readP_to_E ("Cannot parse number: "++) (fmap toFlag parse))
                    (map show . flagToList))
    -- the remaining options are newtype-wrapped Bools; asBool unwraps for
    -- the getter and the newtype constructor re-wraps in the setter
  , option [] ["reorder-goals"]
      "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages."
      (fmap asBool . getrg)
      (setrg . fmap ReorderGoals)
      (yesNoOpt showOrParseArgs)
  , option [] ["count-conflicts"]
      "Try to speed up solving by preferring goals that are involved in a lot of conflicts (default)."
      (fmap asBool . getcc)
      (setcc . fmap CountConflicts)
      (yesNoOpt showOrParseArgs)
  , option [] ["independent-goals"]
      "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen."
      (fmap asBool . getig)
      (setig . fmap IndependentGoals)
      (yesNoOpt showOrParseArgs)
  , option [] ["shadow-installed-packages"]
      "If multiple package instances of the same version are installed, treat all but one as shadowed."
      (fmap asBool . getsip)
      (setsip . fmap ShadowPkgs)
      (yesNoOpt showOrParseArgs)
  , option [] ["strong-flags"]
      "Do not defer flag choices (this used to be the default in cabal-install <= 1.20)."
      (fmap asBool . getstrfl)
      (setstrfl . fmap StrongFlags)
      (yesNoOpt showOrParseArgs)
  , option [] ["allow-boot-library-installs"]
      "Allow cabal to install base, ghc-prim, integer-simple, integer-gmp, and template-haskell."
      (fmap asBool . getib)
      (setib . fmap AllowBootLibInstalls)
      (yesNoOpt showOrParseArgs)
  ]
-- | Usage banner offering both a flags form and a packages form of the
-- command invocation.
usageFlagsOrPackages :: String -> String -> String
usageFlagsOrPackages name pname =
  unlines
    [ "Usage: " ++ invocation ++ " [FLAGS]"
    , " or: " ++ invocation ++ " [PACKAGES]"
    ]
  where
    invocation = pname ++ " " ++ name
-- | Usage banner for a command that takes package arguments.
usagePackages :: String -> String -> String
usagePackages name pname =
  concat ["Usage: ", pname, " ", name, " [PACKAGES]\n"]
-- | Usage banner for a command that only takes flags.
usageFlags :: String -> String -> String
usageFlags name pname =
  unwords ["Usage:", pname, name, "[FLAGS]"] ++ "\n"
--TODO: do we want to allow per-package flags?
-- | Parse each command-line argument as a package name/id or a package
-- dependency, failing (left to right) on the first argument that is neither.
parsePackageArgs :: [String] -> Either String [Dependency]
parsePackageArgs = traverse parseOne
  where
    parseOne arg =
      case readPToMaybe parseDependencyOrPackageId arg of
        Just dep -> Right dep
        Nothing  -> Left $ show arg ++ " is not valid syntax for a package name or"
                        ++ " package dependency."
-- | Accept either an explicit dependency or a plain package identifier; a
-- package id with the null version is interpreted as "any version".
parseDependencyOrPackageId :: Parse.ReadP r Dependency
parseDependencyOrPackageId = parse Parse.+++ liftM pkgidToDependency parse
  where
    pkgidToDependency :: PackageIdentifier -> Dependency
    pkgidToDependency p = case packageVersion p of
      v | v == nullVersion -> Dependency (packageName p) anyVersion
        | otherwise        -> Dependency (packageName p) (thisVersion v)
-- | Render a remote repo as "name:uri" (the form 'readRepo' parses back).
showRepo :: RemoteRepo -> String
showRepo repo = remoteRepoName repo ++ ":"
             ++ uriToString id (remoteRepoURI repo) []

-- | Parse the "name:uri" form produced by 'showRepo'; Nothing on failure.
readRepo :: String -> Maybe RemoteRepo
readRepo = readPToMaybe parseRepo
-- | ReadP parser for "name:absolute-uri".  The secure-repo fields are filled
-- with their legacy defaults (not secure, no keys, no https retry).
parseRepo :: Parse.ReadP r RemoteRepo
parseRepo = do
  name   <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "_-.")
  _      <- Parse.char ':'
  -- URI characters are restricted to this allow-list before the real parse
  uriStr <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~")
  uri    <- maybe Parse.pfail return (parseAbsoluteURI uriStr)
  return RemoteRepo {
    remoteRepoName           = name,
    remoteRepoURI            = uri,
    remoteRepoSecure         = Nothing,
    remoteRepoRootKeys       = [],
    remoteRepoKeyThreshold   = 0,
    remoteRepoShouldTryHttps = False
  }
-- ------------------------------------------------------------
-- * Helpers for Documentation
-- ------------------------------------------------------------
-- | Word-wrap a string to 79 columns, one wrapped line per output line
-- (each terminated by a newline).
headLine :: String -> String
headLine text =
  unlines [unwords lineWords | lineWords <- wrapLine 79 (words text)]
-- | Word-wrap a string to 79 columns and append a trailing blank line,
-- forming a paragraph.
paragraph :: String -> String
paragraph text =
  unlines (map unwords (wrapLine 79 (words text))) ++ "\n"
-- | Word-wrap a string to 77 columns, indent every wrapped line, and append
-- a trailing blank line.
indentParagraph :: String -> String
indentParagraph text =
  unlines (indented ++ [""])
  where
    indented = [" " ++ unwords lineWords | lineWords <- wrapLine 77 (words text)]
-- | Render a heading followed by one indented line per configuration key.
relevantConfigValuesText :: [String] -> String
relevantConfigValuesText vs =
  "Relevant global configuration keys:\n" ++ concatMap keyLine vs
  where
    keyLine v = " " ++ v ++ "\n"
|
mydaum/cabal
|
cabal-install/Distribution/Client/Setup.hs
|
bsd-3-clause
| 104,767
| 0
| 40
| 29,075
| 20,595
| 11,635
| 8,960
| 1,996
| 5
|
{-# LANGUAGE GADTs, TypeFamilies, TypeOperators, CPP, FlexibleContexts, FlexibleInstances, ScopedTypeVariables, MultiParamTypeClasses, UndecidableInstances #-}
{-# OPTIONS_GHC -fenable-rewrite-rules #-}
----------------------------------------------------------------------
-- |
-- Module : Data.Functor.Representable.Trie
-- Copyright : (c) Edward Kmett 2011
-- License : BSD3
--
-- Maintainer : ekmett@gmail.com
-- Stability : experimental
--
----------------------------------------------------------------------
module Data.Functor.Representable.Trie
(
-- * Representations of polynomial functors
HasTrie(..)
-- * Memoizing functions
, mup, memo, memo2, memo3
, inTrie, inTrie2, inTrie3
-- * Workarounds for current GHC limitations
, trie, untrie
, (:->:)(..)
, Entry(..)
) where
import Control.Applicative
import Control.Arrow
import Control.Comonad
import Control.Monad.Reader.Class
import Control.Monad.Representable.Reader
import Data.Bits
import Data.Distributive
import Data.Semigroup
import Data.Word
import Data.Int
import Data.Foldable
import Data.Function (on)
import Data.Functor.Adjunction
import Data.Functor.Bind
import Data.Functor.Extend
import Data.Functor.Identity
import Data.Functor.Representable.Trie.Bool
import Data.Functor.Representable.Trie.Either
import Data.Functor.Representable.Trie.List
import Data.Key
import qualified Data.Monoid as Monoid
import Data.Semigroup.Foldable
import Data.Semigroup.Traversable
import Data.Sequence (Seq, (<|))
import qualified Data.Sequence as Seq
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Traversable
import Prelude hiding (lookup, foldr)
-- | Keys that can index a trie: @a@ is isomorphic, via 'embedKey' /
-- 'projectKey', to the key type of some representable base functor.
class (Adjustable (BaseTrie a), TraversableWithKey1 (BaseTrie a), Representable (BaseTrie a)) => HasTrie a where
  -- | The representable functor whose keys encode @a@.
  type BaseTrie a :: * -> *
  -- projectKey . embedKey = id
  embedKey :: a -> Key (BaseTrie a)
  projectKey :: Key (BaseTrie a) -> a
  {-
  validKey :: Key (BaseTrie a) -> Bool
  validKey _ = True
  -}

-- | A memo trie from @a@ to @b@, stored in @a@'s base functor.
newtype a :->: b = Trie { runTrie :: BaseTrie a b }

type instance Key ((:->:) a) = a

-- | A key/value pair; serves as the left adjoint of @(:->:) a@ below.
data Entry a b = Entry a b
-- * Combinators
-- Matt Hellige's notation for @argument f . result g@.
-- <http://matt.immute.net/content/pointless-fun>
-- | Transform a function on both sides: pre-compose @g@ on the argument and
-- post-compose @f@ on the result, i.e. @(g ~> f) h = f . h . g@.
(~>) :: (a' -> a) -> (b -> b') -> (a -> b) -> a' -> b'
(g ~> f) h = f . h . g
-- | Convert a trie back to the function it tabulates.
untrie :: HasTrie t => (t :->: a) -> t -> a
untrie = index

-- | Tabulate a function as a trie (inverse of 'untrie').
trie :: HasTrie t => (t -> a) -> (t :->: a)
trie = tabulate

-- Rewrite rules exploiting the trie/function and embed/project isomorphisms.
{-# RULES
"trie/untrie" forall t. trie (untrie t) = t
"embedKey/projectKey" forall t. projectKey (embedKey t) = t
  #-}

-- | Memoize a unary function by round-tripping it through a trie.
memo :: HasTrie t => (t -> a) -> t -> a
memo = untrie . trie
-- | Lift a memoizer to work with one more argument.
mup :: HasTrie t => (b -> c) -> (t -> b) -> t -> c
mup mem f = memo (mem . f)

-- | Memoize a binary function, on its first argument and then on its
-- second. Take care to exploit any partial evaluation.
memo2 :: (HasTrie s, HasTrie t) => (s -> t -> a) -> s -> t -> a
memo2 = mup memo

-- | Memoize a ternary function on successive arguments. Take care to
-- exploit any partial evaluation.
memo3 :: (HasTrie r, HasTrie s, HasTrie t) => (r -> s -> t -> a) -> r -> s -> t -> a
memo3 = mup memo2
-- | Apply a unary function inside of a tabulate
inTrie
  :: (HasTrie a, HasTrie c)
  => ((a -> b) -> c -> d)
  -> (a :->: b) -> c :->: d
inTrie = untrie ~> trie

-- | Apply a binary function inside of a tabulate
inTrie2
  :: (HasTrie a, HasTrie c, HasTrie e)
  => ((a -> b) -> (c -> d) -> e -> f)
  -> (a :->: b) -> (c :->: d) -> e :->: f
inTrie2 = untrie ~> inTrie

-- | Apply a ternary function inside of a tabulate
inTrie3
  :: (HasTrie a, HasTrie c, HasTrie e, HasTrie g)
  => ((a -> b) -> (c -> d) -> (e -> f) -> g -> h)
  -> (a :->: b) -> (c :->: d) -> (e :->: f) -> g :->: h
inTrie3 = untrie ~> inTrie2
-- * Implementation details

instance Functor (Entry a) where
  fmap f (Entry a b) = Entry a (f b)

instance HasTrie e => Lookup ((:->:)e) where
  lookup = lookupDefault

instance HasTrie e => Indexable ((:->:)e) where
  -- embed the external key, then index the underlying representation
  index (Trie f) = index f . embedKey

instance HasTrie e => Distributive ((:->:) e) where
  distribute = distributeRep

instance HasTrie e => Representable ((:->:) e) where
  -- project the representation's key before consulting the user's function
  tabulate f = Trie $ tabulate (f . projectKey)

instance HasTrie e => Adjustable ((:->:) e) where
  adjust f k (Trie as) = Trie (adjust f (embedKey k) as)

instance HasTrie e => Zip ((:->:) e)
instance HasTrie e => ZipWithKey ((:->:) e)

-- 'Entry e' is left adjoint to the trie functor '(:->:) e'.
instance HasTrie e => Adjunction (Entry e) ((:->:) e) where
  unit = mapWithKey Entry . pure
  counit (Entry a t) = index t a

instance HasTrie a => Functor ((:->:) a) where
  fmap f (Trie g) = Trie (fmap f g)

-- Keyed/foldable/traversable variants all delegate to the base trie,
-- projecting keys back to @a@ where a key is exposed.
instance HasTrie a => Keyed ((:->:) a) where
  mapWithKey f (Trie a) = Trie (mapWithKey (f . projectKey) a)

instance HasTrie a => Foldable ((:->:) a) where
  foldMap f (Trie a) = foldMap f a

instance HasTrie a => FoldableWithKey ((:->:) a) where
  foldMapWithKey f (Trie a) = foldMapWithKey (f . projectKey) a

instance HasTrie a => Traversable ((:->:) a) where
  traverse f (Trie a) = Trie <$> traverse f a

instance HasTrie a => TraversableWithKey ((:->:) a) where
  traverseWithKey f (Trie a) = Trie <$> traverseWithKey (f . projectKey) a

instance HasTrie a => Foldable1 ((:->:) a) where
  foldMap1 f (Trie a) = foldMap1 f a

instance HasTrie a => FoldableWithKey1 ((:->:) a) where
  foldMapWithKey1 f (Trie a) = foldMapWithKey1 (f . projectKey) a

instance HasTrie a => Traversable1 ((:->:) a) where
  traverse1 f (Trie a) = Trie <$> traverse1 f a

instance HasTrie a => TraversableWithKey1 ((:->:) a) where
  traverseWithKey1 f (Trie a) = Trie <$> traverseWithKey1 (f . projectKey) a

-- Eq/Ord/Show work on the trie's tabulated contents.
instance (HasTrie a, Eq b) => Eq (a :->: b) where
  (==) = (==) `on` toList

instance (HasTrie a, Ord b) => Ord (a :->: b) where
  compare = compare `on` toList

instance (HasTrie a, Show a, Show b) => Show (a :->: b) where
  showsPrec d = showsPrec d . toKeyedList

instance HasTrie a => Apply ((:->:) a) where
  (<.>) = apRep
  -- the discarding combinators need not consult the other trie at all
  a <. _ = a
  _ .> b = b

instance HasTrie a => Applicative ((:->:) a) where
  pure a = Trie (pureRep a)
  (<*>) = apRep
  a <* _ = a
  _ *> b = b

instance HasTrie a => Bind ((:->:) a) where
  -- diagonal bind: look up @a@ in the outer trie, then again in the inner one
  Trie m >>- f = Trie (tabulate (\a -> index (runTrie (f (index m a))) a))

instance HasTrie a => Monad ((:->:) a) where
  return a = Trie (pureRep a)
  (>>=) = (>>-)
  _ >> m = m

instance HasTrie a => MonadReader a ((:->:) a) where
  ask = askRep
  local = localRep

-- TODO: remove dependency on HasTrie in these:
instance (HasTrie m, Monoid m) => Comonad ((:->:) m) where
  duplicate = duplicateRep
  -- extract samples the trie at the monoid identity
  extract = flip index mempty

instance (HasTrie m, Semigroup m) => Extend ((:->:) m) where
  duplicated = duplicatedRep
-- * Instances

instance HasTrie () where
  type BaseTrie () = Identity
  embedKey = id
  projectKey = id

instance HasTrie Bool where
  type BaseTrie Bool = BoolTrie
  embedKey = id
  projectKey = id

-- newtype wrappers reuse the underlying type's trie
instance HasTrie Any where
  type BaseTrie Any = BoolTrie
  embedKey = getAny
  projectKey = Any

instance HasTrie a => HasTrie (Dual a) where
  type BaseTrie (Dual a) = BaseTrie a
  embedKey = embedKey . getDual
  projectKey = Dual . projectKey

instance HasTrie a => HasTrie (Sum a) where
  type BaseTrie (Sum a) = BaseTrie a
  embedKey = embedKey . getSum
  projectKey = Sum . projectKey

instance HasTrie a => HasTrie (Monoid.Product a) where
  type BaseTrie (Monoid.Product a) = BaseTrie a
  embedKey = embedKey . Monoid.getProduct
  projectKey = Monoid.Product . projectKey

-- pairs compose the component tries
instance (HasTrie a, HasTrie b) => HasTrie (a, b) where
  type BaseTrie (a, b) = ReaderT (BaseTrie a) (BaseTrie b)
  embedKey = embedKey *** embedKey
  projectKey = projectKey *** projectKey

instance (HasTrie a, HasTrie b) => HasTrie (Entry a b) where
  type BaseTrie (Entry a b) = ReaderT (BaseTrie a) (BaseTrie b)
  embedKey (Entry a b) = (embedKey a, embedKey b)
  projectKey (a, b) = Entry (projectKey a) (projectKey b)

instance (HasTrie a, HasTrie b) => HasTrie (Either a b) where
  type BaseTrie (Either a b) = EitherTrie (BaseTrie a) (BaseTrie b)
  embedKey = embedKey +++ embedKey
  projectKey = projectKey +++ projectKey

-- Maybe a is encoded as Either () a
instance HasTrie a => HasTrie (Maybe a) where
  type BaseTrie (Maybe a) = EitherTrie Identity (BaseTrie a)
  embedKey = maybe (Left ()) (Right . embedKey)
  projectKey = either (const Nothing) (Just . projectKey)

instance HasTrie a => HasTrie [a] where
  type BaseTrie [a] = ListTrie (BaseTrie a)
  embedKey = map embedKey
  projectKey = map projectKey

-- sequence-like containers are keyed by their element lists
instance HasTrie a => HasTrie (Seq a) where
  type BaseTrie (Seq a) = ListTrie (BaseTrie a)
  embedKey = foldr ((:) . embedKey) []
  projectKey = foldr ((<|) . projectKey) (Seq.empty)

-- maps are keyed by their ascending association lists, rebuilt with
-- fromDistinctAscList on the way out
instance (HasTrie k, HasTrie v) => HasTrie (Map k v) where
  type BaseTrie (Map k v) = ListTrie (BaseTrie (k, v))
  embedKey = foldrWithKey (\k v t -> embedKey (k,v) : t) []
  projectKey = Map.fromDistinctAscList . map projectKey

instance (HasTrie v) => HasTrie (IntMap v) where
  type BaseTrie (IntMap v) = ListTrie (BaseTrie (Int, v))
  embedKey = foldrWithKey (\k v t -> embedKey (k,v) : t) []
  projectKey = IntMap.fromDistinctAscList . map projectKey
-- | Extract bits in little-endian order (least significant bit first);
-- zero yields the empty list.
bits :: (Num t, Bits t) => t -> [Bool]
bits n
  | n == 0    = []
  | otherwise = testBit n 0 : bits (n `shiftR` 1)
-- | Convert a boolean to 0 (False) or 1 (True).
unbit :: Num t => Bool -> t
unbit b = if b then 1 else 0
-- | Rebuild a value from its little-endian bit list (inverse of the
-- bit-extraction above).
unbits :: (Num t, Bits t) => [Bool] -> t
unbits = go
  where
    go []       = 0
    go (b:rest) = bitVal b .|. (go rest `shiftL` 1)
    bitVal True  = 1
    bitVal False = 0
-- | Rebuild a signed value from a sign flag and little-endian magnitude bits.
unbitsZ :: (Num n, Bits n) => (Bool,[Bool]) -> n
unbitsZ (positive, bs) =
  let magnitude = unbits bs
  in if positive then magnitude else negate magnitude
-- | Split a signed value into a sign flag (True = non-negative) and the
-- little-endian bits of its magnitude.
bitsZ :: (Ord n, Num n, Bits n) => n -> (Bool,[Bool])
bitsZ n = (n >= 0, bits (abs n))
-- TODO: fix the show instance of this
-- Every fixed-width integral type shares one trie representation: a sign
-- flag plus the magnitude's bits in little-endian order ('bitsZ'/'unbitsZ').
instance HasTrie Int where
  type BaseTrie Int = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey

instance HasTrie Int8 where
  type BaseTrie Int8 = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey

instance HasTrie Int16 where
  type BaseTrie Int16 = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey

instance HasTrie Int32 where
  type BaseTrie Int32 = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey

instance HasTrie Int64 where
  type BaseTrie Int64 = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey

instance HasTrie Word where
  type BaseTrie Word = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey

instance HasTrie Word8 where
  type BaseTrie Word8 = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey

instance HasTrie Word16 where
  type BaseTrie Word16 = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey

instance HasTrie Word32 where
  type BaseTrie Word32 = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey

instance HasTrie Word64 where
  type BaseTrie Word64 = BaseTrie (Bool, [Bool])
  embedKey = embedKey . bitsZ
  projectKey = unbitsZ . projectKey
-- TODO: fix tree to 21 bit depth
-- Char is keyed by the little-endian bits of its code point.
instance HasTrie Char where
  type BaseTrie Char = BaseTrie [Bool]
  embedKey = bits . fromEnum
  projectKey = toEnum . unbits

-- larger tuples are reassociated onto nested pairs
instance (HasTrie a, HasTrie b, HasTrie c) => HasTrie (a,b,c) where
  type BaseTrie (a,b,c) = BaseTrie (a,(b,c))
  embedKey (a,b,c) = embedKey (a,(b,c))
  projectKey p = let (a,(b,c)) = projectKey p in (a,b,c)

instance (HasTrie a, HasTrie b, HasTrie c, HasTrie d) => HasTrie (a,b,c,d) where
  type BaseTrie (a,b,c,d) = BaseTrie ((a,b),(c,d))
  embedKey (a,b,c,d) = embedKey ((a,b),(c,d))
  projectKey p = let ((a,b),(c,d)) = projectKey p in (a,b,c,d)
|
ekmett/representable-tries
|
src/Data/Functor/Representable/Trie.hs
|
bsd-3-clause
| 11,836
| 0
| 17
| 2,371
| 4,614
| 2,508
| 2,106
| 269
| 1
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
module Prepro where
import Data.Functor.Base
import Data.Functor.Foldable
import Data.Tree
{-
data TreeF a r = NodeF { rootLabelF :: a, subForestF :: [r] }
deriving (Functor, Foldable, Traversable)
type instance Base (Tree a) = TreeF a
instance Recursive (Tree a) where project (Node a ts) = NodeF a ts
instance Corecursive (Tree a) where embed (NodeF a ts) = Node a ts
-}
-- | Small example tree used by 'main'.
tree :: Tree Integer
tree = Node 2 [Node 1 [Node 3 []], Node 7 [Node 1 [], Node 5 []]]

-- Accessors for the base functor's label and children.
-- NOTE(review): NodeF is presumably the TreeF constructor exported by
-- Data.Functor.Base (the local definition above is commented out) — confirm.
rootLabelF (NodeF x _) = x
subForestF (NodeF _ y) = y
-- Demonstrate 'prepro': the natural transformation is applied repeatedly
-- before folding, so deeper layers are transformed more times.
main = do
  drawTree' tree
  -- 0th layer : *1
  -- 1st layer : *2
  -- 2nd layer : *4
  drawTree' $ prepro (\(NodeF x y) -> NodeF (x*2) y) embed tree
  -- sum with deeper values weighted more
  print $ prepro (\(NodeF x y) -> NodeF (x*2) y) ((+) <$> sum <*> rootLabelF) tree
  where
    drawTree' = putStr . drawTree . fmap show
{--
sumAlg :: Num a => ListF a a -> a
sumAlg = \case
Cons h t -> h + t
Nil -> 0
lenAlg :: ListF a Int -> Int
lenAlg = \case
Cons _ t -> 1 + t
Nil -> 0
small :: (Ord a, Num a) => ListF a b -> ListF a b
small Nil = Nil
small term@(Cons h t) | h <= 10 = term
| otherwise = Nil
sum :: Num a => [a] -> a
sum = cata sumAlg
len :: [a] -> Int
len = cata lenAlg
smallSum :: (Ord a, Num a) => [a] -> a
smallSum = prepro small sumAlg
smallLen :: (Ord a, Num a) => [a] -> Int
smallLen = prepro small lenAlg
--}
|
cutsea110/aop
|
src/Prepro.hs
|
bsd-3-clause
| 1,530
| 0
| 13
| 383
| 253
| 136
| 117
| 17
| 1
|
-- |Authentication related functions. Low-level version.
module RPC.Auth
( module Types.Auth
, auth_login
, auth_logout
, auth_token_add
, auth_token_remove
, auth_token_generate
, auth_token_list
) where
import MSF.Host (Server,Con(..))
import Types.Auth
-- | Log into the metasploit team server with username & password. Silent operation.
auth_login :: Con Server -> Username -> Password -> IO (Either String Token)
auth_login addr username password = send_request "auth.login" addr
  [ toObject username
  , toObject password
  ]

-- | Log a token out. Silent operation.
auth_logout :: Con Server -> Token -> Token -> IO ()
auth_logout addr auth tok = success "auth.logout" =<< send_request "auth.logout" addr
  [ toObject auth
  , toObject tok
  ]

-- | Add a permanent authentication token. Silent operation.
auth_token_add :: Con Server -> Token -> Token -> IO ()
auth_token_add addr auth candidate = success "auth.token_add" =<< send_request "auth.token_add" addr
  [ toObject auth
  , toObject candidate
  ]

-- | Remove either a temporary or permanent token. Silent operation.
auth_token_remove :: Con Server -> Token -> Token -> IO ()
auth_token_remove addr auth target = success "auth.token_remove" =<< send_request "auth.token_remove" addr
  [ toObject auth
  , toObject target
  ]

-- | Create a 32 byte authentication token. Silent operation.
auth_token_generate :: Con Server -> Token -> IO Token
auth_token_generate addr auth = send_request "auth.token_generate" addr
  [ toObject auth
  ]

-- | Get a list of all the authentication tokens. Silent operation.
-- The result is pulled from the "tokens" field of the response.
auth_token_list :: Con Server -> Token -> IO [Token]
auth_token_list addr auth = field "auth.token_list" "tokens" =<< send_request "auth.token_list" addr
  [ toObject auth
  ]
|
GaloisInc/msf-haskell
|
src/RPC/Auth.hs
|
bsd-3-clause
| 1,768
| 0
| 10
| 307
| 408
| 208
| 200
| 32
| 1
|
import System.Environment(getArgs, getProgName)
import System.Exit(exitFailure)
import Control.Monad(when)
import Denominate
-- Print the usage banner (with the actual program name) and abort with a
-- failing exit code.
usage = getProgName >>= \name ->
        return ("Usage: " ++ name ++ " [-h|-n] base_directory\n" ++
                " -h: show help\n" ++
                " -n: dry run; show what would be renamed") >>=
        putStrLn >> exitFailure
-- Entry point: show usage when asked (or when no args), otherwise rename
-- everything under the base directory. "-n" switches to a dry run; all
-- non-flag arguments are treated as paths (only the first is used by 'base').
main = do
  getArgs >>= \args ->
    when (doUsageAndExit args) usage >>
    let forReal  = not(elem "-n" args)
        pathArgs = filter (\s -> not(elem s ["-n", "-h", "--help"])) args
    in (fileToTypedFilePath . base) pathArgs >>=
       renameAll forReal defaultFilenameConverter >>=
       mapM_ handle
-- Get base dir to recurse in and ensure there is no terminal '/'.
-- Only the first path argument is used; a single trailing slash is stripped,
-- while the root directory "/" is kept as-is.  (Rewritten with a total
-- pattern match: the old version called 'head' on the reversed string and
-- crashed on an empty path argument.)
base :: [String] -> String
base []    = error "Main.base"
base (d:_)
  | d == "/"  = d
  | otherwise = case reverse d of
      '/':rest -> reverse rest   -- drop the one trailing slash
      _        -> d              -- no trailing slash (also covers "")
-- Report a failed rename on stdout as "msg [filename]"; successes are silent.
handle result =
  case result of
    (Failure (_, fName) msg) -> putStr msg >> putStr " [" >>
                                putStr fName >> putStrLn "]"
    (Success _ _ )           -> return ()
-- True when no arguments were given or a help flag is present.
doUsageAndExit args = null args || any (`elem` args) ["-h", "--help"]
|
eukaryote/denominate
|
src/Main.hs
|
bsd-3-clause
| 1,250
| 3
| 20
| 404
| 398
| 196
| 202
| 31
| 3
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Rho.MagnetSpec where
import Control.Monad
import qualified Data.ByteString.Char8 as B
import Data.List
import Data.Maybe
import Network.URI
import Test.Hspec
import Test.Hspec.Contrib.HUnit
import Test.Hspec.QuickCheck
import Test.HUnit
import Test.QuickCheck
import Rho.Magnet
import Rho.PeerCommsSpec ()
import Rho.Tracker
main :: IO ()
main = hspec spec

spec :: Spec
spec = do
  describe "parsing" $ do
    fromHUnitTest $ TestLabel "should parse (from file)" shouldParse
  describe "parsing-printing" $ do
    -- round-trip property: printing then reparsing a magnet is the identity
    prop "forall m, parseMagnet . printMagnet m == m" $ \m ->
      assertEqual "" (Right m) (parseMagnet (printMagnet m))
-- | Random magnets for the round-trip property.  Shrinking replaces the
-- tracker list with each proper subsequence; a magnet with no trackers does
-- not shrink further.
instance Arbitrary Magnet where
  arbitrary = do
    xt  <- arbitrary
    trs <- listOf trackerGen
    dn  <- oneof [return Nothing, return $ Just "display name"]
    return $ Magnet xt trs dn
  shrink (Magnet _ [] _ ) = []
  shrink (Magnet h ts dn) = map (\t -> Magnet h t dn) (init $ subsequences ts)
-- | Pick one of two fixed trackers: an HTTP tracker or a UDP tracker.
trackerGen :: Gen Tracker
trackerGen = oneof [http, udp]
  where
    -- fromJust is safe here: the URI literal is known to parse
    http = return $ HTTPTracker $ fromJust $ parseURI "http://testserver.com:1234/announce"
    udp  = return $ UDPTracker "0.0.0.0" (fromIntegral (1234 :: Int))
-- | Every magnet URL in the fixture file must parse.
shouldParse :: Test
shouldParse = TestCase $ do
  magnetUrls <- B.lines `fmap` B.readFile "tests/magnets_should_parse"
  parseMagnetUrls magnetUrls

-- | Assert that each URL parses and carries a non-empty tracker list.
parseMagnetUrls :: [B.ByteString] -> Assertion
parseMagnetUrls magnetUrls =
  forM_ magnetUrls $ \magnetUrl ->
    case parseMagnet magnetUrl of
      Right Magnet{mTrackers=trackers} ->
        -- TODO: This is not a problem since magnets can omit trackers,
        -- but I couldn't find any torrents like this and we don't
        -- support DHT (yet) anyway.
        assertBool ("Can't parse trackers from magnet URL: " ++ B.unpack magnetUrl)
                   (not $ null trackers)
      Left err' ->
        assertFailure $ "Can't parse magnet URL: " ++ B.unpack magnetUrl ++ "\n" ++ err'
|
osa1/rho-torrent
|
test/Rho/MagnetSpec.hs
|
bsd-3-clause
| 2,142
| 0
| 16
| 558
| 552
| 285
| 267
| 50
| 2
|
module Parsing.ByteString (
parseQuote
) where
import Parsing.Base
import Control.Monad
import qualified Network.Pcap as Pcap
import qualified Data.ByteString.Char8 as BS
import qualified Data.Time as T
import qualified Data.Time.Clock.POSIX as T
import qualified Data.List as L
-- [todo] statically verify bytestring lengths and formats using LiquidHaskell
type Payload = BS.ByteString
type QuotePacket = BS.ByteString
-- extracts the substring of length `n` at offset `i` of a bytestring;
-- returns Nothing when fewer than `n` bytes are available there
range :: Int -> Int -> BS.ByteString -> Maybe BS.ByteString
range i n bs
  | BS.length piece == n = Just piece
  | otherwise            = Nothing
  where piece = BS.take n (BS.drop i bs)
-- checks that the payload carries the quote header "B6034" at offset 42
hasQuoteHeader :: Payload -> Bool
hasQuoteHeader payload =
  case range 42 5 payload of
    Just header -> header == BS.pack "B6034"
    Nothing     -> False
-- parses quote object from pcap packet
parseQuote :: Packet -> Maybe Quote
parseQuote (hdr, pl) =
  if hasQuoteHeader pl
  then quotePacketFromPayload pl
       >>= quoteFromQuotePacket (packetAcceptTimeFromHeader hdr)
  else fail "cannot find header" -- this is ignored in `Maybe` (= Nothing)

-- parses UDP payload into quote packets
-- (a quote packet is the 215 bytes starting at payload offset 42)
quotePacketFromPayload :: Payload -> Maybe QuotePacket
quotePacketFromPayload = range 42 215
-- constructs quote object from quote packet and packet accept time
-- does not check that quote packet begins with "B6034"
quoteFromQuotePacket :: T.UTCTime -> QuotePacket -> Maybe Quote
quoteFromQuotePacket ptime p = do
  -- fields are read at fixed offsets within the 215-byte quote packet
  aToD <- parseAcceptTimeOfDay =<< range 206 8 p
  -- [todo] handle exception explicitly
  acceptTime <- extrapolateAcceptTime ptime aToD
  issueCode <- range 5 12 p
  bids <- parseBids =<< range 29 60 p
  asks <- parseBids =<< range 96 60 p
  return Quote {
    acceptTime = acceptTime,
    packetTime = ptime,
    -- parseBids accumulates pairs in reverse; bids are re-reversed here
    -- while asks are kept in the accumulated order.
    -- NOTE(review): the asymmetry is presumably intentional (wire order of
    -- bids vs asks differs) — confirm against the exchange's quote layout.
    issueCode = issueCode,
    bids = reverse bids,
    asks = asks
  }
  where
    -- assumes input is a bytestring of 8 digits (HHMMSSuu)
    parseAcceptTimeOfDay :: BS.ByteString -> Maybe T.TimeOfDay
    parseAcceptTimeOfDay bs = do
      hh <- safeRead =<< liftM BS.unpack (range 0 2 bs)
      mm <- safeRead =<< liftM BS.unpack (range 2 2 bs)
      ss <- safeRead =<< liftM BS.unpack (range 4 2 bs)
      uu <- safeRead =<< liftM BS.unpack (range 6 2 bs)
      -- seconds plus hundredths-of-a-second, as the picosecond field
      let pico = fromRational $ (fromIntegral ss) + (fromIntegral uu) / 100
      return $ T.TimeOfDay hh mm pico
    -- assumes input is a bytestring of 60 (= (5+7)*5) digits
    -- [todo] check statically with LiquidHaskell
    parseBids :: BS.ByteString -> Maybe [Bid]
    parseBids bs =
      -- peel five 12-digit (5-digit price, 7-digit quantity) pairs off the
      -- front; pairs are consed, so the result is in reverse packet order
      liftM fst $ foldM (\(bids, remain) _ -> do
          let (bid, remain') = BS.splitAt 12 remain
          let (pstr, qstr) = BS.splitAt 5 bid
          p <- safeRead $ BS.unpack pstr
          q <- safeRead $ BS.unpack qstr
          return (Bid p q : bids, remain')
        ) ([],bs) [1..5]
-- Helpers

-- Total replacement for 'read': Just the value when the entire string
-- parses, Nothing otherwise.
safeRead :: Read a => String -> Maybe a
safeRead str
  | [(val, "")] <- reads str = Just val
  | otherwise                = Nothing
|
iteloo/tsuru-sample
|
src/Parsing/ByteString.hs
|
bsd-3-clause
| 2,995
| 0
| 17
| 697
| 855
| 442
| 413
| 59
| 2
|
-- |
-- Module : Basement.Compat.Primitive
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : portable
--
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE UnliftedFFITypes #-}
module Basement.Compat.Primitive
( bool#
, PinnedStatus(..), toPinnedStatus#
, compatMkWeak#
, compatIsByteArrayPinned#
, compatIsMutableByteArrayPinned#
, unsafeCoerce#
, Word(..)
) where
import qualified Prelude
import GHC.Exts
import GHC.Prim
import GHC.Word
import GHC.IO
import Basement.Compat.PrimTypes
-- GHC 9.0 | Base 4.15
-- GHC 8.8 | Base 4.13 4.14
-- GHC 8.6 | Base 4.12
-- GHC 8.4 | Base 4.11
-- GHC 8.2 | Base 4.10
-- GHC 8.0 | Base 4.9
-- GHC 7.10 | Base 4.8
-- GHC 7.8 | Base 4.7
-- GHC 7.6 | Base 4.6
-- GHC 7.4 | Base 4.5
--
-- More complete list:
-- https://wiki.haskell.org/Base_package
-- | Flag record whether a specific byte array is pinned or not
data PinnedStatus = Pinned | Unpinned
    deriving (Prelude.Eq)

-- | Lift a primitive pinned flag (0# meaning unpinned) to 'PinnedStatus'.
toPinnedStatus# :: Pinned# -> PinnedStatus
toPinnedStatus# 0# = Unpinned
toPinnedStatus# _  = Pinned

-- | turn an Int# into a Bool
bool# :: Int# -> Prelude.Bool
bool# v = isTrue# v
{-# INLINE bool# #-}
-- | A mkWeak# version that keeps working on 8.0
--
-- signature change in ghc-prim:
-- * 0.4: mkWeak# :: o -> b -> c -> State# RealWorld -> (#State# RealWorld, Weak# b#)
-- * 0.5: mkWeak# :: o -> b -> (State# RealWorld -> (#State# RealWorld, c#)) -> State# RealWorld -> (#State# RealWorld, Weak# b#)
--
compatMkWeak# :: o -> b -> Prelude.IO () -> State# RealWorld -> (#State# RealWorld, Weak# b #)
-- unwrap the IO finalizer to the raw state-passing function that the
-- ghc-prim 0.5 signature expects
compatMkWeak# o b c s = mkWeak# o b (case c of { IO f -> f }) s
{-# INLINE compatMkWeak# #-}
-- On GHC >= 8.2 the pinned-status primops exist natively; on older GHCs we
-- fall back to a C helper shipped with basement.
#if __GLASGOW_HASKELL__ >= 802
compatIsByteArrayPinned# :: ByteArray# -> Pinned#
compatIsByteArrayPinned# ba = isByteArrayPinned# ba

compatIsMutableByteArrayPinned# :: MutableByteArray# s -> Pinned#
compatIsMutableByteArrayPinned# ba = isMutableByteArrayPinned# ba
#else
foreign import ccall unsafe "basement_is_bytearray_pinned"
    compatIsByteArrayPinned# :: ByteArray# -> Pinned#

foreign import ccall unsafe "basement_is_bytearray_pinned"
    compatIsMutableByteArrayPinned# :: MutableByteArray# s -> Pinned#
#endif
|
vincenthz/hs-foundation
|
basement/Basement/Compat/Primitive.hs
|
bsd-3-clause
| 2,405
| 2
| 10
| 525
| 305
| 184
| 121
| 33
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
module WebhookAPISpec (spec) where
import Data.Aeson
import qualified Data.ByteString.Lazy as BSL
import System.FilePath
import System.IO.Unsafe
import Test.Hspec
import Web.FBMessenger.API.Bot.WebhookAPI
import Paths_fbmessenger_api
-- | Read a test fixture from the package data dir and apply @f@ to its
-- contents.  (Fixed: the old inner @testFile name@ helper shadowed the
-- outer parameter; the path is now built inline.)
-- NOTE(review): relies on 'unsafePerformIO' so the specs can stay pure;
-- the fixtures are read-only so this is benign, but a NOINLINE pragma
-- would make the intent explicit.
run :: (BSL.ByteString -> a) -> FilePath -> a
run f fixture = unsafePerformIO $ do
  dataDir <- getDataDir
  content <- BSL.readFile (dataDir </> "test-files" </> fixture)
  return (f content)
-- TODO: add more tests for other cases, failures or corner cases
spec :: Spec
spec = do
  -- fixture identifiers shared by all sample events
  let pid = "111111"
  let sid = "USER_ID"
  let rid = "PAGE_ID"
  -- one RemoteEventList per webhook event type, mirroring the JSON fixtures
  let authMessage = EventMessage sid rid (Just 1234567890) $ EmAuth "PASS_THROUGH_PARAM"
  let authEvent = RemoteEvent pid 12341 [authMessage]
  let ar = RemoteEventList [authEvent]
  let deliveryMessage = EventMessage sid rid Nothing $ EmDelivery 37 1458668856253 (Just ["mid.1458668856218:ed81099e15d3f4f233"])
  let deliveryEvent = RemoteEvent pid 1458668856451 [deliveryMessage]
  let dr = RemoteEventList [deliveryEvent]
  let postbackMessage = EventMessage sid rid (Just 1458692752478) $ EmPostback "USER_DEFINED_PAYLOAD"
  let postbackEvent = RemoteEvent pid 1458692752478 [postbackMessage]
  let pr = RemoteEventList [postbackEvent]
  let textMessage = EventMessage sid rid (Just 1457764197627) $ EmTextMessage "mid.1457764197618:41d102a3e1ae206a38" 73 "hello, world!"
  let textEvent = RemoteEvent pid 1457764198246 [textMessage]
  let tr = RemoteEventList [textEvent]
  let structuredMessage = EventMessage sid rid (Just 1458696618268) $ EmStructuredMessage "mid.1458696618141:b4ef9d19ec21086067" 51 [EmAttachment EmImage "IMAGE_URL"]
  let structuredMessageEvent = RemoteEvent pid 1458696618911 [structuredMessage]
  let sr = RemoteEventList [structuredMessageEvent]
  describe "webhook request parsing and generation" $ do
    it "auth message is parsed properly" $
      decodeWR "wsAuthRequest.json" ar
    it "delivery message is parsed properly" $
      decodeWR "wsDeliveryRequest.json" dr
    it "postback message is parsed properly" $
      decodeWR "wsPostbackRequest.json" pr
    it "text message is parsed properly" $
      decodeWR "wsTextMessageRequest.json" tr
    it "structured message is parsed properly" $
      decodeWR "wsStructuredMessageRequest.json" sr
  describe "webhook request serialization" $ do
    it "auth message is serialized properly" $
      checkWR ar
    it "delivery message is serialized properly" $
      checkWR dr
    it "postback message is serialized properly" $
      checkWR pr
    it "text message is serialized properly" $
      checkWR tr
    it "structured message is serialized properly" $
      checkWR sr
  where
    -- decode a fixture file and compare against the expected value
    decodeWR fName m = run (\l -> eitherDecode l :: Either String RemoteEventList) fName `shouldBe` Right m
    -- encode/decode round trip must reproduce the value
    checkWR m = (eitherDecode $ encode m :: Either String RemoteEventList) `shouldBe` Right m
|
mseri/fbmessenger-api-hs
|
test/WebhookAPISpec.hs
|
bsd-3-clause
| 3,336
| 0
| 14
| 876
| 737
| 348
| 389
| 61
| 1
|
{-# LANGUAGE
FlexibleInstances,
OverloadedStrings #-}
module Command where
import System.Posix.Types
import System.Posix.ByteString.FilePath
import System.Posix.Process.ByteString
import System.Posix.Directory.ByteString
import System.Posix.IO
import System.IO
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Text (Text)
import qualified Data.ByteString.Char8 as B
import Data.ByteString.Char8 (ByteString)
import Control.Applicative
import Control.Monad.IO.Class
import Types
import Run
import InternalCommands
import ShellParser
-- | Things the shell can execute, producing a 'SuccessState'.
class Runnable r where
  run :: r -> Shell SuccessState
-- External commands: fork/exec the program and wait for the child.
instance Runnable ExtCommand where
  run (ExtCommand cmd args) = do
    pid <- liftIO $ executeInFork cmd True args Nothing Nothing Nothing
    -- NOTE(review): irrefutable-looking 'Just' pattern; assumes
    -- collectProcess with blocking=True never returns Nothing — confirm
    -- against its definition in Run. A Nothing here would crash.
    Just res <- liftIO $ collectProcess True pid
    return $ CommandResult res
-- A parsed command dispatches to a builtin when one exists, otherwise
-- it is executed as an external command.
instance Runnable Command where
  run c = do
    (cmd, args) <- commandToArgv c
    case getInternalCommand cmd of
      -- asTypeOf pins the builtin's result type to Shell SuccessState so
      -- that the (Shell SuccessState) instance below is selected.
      Just cmd' -> run ((cmd' args) `asTypeOf` (return undefined))
      Nothing -> run (ExtCommand cmd args)
-- Trivial instances: an already-computed state, and a shell action
-- that is simply executed.
instance Runnable SuccessState where
  run = return
instance Runnable (Shell SuccessState) where
  run f = f
-- | Expand a parsed 'Command' into an argv pair: the program name and
-- its argument list, with all word expansions performed in 'Shell'.
commandToArgv :: Command -> Shell (ByteString, [ByteString])
commandToArgv (Command prog args) = (,) <$> expToArgv prog <*> mapM expToArgv args
-- Reduce one word expression to the ByteString it denotes.
-- (No type signature in the original; the expression type is declared
-- in Types/ShellParser, not visible here.)
expToArgv e = case e of
  StrExp t -> return $ T.encodeUtf8 t
  ConcatExp es -> B.concat <$> mapM expToArgv es
  QuoteExp e -> expToArgv e
  -- Command substitution: expand the backticked text, parse it as a
  -- command, run it, and use its captured stdout as the word's value.
  -- A parse failure is reported on stdout and yields the empty string.
  BackTickExp e -> do cmd <- expToArgv e
                      case parse command (B.unpack cmd) cmd of
                        Left err -> liftIO $ print err >> return ""
                        Right c -> do
                          (cmd', args) <- commandToArgv c
                          (_, out) <- liftIO $ executeInForkAndRead cmd' True args Nothing
                          return out
  ParenExp e -> expToArgv e -- unimplemented
  BraceExp e -> expToArgv e -- unimplemented
  BracketExp e -> expToArgv e -- unimplemented
  DollarExp e -> expToArgv e -- unimplemented
|
firefrorefiddle/hssh
|
src/Command.hs
|
bsd-3-clause
| 2,086
| 0
| 17
| 516
| 630
| 323
| 307
| 55
| 9
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.Concurrent
(ThreadId, forkIO, forkFinally, threadDelay)
import Control.Concurrent.Chan.Unagi
(InChan, OutChan, dupChan, newChan, readChan, writeChan)
import Control.Concurrent.MVar
(MVar, newEmptyMVar, newMVar, putMVar, takeMVar)
import Control.Lens
(Lens', ix, lens, makeClassy, only, over, preview, set, to, view, (^.), (^?), (.~))
import Control.Lens.Prism
(Prism', prism, _Just)
import Control.Lens.Tuple
(_1, _2, _3)
import Control.Monad
(forever)
import Control.Monad.Reader
(MonadReader, ReaderT, ask, runReaderT)
import Data.ByteString
(ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as CharBS
import qualified Data.ByteString.Lazy as LazyBS
import Data.Char
(chr)
import Data.List.Lens
import Data.Monoid
((<>))
import Data.Text.Encoding.Error
(lenientDecode)
import Data.Text.Lazy
(Text)
import qualified Data.Text.Lazy as Text
import Data.Text.Lazy.Encoding
(decodeUtf8With, encodeUtf8)
import Network.IRC
import Network.URI
import Network.Wreq
(Response, get, responseBody)
import Pipes
import qualified Pipes.ByteString as PipesBS
import Pipes.Network.TCP.Safe
(Socket, closeSock, connect, fromSocket, toSocket)
import qualified Pipes.Prelude as Pipes
import Pipes.Safe
(SafeT, runSafeT)
import System.IO.Unsafe
(unsafePerformIO)
import Text.Taggy.Lens
(allNamed, contents, html)
-- ----------------------------------------------------------------------------
-- Global config
-- ----------------------------------------------------------------------------
-- | Static bot configuration: identity, server endpoint, credentials
-- and the channels to join on connect. Classy lenses are generated by
-- 'makeClassy' below (kromNickName, kromRemoteHost, ...).
data KromConfig = KromConfig
  { _kromNickName :: String
  , _kromServerName :: String
  , _kromRemoteHost :: String
  , _kromRemotePort :: String
  , _kromRemotePassword :: String
  , _kromJoinChannels :: [String]
  } deriving (Show)
makeClassy ''KromConfig
-- | Placeholder configuration: every field is a bottom and must be
-- overridden before use; forcing any field raises an 'error'.
defaultKromConfig :: KromConfig
defaultKromConfig = KromConfig
  { _kromNickName = error "not configured" -- e.g. "krom"
  , _kromServerName = error "not configured" -- e.g. "localhost"
  , _kromRemoteHost = error "not configured" -- e.g. "localhost"
  , _kromRemotePort = error "not configured" -- e.g. "6667"
  , _kromRemotePassword = error "not configured" -- e.g. "password"
  , _kromJoinChannels = error "not configured" -- e.g. ["#krom"]
  }
-- | The messages that register the bot with the IRC server:
-- PASS, NICK and USER, followed by one JOIN per configured channel.
registrationMessages :: KromConfig -> [Message]
registrationMessages cfg =
    [ password pass
    , nick who
    , user who "0" "*" who
    ] ++ map (joinChan . CharBS.pack) (cfg ^. kromJoinChannels)
  where
    pass = CharBS.pack (cfg ^. kromRemotePassword)
    who  = CharBS.pack (cfg ^. kromNickName)
-- ----------------------------------------------------------------------------
-- IRC lenses and prisms
-- ----------------------------------------------------------------------------
-- | Lens onto a message's optional prefix.
_MsgPrefix :: Lens' Message (Maybe Prefix)
_MsgPrefix =
  lens msg_prefix $ \msg p -> msg { msg_prefix=p }
-- | Lens onto a message's command.
_MsgCommand :: Lens' Message Command
_MsgCommand =
  lens msg_command $ \msg c -> msg { msg_command=c }
-- | Lens onto a message's parameter list.
_MsgParams :: Lens' Message [Parameter]
_MsgParams =
  lens msg_params $ \msg ps -> msg { msg_params=ps }
-- | Prism matching a server-name prefix.
_PrefixServer :: Prism' Prefix ByteString
_PrefixServer =
  prism Server $
  \p ->
    case p of
      Server serverName -> Right serverName
      _ -> Left p
-- | Prism matching a nick-name prefix as a
-- (nick, optional user name, optional server name) triple.
_PrefixNickName :: Prism' Prefix (ByteString, Maybe UserName, Maybe ServerName)
_PrefixNickName =
  prism (\(nickName, userName, serverName) -> NickName nickName userName serverName) $
  \p ->
    case p of
      NickName nickName userName serverName ->
        Right (nickName, userName, serverName)
      _ ->
        Left p
-- ----------------------------------------------------------------------------
-- IRC extensions
-- ----------------------------------------------------------------------------
-- | Build a PASS message (not provided by Network.IRC).
password :: ByteString -> Message
password p = Message Nothing "PASS" [p]
-- | Wrap text in IRC formatting control codes:
-- 0x02 starts bold, 0x0f resets formatting.
bold :: ByteString -> ByteString
bold content = BS.singleton 0x02 <> content <> BS.singleton 0x0f
-- | Build a NickName prefix from plain 'String's (user and server
-- parts are always present, wrapped in 'Just').
nickNamePrefix :: String -> String -> String -> Prefix
nickNamePrefix nickName userName serverName =
  NickName (CharBS.pack nickName) (Just $ CharBS.pack userName) (Just $ CharBS.pack serverName)
-- ----------------------------------------------------------------------------
-- IRC message predicates
-- ----------------------------------------------------------------------------
-- | True exactly when the message is a server PING.
isPingMessage :: Message -> Bool
isPingMessage m = msg_command m == "PING"
-- | True exactly when the message is a PRIVMSG.
isPrivateMessage :: Message -> Bool
isPrivateMessage m = msg_command m == "PRIVMSG"
-- ----------------------------------------------------------------------------
-- IRC message pipes
-- ----------------------------------------------------------------------------
-- | Stream raw bytes from the socket in 1024-byte reads, re-chunked on
-- newlines so that each element yielded downstream is one IRC line.
receiveMessages :: MonadIO m => Socket -> Producer ByteString m ()
receiveMessages clientSocket =
  PipesBS.concats . view PipesBS.lines $ fromSocket clientSocket 1024
-- | Feed every message arriving from upstream into the broadcast
-- channel's write end.
broadcastMessages :: MonadIO m => InChan Message -> Consumer Message m ()
broadcastMessages chan = forever (await >>= liftIO . writeChan chan)
-- | Yield every message read from a subscription channel downstream.
subscribeMessages :: MonadIO m => OutChan Message -> Producer Message m ()
subscribeMessages chan = forever (liftIO (readChan chan) >>= yield)
-- | Decode raw IRC lines into 'Message's; input that fails to parse is
-- dropped silently.
parseMessages :: Monad m => Pipe ByteString Message m ()
parseMessages = forever $ do
  raw <- await
  maybe (return ()) yield (decode raw)
-- | Render each message with 'show', one per line, for the log sink.
formatMessages :: Monad m => Pipe Message ByteString m ()
formatMessages = Pipes.map (\m -> CharBS.pack (show m ++ "\n"))
-- | Serialize each message to the IRC wire format, CRLF-terminated.
encodeMessages :: Monad m => Pipe Message ByteString m ()
encodeMessages = Pipes.map (\m -> encode m <> "\r\n")
-- | Answer PINGs: rewrite each passing message's command to PONG,
-- keeping its parameters intact.
pongPingMessages :: Monad m => Pipe Message Message m ()
pongPingMessages = Pipes.map (\ping -> ping { msg_command = "PONG" })
-- | Stamp every passing message with the given prefix.
echoMessages :: Monad m => Prefix -> Pipe Message Message m ()
echoMessages prefix = Pipes.map (\m -> m { msg_prefix = Just prefix })
-- | For every message whose second parameter parses as a URI, fetch the
-- page over HTTP and yield a preview message; all other messages are
-- dropped from this pipeline.
--
-- NOTE(review): the 'nickName' argument is unused in this body, and
-- 'get' is called on arbitrary user-supplied URLs — an HTTP failure
-- raises an exception inside this pipeline. Confirm intended behavior.
previewMessagesWithLinks :: MonadIO m => Prefix -> Pipe Message Message m ()
previewMessagesWithLinks nickName = loop
  where
    loop =
      do msg <- await
         -- normalise the URI back to a String (with unescaping) for wreq
         let parsedURI = uriToString unEscapeString <$> (parseURI . CharBS.unpack =<< msg ^? _MsgParams . ix 1) <*> pure ""
         case parsedURI of
           Just uri ->
             do response <- liftIO . get $ uri
                yield $ previewMessage msg response
                loop
           Nothing ->
             loop
-- | Extract the page title from an HTML response: decode the body
-- leniently as UTF-8, fold together the contents of every <title>
-- element, strip surrounding whitespace, and re-encode to bytes.
getTitle :: Response LazyBS.ByteString -> Parameter
getTitle response =
  let title = response ^. responseBody
              . to (decodeUtf8With lenientDecode)
              . html
              . allNamed (only "title")
              . contents
  in (LazyBS.toStrict . encodeUtf8 . Text.strip . Text.fromStrict) title
-- | Build the message announcing a linked page's title.
--
-- The announcement is addressed to the channel the link appeared in
-- (first parameter of the original message) and credits the nick that
-- posted it. The original message's prefix is left untouched: the
-- previous version set it to a free variable 'nickName' that is not in
-- scope at the top level of this module, which cannot compile.
previewMessage :: Message -> Response LazyBS.ByteString -> Message
previewMessage msg response =
  let title = getTitle response
      linker = msg ^. _MsgPrefix . _Just . _PrefixNickName . _1
      channel = msg ^. _MsgParams . ix 0
      previewMsgBody = title <> " linked by " <> linker
  in msg { msg_params = [channel, previewMsgBody] }
-- ----------------------------------------------------------------------------
-- Thread management
-- ----------------------------------------------------------------------------
-- | Run a pipeline on its own supervised worker thread.
forkEffect :: Effect IO () -> SafeT IO ThreadId
forkEffect = liftIO . forkChild . runEffect
-- | Global registry holding one "done" MVar per live child thread.
-- NOINLINE + unsafePerformIO is the standard idiom for a top-level
-- mutable cell; inlining would duplicate the MVar.
children :: MVar [MVar ()]
{-# NOINLINE children #-}
children = unsafePerformIO (newMVar [])
-- | Block until every registered child thread has signalled completion,
-- draining the registry one MVar at a time.
waitForChildren :: IO ()
waitForChildren =
  do cs <- takeMVar children
     case cs of
       [] -> return ()
       m:ms -> do
         putMVar children ms
         takeMVar m
         waitForChildren
-- | Fork a thread that is registered in 'children' and signals its MVar
-- when it exits — by normal return or exception, via 'forkFinally'.
forkChild :: IO () -> IO ThreadId
forkChild io =
  do mvar <- newEmptyMVar
     cs <- takeMVar children
     putMVar children (mvar:cs)
     forkFinally io (\_ -> putMVar mvar ())
-- ----------------------------------------------------------------------------
-- Application entry point
-- ----------------------------------------------------------------------------
-- | Connect to the configured IRC server, register the session, then
-- fan incoming traffic out to independent worker pipelines — logger,
-- ponger, echoer and link previewer — each on its own supervised
-- thread. Blocks until every worker thread finishes.
startPipes :: ReaderT KromConfig (SafeT IO) ()
startPipes =
  do config <- ask
     connect (config ^. kromRemoteHost) (config ^. kromRemotePort) $ \(clientSocket, remoteAddr) -> lift $
       do (fromIRCServer, toLogger) <- liftIO newChan
          -- each dupChan gives a worker its own view of the broadcast stream
          toPonger <- liftIO $ dupChan fromIRCServer
          toEchoer <- liftIO $ dupChan fromIRCServer
          toLinkPreviewer <- liftIO $ dupChan fromIRCServer
          let nickName = nickNamePrefix (config ^. kromNickName) (config ^. kromNickName) (config ^. kromServerName)
          -- establish session
          runEffect $ each (registrationMessages config)
                  >-> encodeMessages
                  >-> toSocket clientSocket
          -- broadcaster pipeline
          forkEffect $ receiveMessages clientSocket
                   >-> parseMessages
                   >-> broadcastMessages fromIRCServer
          -- logger pipeline
          forkEffect $ subscribeMessages toLogger
                   >-> formatMessages
                   >-> PipesBS.stdout
          -- ponger pipeline
          forkEffect $ subscribeMessages toPonger
                   >-> Pipes.filter isPingMessage
                   >-> pongPingMessages
                   >-> encodeMessages
                   >-> toSocket clientSocket
          -- echoer pipeline
          forkEffect $ subscribeMessages toEchoer
                   >-> Pipes.filter isPrivateMessage
                   >-> echoMessages nickName
                   >-> encodeMessages
                   >-> toSocket clientSocket
          -- link previewer pipeline
          forkEffect $ subscribeMessages toLinkPreviewer
                   >-> previewMessagesWithLinks nickName
                   >-> encodeMessages
                   >-> toSocket clientSocket
          liftIO waitForChildren
-- | Entry point: run the bot under the (placeholder) default config.
main :: IO ()
main = runSafeT . flip runReaderT defaultKromConfig $ startPipes
|
zerokarmaleft/krom
|
src/Main.hs
|
bsd-3-clause
| 11,044
| 0
| 19
| 2,887
| 2,590
| 1,366
| 1,224
| -1
| -1
|
{-# LANGUAGE GADTs, BangPatterns, RecordWildCards,
GeneralizedNewtypeDeriving, NondecreasingIndentation, TupleSections,
ScopedTypeVariables, OverloadedStrings #-}
module GHC.Cmm.Info.Build
( CAFSet, CAFEnv, cafAnal, cafAnalData
, doSRTs, ModuleSRTInfo (..), emptySRT
, SRTMap, srtMapNonCAFs
) where
import GhcPrelude hiding (succ)
import Id
import IdInfo
import GHC.Cmm.BlockId
import GHC.Cmm.Dataflow.Block
import GHC.Cmm.Dataflow.Graph
import GHC.Cmm.Dataflow.Label
import GHC.Cmm.Dataflow.Collections
import GHC.Cmm.Dataflow
import Module
import GHC.Platform
import Digraph
import GHC.Cmm.CLabel
import GHC.Cmm
import GHC.Cmm.Utils
import DynFlags
import Maybes
import Outputable
import GHC.Runtime.Layout
import UniqSupply
import CostCentre
import GHC.StgToCmm.Heap
import ErrUtils
import Control.Monad
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Control.Monad.Trans.State
import Control.Monad.Trans.Class
import Data.List (unzip4)
import NameSet
{- Note [SRTs]
SRTs are the mechanism by which the garbage collector can determine
the live CAFs in the program.
Representation
^^^^^^^^^^^^^^
+------+
| info |
| | +-----+---+---+---+
| -------->|SRT_2| | | | | 0 |
|------| +-----+-|-+-|-+---+
| | | |
| code | | |
| | v v
An SRT is simply an object in the program's data segment. It has the
same representation as a static constructor. There are 16
pre-compiled SRT info tables: stg_SRT_1_info, .. stg_SRT_16_info,
representing SRT objects with 1-16 pointers, respectively.
The entries of an SRT object point to static closures, which are either
- FUN_STATIC, THUNK_STATIC or CONSTR
- Another SRT (actually just a CONSTR)
The final field of the SRT is the static link field, used by the
garbage collector to chain together static closures that it visits and
to determine whether a static closure has been visited or not. (see
Note [STATIC_LINK fields])
By traversing the transitive closure of an SRT, the GC will reach all
of the CAFs that are reachable from the code associated with this SRT.
If we need to create an SRT with more than 16 entries, we build a
chain of SRT objects with all but the last having 16 entries.
+-----+---+- -+---+---+
|SRT16| | | | | | 0 |
+-----+-|-+- -+-|-+---+
| |
v v
+----+---+---+---+
|SRT2| | | | | 0 |
+----+-|-+-|-+---+
| |
| |
v v
Referring to an SRT from the info table
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The following things have SRTs:
- Static functions (FUN)
- Static thunks (THUNK), ie. CAFs
- Continuations (RET_SMALL, etc.)
In each case, the info table points to the SRT.
- info->srt is zero if there's no SRT, otherwise:
- info->srt == 1 and info->f.srt_offset points to the SRT
e.g. for a FUN with an SRT:
StgFunInfoTable +------+
info->f.srt_offset | ------------> offset to SRT object
StgStdInfoTable +------+
info->layout.ptrs | ... |
info->layout.nptrs | ... |
info->srt | 1 |
info->type | ... |
|------|
On x86_64, we optimise the info table representation further. The
offset to the SRT can be stored in 32 bits (all code lives within a
2GB region in x86_64's small memory model), so we can save a word in
the info table by storing the srt_offset in the srt field, which is
half a word.
On x86_64 with TABLES_NEXT_TO_CODE (except on MachO, due to #15169):
- info->srt is zero if there's no SRT, otherwise:
- info->srt is an offset from the info pointer to the SRT object
StgStdInfoTable +------+
info->layout.ptrs | |
info->layout.nptrs | |
info->srt | ------------> offset to SRT object
|------|
EXAMPLE
^^^^^^^
f = \x. ... g ...
where
g = \y. ... h ... c1 ...
h = \z. ... c2 ...
c1 & c2 are CAFs
g and h are local functions, but they have no static closures. When
we generate code for f, we start with a CmmGroup of four CmmDecls:
[ f_closure, f_entry, g_entry, h_entry ]
we process each CmmDecl separately in cpsTop, giving us a list of
CmmDecls. e.g. for f_entry, we might end up with
[ f_entry, f1_ret, f2_proc ]
where f1_ret is a return point, and f2_proc is a proc-point. We have
a CAFSet for each of these CmmDecls, let's suppose they are
[ f_entry{g_info}, f1_ret{g_info}, f2_proc{} ]
[ g_entry{h_info, c1_closure} ]
[ h_entry{c2_closure} ]
Next, we make an SRT for each of these functions:
f_srt : [g_info]
g_srt : [h_info, c1_closure]
h_srt : [c2_closure]
Now, for g_info and h_info, we want to refer to the SRTs for g and h
respectively, which we'll label g_srt and h_srt:
f_srt : [g_srt]
g_srt : [h_srt, c1_closure]
h_srt : [c2_closure]
Now, when an SRT has a single entry, we don't actually generate an SRT
closure for it, instead we just replace references to it with its
single element. So, since h_srt == c2_closure, we have
f_srt : [g_srt]
g_srt : [c2_closure, c1_closure]
h_srt : [c2_closure]
and the only SRT closure we generate is
g_srt = SRT_2 [c2_closure, c1_closure]
Algorithm
^^^^^^^^^
0. let srtMap :: Map CAFLabel (Maybe SRTEntry) = {}
Maps closures to their SRT entries (i.e. how they appear in a SRT payload)
1. Start with decls :: [CmmDecl]. This corresponds to an SCC of bindings in STG
after code-generation.
2. CPS-convert each CmmDecl (cpsTop), resulting in a list [CmmDecl]. There might
be multiple CmmDecls in the result, due to proc-point splitting.
3. In cpsTop, *before* proc-point splitting, when we still have a single
CmmDecl, we do cafAnal for procs:
* cafAnal performs a backwards analysis on the code blocks
* For each labelled block, the analysis produces a CAFSet (= Set CAFLabel),
representing all the CAFLabels reachable from this label.
* A label is added to the set if it refers to a FUN, THUNK, or RET,
and its CafInfo /= NoCafRefs.
(NB. all CafInfo for Ids in the current module should be initialised to
MayHaveCafRefs)
* The result is CAFEnv = LabelMap CAFSet
(Why *before* proc-point splitting? Because the analysis needs to propagate
information across branches, and proc-point splitting turns branches into
CmmCalls to top-level CmmDecls. The analysis would fail to find all the
references to CAFFY labels if we did it after proc-point splitting.)
   For static data, cafAnalData simply returns the set of all labels that refer
   to a FUN, THUNK, or RET whose CafInfo /= NoCafRefs.
4. The result of cpsTop is (CAFEnv, [CmmDecl]) for procs and (CAFSet, CmmDecl)
for static data. So after `mapM cpsTop decls` we have
[Either (CAFEnv, [CmmDecl]) (CAFSet, CmmDecl)]
5. For procs concat the decls and union the CAFEnvs to get (CAFEnv, [CmmDecl])
6. For static data generate a Map CLabel CAFSet (maps static data to their CAFSets)
7. Dependency-analyse the decls using CAFEnv and CAFSets, giving us SCC CAFLabel
8. For each SCC in dependency order
- Let lbls :: [CAFLabel] be the non-recursive labels in this SCC
- Apply CAFEnv to each label and concat the result :: [CAFLabel]
- For each CAFLabel in the set apply srtMap (and ignore Nothing) to get
srt :: [SRTEntry]
- Make a label for this SRT, call it l
- If the SRT is not empty (i.e. the group is CAFFY) add FUN_STATICs in the
group to the SRT (see Note [Invalid optimisation: shortcutting])
- Add to srtMap: lbls -> if null srt then Nothing else Just l
9. At the end, for every top-level binding x, if srtMap x == Nothing, then the
binding is non-CAFFY, otherwise it is CAFFY.
Optimisations
^^^^^^^^^^^^^
To reduce the code size overhead and the cost of traversing SRTs in
the GC, we want to simplify SRTs where possible. We therefore apply
the following optimisations. Each has a [keyword]; search for the
keyword in the code below to see where the optimisation is
implemented.
1. [Inline] we never create an SRT with a single entry, instead we
point to the single entry directly from the info table.
i.e. instead of
+------+
| info |
| | +-----+---+---+
| -------->|SRT_1| | | 0 |
|------| +-----+-|-+---+
| | |
| code | |
| | v
C
we can point directly to the closure:
+------+
| info |
| |
| -------->C
|------|
| |
| code |
| |
Furthermore, the SRT for any code that refers to this info table
can point directly to C.
The exception to this is when we're doing dynamic linking. In that
case, if the closure is not locally defined then we can't point to
it directly from the info table, because this is the text section
which cannot contain runtime relocations. In this case we skip this
optimisation and generate the singleton SRT, because SRTs are in the
data section and *can* have relocatable references.
2. [FUN] A static function closure can also be an SRT, we simply put
the SRT entries as fields in the static closure. This makes a lot
of sense: the static references are just like the free variables of
the FUN closure.
i.e. instead of
f_closure:
+-----+---+
| | | 0 |
+- |--+---+
| +------+
| | info | f_srt:
| | | +-----+---+---+---+
| | -------->|SRT_2| | | | + 0 |
`----------->|------| +-----+-|-+-|-+---+
| | | |
| code | | |
| | v v
We can generate:
f_closure:
+-----+---+---+---+
| | | | | | | 0 |
+- |--+-|-+-|-+---+
| | | +------+
| v v | info |
| | |
| | 0 |
`----------->|------|
| |
| code |
| |
(note: we can't do this for THUNKs, because the thunk gets
overwritten when it is entered, so we wouldn't be able to share
this SRT with other info tables that want to refer to it (see
[Common] below). FUNs are immutable so don't have this problem.)
3. [Common] Identical SRTs can be commoned up.
4. [Filter] If an SRT A refers to an SRT B and a closure C, and B also
refers to C (perhaps transitively), then we can omit the reference
to C from A.
Note that there are many other optimisations that we could do, but
aren't implemented. In general, we could omit any reference from an
SRT if everything reachable from it is also reachable from the other
fields in the SRT. Our [Filter] optimisation is a special case of
this.
Another opportunity we don't exploit is this:
A = {X,Y,Z}
B = {Y,Z}
C = {X,B}
Here we could use C = {A} and therefore [Inline] C = A.
-}
-- ---------------------------------------------------------------------
{- Note [Invalid optimisation: shortcutting]
You might think that if we have something like
A's SRT = {B}
B's SRT = {X}
that we could replace the reference to B in A's SRT with X.
A's SRT = {X}
B's SRT = {X}
and thereby perhaps save a little work at runtime, because we don't
have to visit B.
But this is NOT valid.
Consider these cases:
0. B can't be a constructor, because constructors don't have SRTs
1. B is a CAF. This is the easy one. Obviously we want A's SRT to
point to B, so that it keeps B alive.
2. B is a function. This is the tricky one. The reason we can't
shortcut in this case is that we aren't allowed to resurrect static
objects.
== How does this cause a problem? ==
The particular case that cropped up when we tried this was #15544.
- A is a thunk
- B is a static function
- X is a CAF
- suppose we GC when A is alive, and B is not otherwise reachable.
- B is "collected", meaning that it doesn't make it onto the static
objects list during this GC, but nothing bad happens yet.
- Next, suppose we enter A, and then call B. (remember that A refers to B)
At the entry point to B, we GC. This puts B on the stack, as part of the
RET_FUN stack frame that gets pushed when we GC at a function entry point.
- This GC will now reach B
  - But because B was previously "collected", it breaks the assumption
that static objects are never resurrected. See Note [STATIC_LINK
fields] in rts/sm/Storage.h for why this is bad.
- In practice, the GC thinks that B has already been visited, and so
doesn't visit X, and catastrophe ensues.
== Isn't this caused by the RET_FUN business? ==
Maybe, but could you prove that RET_FUN is the only way that
resurrection can occur?
So, no shortcutting.
-}
-- ---------------------------------------------------------------------
-- Label types
-- Labels that come from cafAnal can be:
-- - _closure labels for static functions or CAFs
-- - _info labels for dynamic functions, thunks, or continuations
-- - _entry labels for functions or thunks
--
-- Meanwhile the labels on top-level blocks are _entry labels.
--
-- To put everything in the same namespace we convert all labels to
-- closure labels using toClosureLbl. Note that some of these
-- labels will not actually exist; that's ok because we're going to
-- map them to SRTEntry later, which ranges over labels that do exist.
--
-- | A label in the closure-label namespace used throughout CAF
-- analysis; always built via 'mkCAFLabel'.
newtype CAFLabel = CAFLabel CLabel
  deriving (Eq,Ord,Outputable)
type CAFSet = Set CAFLabel
type CAFEnv = LabelMap CAFSet
-- | Convert any label to its closure-label form (evaluated strictly).
mkCAFLabel :: CLabel -> CAFLabel
mkCAFLabel lbl = CAFLabel $! toClosureLbl lbl
-- This is a label that we can put in an SRT. It *must* be a closure label,
-- pointing to either a FUN_STATIC, THUNK_STATIC, or CONSTR.
newtype SRTEntry = SRTEntry CLabel
  deriving (Eq, Ord, Outputable)
-- ---------------------------------------------------------------------
-- CAF analysis
-- | Insert @l@ (as a 'CAFLabel') into the set if it is CAFFY, i.e. it
-- has a Haskell name and 'hasCAF' holds; otherwise return the set
-- unchanged.
addCafLabel :: CLabel -> CAFSet -> CAFSet
addCafLabel l s
  | Just _ <- hasHaskellName l
  , let caf_label = mkCAFLabel l
    -- For imported Ids hasCAF will have accurate CafInfo
    -- Locals are initialized as CAFFY. We turn labels with empty SRTs into
    -- non-CAFFYs in doSRTs
  , hasCAF l
  = Set.insert caf_label s
  | otherwise
  = s
-- | CAF analysis for static data: collect the CAFFY labels referenced
-- by the payload literals. Raw statics contain no label references.
cafAnalData
  :: CmmStatics
  -> CAFSet
cafAnalData (CmmStaticsRaw _lbl _data) =
  Set.empty
cafAnalData (CmmStatics _lbl _itbl _ccs payload) =
  foldl' analyzeStatic Set.empty payload
  where
    analyzeStatic s lit =
      case lit of
        CmmLabel c -> addCafLabel c s
        CmmLabelOff c _ -> addCafLabel c s
        CmmLabelDiffOff c1 c2 _ _ -> addCafLabel c1 $! addCafLabel c2 s
        _ -> s
-- |
-- For each code block:
-- - collect the references reachable from this code block to FUN,
-- THUNK or RET labels for which hasCAF == True
--
-- This gives us a `CAFEnv`: a mapping from code block to sets of labels
--
cafAnal
  :: LabelSet -- The blocks representing continuations, ie. those
              -- that will get RET info tables. These labels will
              -- get their own SRTs, so we don't aggregate CAFs from
              -- references to these labels, we just use the label.
  -> CLabel -- The top label of the proc
  -> CmmGraph
  -> CAFEnv
cafAnal contLbls topLbl cmmGraph =
  analyzeCmmBwd cafLattice
    (cafTransfers contLbls (g_entry cmmGraph) topLbl) cmmGraph mapEmpty
-- | Lattice for the backwards CAF dataflow: bottom is the empty set and
-- join is set union; a fact has "changed" exactly when the union grew.
cafLattice :: DataflowLattice CAFSet
cafLattice = DataflowLattice Set.empty add
  where
    add (OldFact old) (NewFact new) =
      let !new' = old `Set.union` new
      in changedIf (Set.size new' > Set.size old) new'
-- | Transfer function for the backwards CAF analysis of a single
-- block: the fact for the block is everything referenced by its middle
-- and exit nodes plus the joined facts of its successors, with special
-- cases for loops back to the entry and for continuation labels.
cafTransfers :: LabelSet -> Label -> CLabel -> TransferFun CAFSet
cafTransfers contLbls entry topLbl
  block@(BlockCC eNode middle xNode) fBase =
    let joined :: CAFSet
        joined = cafsInNode xNode $! live'
        result :: CAFSet
        !result = foldNodesBwdOO cafsInNode middle joined
        facts :: [Set CAFLabel]
        facts = mapMaybe successorFact (successors xNode)
        live' :: CAFSet
        live' = joinFacts cafLattice facts
        successorFact :: Label -> Maybe (Set CAFLabel)
        successorFact s
          -- If this is a loop back to the entry, we can refer to the
          -- entry label.
          | s == entry = Just (addCafLabel topLbl Set.empty)
          -- If this is a continuation, we want to refer to the
          -- SRT for the continuation's info table
          | s `setMember` contLbls
          = Just (Set.singleton (mkCAFLabel (infoTblLbl s)))
          -- Otherwise, takes the CAF references from the destination
          | otherwise
          = lookupFact s fBase
        cafsInNode :: CmmNode e x -> CAFSet -> CAFSet
        cafsInNode node set = foldExpDeep addCafExpr node set
        addCafExpr :: CmmExpr -> Set CAFLabel -> Set CAFLabel
        addCafExpr expr !set =
          case expr of
            CmmLit (CmmLabel c) ->
              addCafLabel c set
            CmmLit (CmmLabelOff c _) ->
              addCafLabel c set
            CmmLit (CmmLabelDiffOff c1 c2 _ _) ->
              addCafLabel c1 $! addCafLabel c2 set
            _ ->
              set
    in
      srtTrace "cafTransfers" (text "block:" <+> ppr block $$
                               text "contLbls:" <+> ppr contLbls $$
                               text "entry:" <+> ppr entry $$
                               text "topLbl:" <+> ppr topLbl $$
                               text "cafs in exit:" <+> ppr joined $$
                               text "result:" <+> ppr result) $
        mapSingleton (entryLabel eNode) result
-- -----------------------------------------------------------------------------
-- ModuleSRTInfo
-- | Per-module state threaded through SRT generation.
data ModuleSRTInfo = ModuleSRTInfo
  { thisModule :: Module
    -- ^ Current module being compiled. Required for calling labelDynamic.
  , dedupSRTs :: Map (Set SRTEntry) SRTEntry
    -- ^ previous SRTs we've emitted, so we can de-duplicate.
    -- Used to implement the [Common] optimisation.
  , flatSRTs :: Map SRTEntry (Set SRTEntry)
    -- ^ The reverse mapping, so that we can remove redundant
    -- entries. e.g. if we have an SRT [a,b,c], and we know that b
    -- points to [c,d], we can omit c and emit [a,b].
    -- Used to implement the [Filter] optimisation.
  , moduleSRTMap :: SRTMap
  }
instance Outputable ModuleSRTInfo where
  ppr ModuleSRTInfo{..} =
    text "ModuleSRTInfo {" $$
      (nest 4 $ text "dedupSRTs =" <+> ppr dedupSRTs $$
        text "flatSRTs =" <+> ppr flatSRTs $$
        text "moduleSRTMap =" <+> ppr moduleSRTMap) $$ char '}'
-- | A fresh, empty 'ModuleSRTInfo' for the given module.
emptySRT :: Module -> ModuleSRTInfo
emptySRT mod =
  ModuleSRTInfo
    { thisModule = mod
    , dedupSRTs = Map.empty
    , flatSRTs = Map.empty
    , moduleSRTMap = Map.empty
    }
-- -----------------------------------------------------------------------------
-- Constructing SRTs
{- Implementation notes
- In each CmmDecl there is a mapping info_tbls from Label -> CmmInfoTable
- The entry in info_tbls corresponding to g_entry is the closure info
table, the rest are continuations.
- Each entry in info_tbls possibly needs an SRT. We need to make a
label for each of these.
- We get the CAFSet for each entry from the CAFEnv
-}
-- | A label we may attach an SRT to: either a Cmm block label or the
-- label of a static data declaration.
data SomeLabel
  = BlockLabel !Label
  | DeclLabel CLabel
  deriving (Eq, Ord)
instance Outputable SomeLabel where
  ppr (BlockLabel l) = text "b:" <+> ppr l
  ppr (DeclLabel l) = text "s:" <+> ppr l
-- | Project out the block label, if this is one.
getBlockLabel :: SomeLabel -> Maybe Label
getBlockLabel (BlockLabel l) = Just l
getBlockLabel (DeclLabel _) = Nothing
-- | All of the block labels in the list, dropping decl labels.
getBlockLabels :: [SomeLabel] -> [Label]
getBlockLabels = mapMaybe getBlockLabel
-- | Return a (Label,CLabel) pair for each labelled block of a CmmDecl,
-- where the label is
-- - the info label for a continuation or dynamic closure
-- - the closure label for a top-level function (not a CAF)
getLabelledBlocks :: CmmDecl -> [(SomeLabel, CAFLabel)]
getLabelledBlocks (CmmData _ (CmmStaticsRaw _ _)) =
  []
getLabelledBlocks (CmmData _ (CmmStatics lbl _ _ _)) =
  [ (DeclLabel lbl, mkCAFLabel lbl) ]
getLabelledBlocks (CmmProc top_info _ _ _) =
  [ (BlockLabel blockId, caf_lbl)
  | (blockId, info) <- mapToList (info_tbls top_info)
  , let rep = cit_rep info
    -- exclude static thunks (CAFs): not (static && thunk).
    -- Those are exactly the blocks picked up by getCAFs instead.
  , not (isStaticRep rep) || not (isThunkRep rep)
  , let !caf_lbl = mkCAFLabel (cit_lbl info)
  ]
-- | Put the labelled blocks that we will be annotating with SRTs into
-- dependency order. This is so that we can process them one at a
-- time, resolving references to earlier blocks to point to their
-- SRTs. CAFs themselves are not included here; see getCAFs below.
depAnalSRTs
  :: CAFEnv
  -> Map CLabel CAFSet -- CAFEnv for statics
  -> [CmmDecl]
  -> [SCC (SomeLabel, CAFLabel, Set CAFLabel)]
depAnalSRTs cafEnv cafEnv_static decls =
  srtTrace "depAnalSRTs" (text "decls:" <+> ppr decls $$
                          text "nodes:" <+> ppr (map node_payload nodes) $$
                          text "graph:" <+> ppr graph) graph
  where
    labelledBlocks :: [(SomeLabel, CAFLabel)]
    labelledBlocks = concatMap getLabelledBlocks decls
    labelToBlock :: Map CAFLabel SomeLabel
    labelToBlock = foldl' (\m (v,k) -> Map.insert k v m) Map.empty labelledBlocks
    -- One digraph node per labelled block; its edges point at the blocks
    -- defining the CAF labels it references (self-references removed).
    nodes :: [Node SomeLabel (SomeLabel, CAFLabel, Set CAFLabel)]
    nodes = [ DigraphNode (l,lbl,cafs') l
              (mapMaybe (flip Map.lookup labelToBlock) (Set.toList cafs'))
            | (l, lbl) <- labelledBlocks
            , Just (cafs :: Set CAFLabel) <-
                [case l of
                   BlockLabel l -> mapLookup l cafEnv
                   DeclLabel cl -> Map.lookup cl cafEnv_static]
            , let cafs' = Set.delete lbl cafs
            ]
    graph :: [SCC (SomeLabel, CAFLabel, Set CAFLabel)]
    graph = stronglyConnCompFromEdgedVerticesOrd nodes
-- | Get (Label, CAFLabel, Set CAFLabel) for each block that represents a CAF.
-- These are treated differently from other labelled blocks:
-- - we never shortcut a reference to a CAF to the contents of its
-- SRT, since the point of SRTs is to keep CAFs alive.
-- - CAFs therefore don't take part in the dependency analysis in depAnalSRTs.
-- instead we generate their SRTs after everything else.
getCAFs :: CAFEnv -> [CmmDecl] -> [(Label, CAFLabel, Set CAFLabel)]
getCAFs cafEnv decls =
  [ (g_entry g, mkCAFLabel topLbl, cafs)
  | CmmProc top_info topLbl _ g <- decls
  , Just info <- [mapLookup (g_entry g) (info_tbls top_info)]
  , let rep = cit_rep info
    -- static thunks only: the complement of getLabelledBlocks' filter
  , isStaticRep rep && isThunkRep rep
  , Just cafs <- [mapLookup (g_entry g) cafEnv]
  ]
-- | Get the list of blocks that correspond to the entry points for
-- FUN_STATIC closures. These are the blocks for which if we have an
-- SRT we can merge it with the static closure. [FUN]
getStaticFuns :: [CmmDecl] -> [(BlockId, CLabel)]
getStaticFuns decls =
  [ (g_entry g, lbl)
  | CmmProc top_info _ _ g <- decls
  , Just info <- [mapLookup (g_entry g) (info_tbls top_info)]
  , Just (id, _) <- [cit_clo info]
  , let rep = cit_rep info
  , isStaticRep rep && isFunRep rep
  , let !lbl = mkLocalClosureLabel (idName id) (idCafInfo id)
  ]
-- | Maps labels from 'cafAnal' to the final CLabel that will appear
-- in the SRT.
-- - closures with singleton SRTs resolve to their single entry
-- - closures with larger SRTs map to the label for that SRT
-- - CAFs must not map to anything!
-- - if a label maps to Nothing, we found that this label's SRT
-- is empty, so we don't need to refer to it from other SRTs.
type SRTMap = Map CAFLabel (Maybe SRTEntry)
-- | Given SRTMap of a module, returns the set of non-CAFFY names in the
-- module: exactly those labels mapped to 'Nothing' that carry a Haskell
-- 'Name'.  Any Names not in the set are CAFFY.
srtMapNonCAFs :: SRTMap -> NameSet
srtMapNonCAFs srtMap =
  mkNameSet
    [ name
    | (CAFLabel l, Nothing) <- Map.toList srtMap
    , Just name <- [hasHaskellName l]
    ]
-- | Resolve a CAFLabel to its SRTEntry using the SRTMap.  Labels absent
-- from the map default to an entry pointing at their own closure.
resolveCAF :: SRTMap -> CAFLabel -> Maybe SRTEntry
resolveCAF srtMap lbl@(CAFLabel l) =
  srtTrace "resolveCAF" ("l:" <+> ppr l <+> "resolved:" <+> ppr ret) ret
  where
    ret = case Map.lookup lbl srtMap of
      Just entry -> entry
      Nothing    -> Just (SRTEntry (toClosureLbl l))
-- | Attach SRTs to all info tables in the CmmDecls, and add SRT
-- declarations to the ModuleSRTInfo.
--
-- Returns the updated 'ModuleSRTInfo' together with the generated SRT
-- declarations followed by the (SRT-annotated) input declarations.
doSRTs
  :: DynFlags
  -> ModuleSRTInfo
  -> [(CAFEnv, [CmmDecl])]
  -> [(CAFSet, CmmDecl)]
  -> IO (ModuleSRTInfo, [CmmDeclSRTs])
doSRTs dflags moduleSRTInfo procs data_ = do
  us <- mkSplitUniqSupply 'u'
  -- Ignore the original grouping of decls, and combine all the
  -- CAFEnvs into a single CAFEnv.
  let static_data_env :: Map CLabel CAFSet
      static_data_env =
        Map.fromList $
        flip map data_ $
        \(set, decl) ->
          case decl of
            CmmProc{} ->
              pprPanic "doSRTs" (text "Proc in static data list:" <+> ppr decl)
            CmmData _ static ->
              case static of
                CmmStatics lbl _ _ _ -> (lbl, set)
                CmmStaticsRaw lbl _ -> (lbl, set)
      static_data :: Set CLabel
      static_data = Map.keysSet static_data_env
      (proc_envs, procss) = unzip procs
      cafEnv = mapUnions proc_envs
      decls = map snd data_ ++ concat procss
      staticFuns = mapFromList (getStaticFuns decls)
  -- Put the decls in dependency order. Why? So that we can implement
  -- [Inline] and [Filter].  If we need to refer to an SRT that has
  -- a single entry, we use the entry itself, which means that we
  -- don't need to generate the singleton SRT in the first place.  But
  -- to do this we need to process blocks before things that depend on
  -- them.
  let
    sccs :: [SCC (SomeLabel, CAFLabel, Set CAFLabel)]
    sccs = {-# SCC depAnalSRTs #-} depAnalSRTs cafEnv static_data_env decls
    cafsWithSRTs :: [(Label, CAFLabel, Set CAFLabel)]
    cafsWithSRTs = getCAFs cafEnv decls
  srtTraceM "doSRTs" (text "data:" <+> ppr data_ $$
                      text "procs:" <+> ppr procs $$
                      text "static_data_env:" <+> ppr static_data_env $$
                      text "sccs:" <+> ppr sccs $$
                      text "cafsWithSRTs:" <+> ppr cafsWithSRTs)
  -- On each strongly-connected group of decls, construct the SRT
  -- closures and the SRT fields for info tables.
  let result ::
        [ ( [CmmDeclSRTs]          -- generated SRTs
          , [(Label, CLabel)]      -- SRT fields for info tables
          , [(Label, [SRTEntry])]  -- SRTs to attach to static functions
          , Bool                   -- Whether the group has CAF references
          ) ]
      (result, moduleSRTInfo') =
        initUs_ us $
        flip runStateT moduleSRTInfo $ do
          -- non-CAF groups first (dependency order), then CAFs, which are
          -- deliberately kept out of the dependency analysis (see getCAFs).
          nonCAFs <- mapM (doSCC dflags staticFuns static_data) sccs
          cAFs <- forM cafsWithSRTs $ \(l, cafLbl, cafs) ->
            oneSRT dflags staticFuns [BlockLabel l] [cafLbl]
                   True{-is a CAF-} cafs static_data
          return (nonCAFs ++ cAFs)
      (srt_declss, pairs, funSRTs, has_caf_refs) = unzip4 result
      srt_decls = concat srt_declss
  unless (null srt_decls) $
    dumpIfSet_dyn dflags Opt_D_dump_srts "SRTs" FormatCMM (ppr srt_decls)
  -- Next, update the info tables with the SRTs
  let
    srtFieldMap = mapFromList (concat pairs)
    funSRTMap = mapFromList (concat funSRTs)
    has_caf_refs' = or has_caf_refs
    decls' =
      concatMap (updInfoSRTs dflags srtFieldMap funSRTMap has_caf_refs') decls
  -- Finally update CafInfos for raw static literals (CmmStaticsRaw). Those are
  -- not analysed in oneSRT so we never add entries for them to the SRTMap.
  let srtMap_w_raws =
        foldl' (\(srtMap :: SRTMap) (_, decl) ->
                 case decl of
                   CmmData _ CmmStatics{} ->
                     -- already updated by oneSRT
                     srtMap
                   CmmData _ (CmmStaticsRaw lbl _)
                     | isIdLabel lbl ->
                       -- not analysed by oneSRT, declare it non-CAFFY here
                       Map.insert (mkCAFLabel lbl) Nothing srtMap
                     | otherwise ->
                       -- Not an IdLabel, ignore
                       srtMap
                   CmmProc{} ->
                     pprPanic "doSRTs" (text "Found Proc in static data list:" <+> ppr decl))
               (moduleSRTMap moduleSRTInfo') data_
  return (moduleSRTInfo'{ moduleSRTMap = srtMap_w_raws }, srt_decls ++ decls')
-- | Build the SRT for a strongly-connected component of blocks.
-- An acyclic node gets its own SRT; for a cycle we build a single SRT
-- covering the whole group (see Note [recursive SRTs]).
doSCC
  :: DynFlags
  -> LabelMap CLabel -- which blocks are static function entry points
  -> Set CLabel -- static data
  -> SCC (SomeLabel, CAFLabel, Set CAFLabel)
  -> StateT ModuleSRTInfo UniqSM
        ( [CmmDeclSRTs]          -- generated SRTs
        , [(Label, CLabel)]      -- SRT fields for info tables
        , [(Label, [SRTEntry])]  -- SRTs to attach to static functions
        , Bool                   -- Whether the group has CAF references
        )
doSCC dflags staticFuns static_data (AcyclicSCC (lbl, caf_lbl, caf_set)) =
  oneSRT dflags staticFuns [lbl] [caf_lbl] False caf_set static_data
doSCC dflags staticFuns static_data (CyclicSCC group) = do
  -- build a single SRT for the whole cycle, see Note [recursive SRTs]
  let (group_lbls, group_caf_lbls, caf_sets) = unzip3 group
  oneSRT dflags staticFuns group_lbls group_caf_lbls False
         (Set.unions caf_sets) static_data
{- Note [recursive SRTs]
If the dependency analyser has found us a recursive group of
declarations, then we build a single SRT for the whole group, on the
grounds that everything in the group is reachable from everything
else, so we lose nothing by having a single SRT.
However, there are a couple of wrinkles to be aware of.
* The Set CAFLabel for this SRT will contain labels in the group
itself. The SRTMap will therefore not contain entries for these labels
yet, so we can't turn them into SRTEntries using resolveCAF. BUT we
can just remove recursive references from the Set CAFLabel before
generating the SRT - the SRT will still contain all the CAFLabels that
we need to refer to from this group's SRT.
* That is, EXCEPT for static function closures. For the same reason
described in Note [Invalid optimisation: shortcutting], we cannot omit
references to static function closures.
- But, since we will merge the SRT with one of the static function
closures (see [FUN]), we can omit references to *that* static
function closure from the SRT.
-}
-- | Build an SRT for a set of blocks.  The returned quadruple mirrors
-- 'doSCC': generated SRT decls, info-table SRT fields, SRTs to merge
-- into static function closures, and whether anything here is CAFFY.
oneSRT
  :: DynFlags
  -> LabelMap CLabel -- which blocks are static function entry points
  -> [SomeLabel] -- blocks in this set
  -> [CAFLabel] -- labels for those blocks
  -> Bool -- True <=> this SRT is for a CAF
  -> Set CAFLabel -- SRT for this set
  -> Set CLabel -- Static data labels in this group
  -> StateT ModuleSRTInfo UniqSM
       ( [CmmDeclSRTs] -- SRT objects we built
       , [(Label, CLabel)] -- SRT fields for these blocks' itbls
       , [(Label, [SRTEntry])] -- SRTs to attach to static functions
       , Bool -- Whether the group has CAF references
       )
oneSRT dflags staticFuns lbls caf_lbls isCAF cafs static_data = do
  topSRT <- get
  let
    srtMap = moduleSRTMap topSRT
    blockids = getBlockLabels lbls
    -- Can we merge this SRT with a FUN_STATIC closure?
    maybeFunClosure :: Maybe (CLabel, Label)
    otherFunLabels :: [CLabel]
    (maybeFunClosure, otherFunLabels) =
      case [ (l,b) | b <- blockids, Just l <- [mapLookup b staticFuns] ] of
        [] -> (Nothing, [])
        ((l,b):xs) -> (Just (l,b), map fst xs)
    -- Remove recursive references from the SRT
    nonRec :: Set CAFLabel
    nonRec = cafs `Set.difference` Set.fromList caf_lbls
    -- Resolve references to their SRT entries
    resolved :: [SRTEntry]
    resolved = mapMaybe (resolveCAF srtMap) (Set.toList nonRec)
    -- The set of all SRTEntries in SRTs that we refer to from here.
    allBelow =
      Set.unions [ lbls | caf <- resolved
                 , Just lbls <- [Map.lookup caf (flatSRTs topSRT)] ]
    -- Remove SRTEntries that are also in an SRT that we refer to.
    -- Implements the [Filter] optimisation.
    filtered0 = Set.fromList resolved `Set.difference` allBelow
  srtTraceM "oneSRT:"
    (text "srtMap:" <+> ppr srtMap $$
     text "nonRec:" <+> ppr nonRec $$
     text "lbls:" <+> ppr lbls $$
     text "caf_lbls:" <+> ppr caf_lbls $$
     text "static_data:" <+> ppr static_data $$
     text "cafs:" <+> ppr cafs $$
     text "blockids:" <+> ppr blockids $$
     text "maybeFunClosure:" <+> ppr maybeFunClosure $$
     text "otherFunLabels:" <+> ppr otherFunLabels $$
     text "resolved:" <+> ppr resolved $$
     text "allBelow:" <+> ppr allBelow $$
     text "filtered0:" <+> ppr filtered0)
  let
    isStaticFun = isJust maybeFunClosure
    -- For a label without a closure (e.g. a continuation), we must
    -- update the SRTMap for the label to point to a closure. It's
    -- important that we don't do this for static functions or CAFs,
    -- see Note [Invalid optimisation: shortcutting].
    updateSRTMap :: Maybe SRTEntry -> StateT ModuleSRTInfo UniqSM ()
    updateSRTMap srtEntry =
      srtTrace "updateSRTMap"
        (ppr srtEntry <+> "isCAF:" <+> ppr isCAF <+>
         "isStaticFun:" <+> ppr isStaticFun) $
      when (not isCAF && (not isStaticFun || isNothing srtEntry)) $
        modify' $ \state ->
          let !srt_map =
                foldl' (\srt_map cafLbl@(CAFLabel clbl) ->
                          -- Only map static data to Nothing (== not CAFFY). For CAFFY
                          -- statics we refer to the static itself instead of a SRT.
                          if not (Set.member clbl static_data) || isNothing srtEntry then
                            Map.insert cafLbl srtEntry srt_map
                          else
                            srt_map)
                       (moduleSRTMap state)
                       caf_lbls
          in
            state{ moduleSRTMap = srt_map }
    this_mod = thisModule topSRT
    allStaticData =
      all (\(CAFLabel clbl) -> Set.member clbl static_data) caf_lbls
  if Set.null filtered0 then do
    -- The SRT would be empty: record that fact (so other SRTs can drop
    -- references to this group) and emit nothing.
    srtTraceM "oneSRT: empty" (ppr caf_lbls)
    updateSRTMap Nothing
    return ([], [], [], False)
  else do
    -- We're going to build an SRT for this group, which should include function
    -- references in the group. See Note [recursive SRTs].
    let allBelow_funs =
          Set.fromList (map (SRTEntry . toClosureLbl) otherFunLabels)
    let filtered = filtered0 `Set.union` allBelow_funs
    srtTraceM "oneSRT" (text "filtered:" <+> ppr filtered $$
                        text "allBelow_funs:" <+> ppr allBelow_funs)
    case Set.toList filtered of
      [] -> pprPanic "oneSRT" empty -- unreachable
      -- [Inline] - when we have only one entry there is no need to
      -- build an SRT object at all, instead we put the singleton SRT
      -- entry in the info table.
      [one@(SRTEntry lbl)]
        | -- Info tables refer to SRTs by offset (as noted in the section
          -- "Referring to an SRT from the info table" of Note [SRTs]). However,
          -- when dynamic linking is used we cannot guarantee that the offset
          -- between the SRT and the info table will fit in the offset field.
          -- Consequently we build a singleton SRT in this case.
          not (labelDynamic dflags this_mod lbl)
          -- MachO relocations can't express offsets between compilation units at
          -- all, so we are always forced to build a singleton SRT in this case.
            && (not (osMachOTarget $ platformOS $ targetPlatform dflags)
               || isLocalCLabel this_mod lbl) -> do
          -- If we have a static function closure, then it becomes the
          -- SRT object, and everything else points to it. (the only way
          -- we could have multiple labels here is if this is a
          -- recursive group, see Note [recursive SRTs])
          case maybeFunClosure of
            Just (staticFunLbl,staticFunBlock) ->
                return ([], withLabels, [], True)
              where
                withLabels =
                  [ (b, if b == staticFunBlock then lbl else staticFunLbl)
                  | b <- blockids ]
            Nothing -> do
              srtTraceM "oneSRT: one" (text "caf_lbls:" <+> ppr caf_lbls $$
                                       text "one:" <+> ppr one)
              updateSRTMap (Just one)
              return ([], map (,lbl) blockids, [], True)
      -- When all definitions in this group are static data we don't
      -- generate any SRTs.
      cafList | allStaticData ->
        return ([], [], [], not (null cafList))
      cafList ->
        -- Check whether an SRT with the same entries has been emitted already.
        -- Implements the [Common] optimisation.
        case Map.lookup filtered (dedupSRTs topSRT) of
          Just srtEntry@(SRTEntry srtLbl)  -> do
            srtTraceM "oneSRT [Common]" (ppr caf_lbls <+> ppr srtLbl)
            updateSRTMap (Just srtEntry)
            return ([], map (,srtLbl) blockids, [], True)
          Nothing -> do
            -- No duplicates: we have to build a new SRT object
            (decls, funSRTs, srtEntry) <-
              case maybeFunClosure of
                Just (fun,block) ->
                  return ( [], [(block, cafList)], SRTEntry fun )
                Nothing -> do
                  (decls, entry) <- lift $ buildSRTChain dflags cafList
                  return (decls, [], entry)
            updateSRTMap (Just srtEntry)
            let allBelowThis = Set.union allBelow filtered
                newFlatSRTs = Map.insert srtEntry allBelowThis (flatSRTs topSRT)
                newDedupSRTs = Map.insert filtered srtEntry (dedupSRTs topSRT)
            modify' (\state -> state{ dedupSRTs = newDedupSRTs,
                                      flatSRTs = newFlatSRTs })
            srtTraceM "oneSRT: new" (text "caf_lbls:" <+> ppr caf_lbls $$
                                     text "filtered:" <+> ppr filtered $$
                                     text "srtEntry:" <+> ppr srtEntry $$
                                     text "newDedupSRTs:" <+> ppr newDedupSRTs $$
                                     text "newFlatSRTs:" <+> ppr newFlatSRTs)
            let SRTEntry lbl = srtEntry
            return (decls, map (,lbl) blockids, funSRTs, True)
-- | Build a static SRT object (or a chain of objects) from a list of
-- SRTEntries.  If the list exceeds 'mAX_SRT_SIZE' entries we split it:
-- the tail is built first, and its label is spliced into the head
-- object's entries, forming a linked chain.  Returns all objects built
-- plus the entry to use in the info table (the head of the chain).
buildSRTChain
   :: DynFlags
   -> [SRTEntry]
   -> UniqSM
        ( [CmmDeclSRTs] -- The SRT object(s)
        , SRTEntry      -- label to use in the info table
        )
buildSRTChain _ [] = panic "buildSRT: empty"
buildSRTChain dflags cafSet =
  case splitAt mAX_SRT_SIZE cafSet of
    (these, []) -> do
      -- everything fits in a single SRT object
      (decl, lbl) <- buildSRT dflags these
      return ([decl], lbl)
    -- pattern match instead of the previous partial head/tail: with a
    -- non-empty remainder, splitAt guarantees a non-empty prefix here
    (this : these, those) -> do
      (rest, rest_lbl) <- buildSRTChain dflags (this : those)
      (decl, lbl) <- buildSRT dflags (rest_lbl : these)
      return (decl : rest, lbl)
    ([], _:_) -> panic "buildSRTChain: splitAt invariant violated"
  where
    mAX_SRT_SIZE = 16
-- | Emit one static SRT object containing the given entries, returning
-- the data declaration and an 'SRTEntry' naming the new object.
buildSRT :: DynFlags -> [SRTEntry] -> UniqSM (CmmDeclSRTs, SRTEntry)
buildSRT dflags refs = do
  u <- getUniqueM
  let
    srt_lbl = mkSRTLabel u
    info_lbl = mkSRTInfoLabel (length refs)
    -- the payload is just the closure label of each entry
    payload = [ CmmLabel l | SRTEntry l <- refs ]
    fields =
      mkStaticClosure dflags info_lbl dontCareCCS
        payload
        []                    -- no padding
        [mkIntCLit dflags 0]  -- link field
        []                    -- no saved info
  return (mkDataLits (Section Data srt_lbl) srt_lbl fields, SRTEntry srt_lbl)
-- | Update info tables with references to their SRTs. Also generate
-- static closures, splicing in SRT fields as necessary.
updInfoSRTs
  :: DynFlags
  -> LabelMap CLabel -- SRT labels for each block
  -> LabelMap [SRTEntry] -- SRTs to merge into FUN_STATIC closures
  -> Bool -- Whether the CmmDecl's group has CAF references
  -> CmmDecl
  -> [CmmDeclSRTs]
-- Raw statics pass through unchanged.
updInfoSRTs _ _ _ _ (CmmData s (CmmStaticsRaw lbl statics))
  = [CmmData s (RawCmmStatics lbl statics)]
-- Statics with an info table are flattened to raw literals, baking in
-- the CAF info derived from the group's caffy-ness.
updInfoSRTs dflags _ _ caffy (CmmData s (CmmStatics lbl itbl ccs payload))
  = [CmmData s (RawCmmStatics lbl (map CmmStaticLit field_lits))]
  where
    caf_info = if caffy then MayHaveCafRefs else NoCafRefs
    field_lits = mkStaticClosureFields dflags itbl ccs caf_info payload
updInfoSRTs dflags srt_env funSRTEnv caffy (CmmProc top_info top_l live g)
  | Just (_,closure) <- maybeStaticClosure = [ proc, closure ]
  | otherwise = [ proc ]
  where
    caf_info = if caffy then MayHaveCafRefs else NoCafRefs
    proc = CmmProc top_info { info_tbls = newTopInfo } top_l live g
    newTopInfo = mapMapWithKey updInfoTbl (info_tbls top_info)
    -- the entry block's info table may be replaced wholesale (when we
    -- build a static closure for it); other blocks just get their SRT field
    updInfoTbl l info_tbl
      | l == g_entry g, Just (inf, _) <- maybeStaticClosure = inf
      | otherwise  = info_tbl { cit_srt = mapLookup l srt_env }
    -- Generate static closures [FUN].  Note that this also generates
    -- static closures for thunks (CAFs), because it's easier to treat
    -- them uniformly in the code generator.
    maybeStaticClosure :: Maybe (CmmInfoTable, CmmDeclSRTs)
    maybeStaticClosure
      | Just info_tbl@CmmInfoTable{..} <-
           mapLookup (g_entry g) (info_tbls top_info)
      , Just (id, ccs) <- cit_clo
      , isStaticRep cit_rep =
        let
          (newInfo, srtEntries) = case mapLookup (g_entry g) funSRTEnv of
            Nothing ->
              -- if we don't add SRT entries to this closure, then we
              -- want to set the srt field in its info table as usual
              (info_tbl { cit_srt = mapLookup (g_entry g) srt_env }, [])
            Just srtEntries -> srtTrace "maybeStaticFun" (ppr res)
              (info_tbl { cit_rep = new_rep }, res)
              where res = [ CmmLabel lbl | SRTEntry lbl <- srtEntries ]
          fields = mkStaticClosureFields dflags info_tbl ccs caf_info srtEntries
          -- the merged SRT entries become extra pointer fields of the closure
          new_rep = case cit_rep of
             HeapRep sta ptrs nptrs ty ->
               HeapRep sta (ptrs + length srtEntries) nptrs ty
             _other -> panic "maybeStaticFun"
          lbl = mkLocalClosureLabel (idName id) caf_info
        in
          Just (newInfo, mkDataLits (Section Data lbl) lbl fields)
      | otherwise = Nothing
-- | Debug tracing hook for the SRT pass.  The real implementation
-- ('pprTrace') is commented out so the pass is silent in normal builds;
-- re-enable it by restoring the commented definition.
srtTrace :: String -> SDoc -> b -> b
-- srtTrace = pprTrace
srtTrace _ _ b = b
-- | Monadic variant of 'srtTrace': traces (when enabled) and returns unit.
srtTraceM :: Applicative f => String -> SDoc -> f ()
srtTraceM str doc = srtTrace str doc (pure ())
|
sdiehl/ghc
|
compiler/GHC/Cmm/Info/Build.hs
|
bsd-3-clause
| 42,586
| 0
| 32
| 11,754
| 6,638
| 3,450
| 3,188
| -1
| -1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module HipChat.Types.User where
import Data.Aeson
import Data.Aeson.Casing
import Data.Text (Text)
import GHC.Generics
-- | A HipChat user record as delivered by the HipChat API.
-- Field names are prefixed with @user@; the 'FromJSON' instance strips
-- this prefix and snake_cases the remainder to match the JSON keys.
data User = User
  { userXmppJid    :: Text       -- ^ the user's XMPP/Jabber ID
  , userIsDeleted  :: Bool       -- ^ whether the account has been deleted
  , userName       :: Text       -- ^ display name
  , userLastActive :: Text       -- ^ last-active timestamp (kept as raw text)
  , userEmail      :: Maybe Text -- ^ email address, if present in the payload
  } deriving (Generic, Show)
-- Decode via Generic, stripping the @user@ field prefix and converting the
-- rest to snake_case (e.g. @userXmppJid@ reads the @xmpp_jid@ key).
instance FromJSON User where
  parseJSON = genericParseJSON $ aesonPrefix snakeCase
|
oswynb/hipchat-hs
|
lib/HipChat/Types/User.hs
|
bsd-3-clause
| 512
| 0
| 9
| 144
| 108
| 65
| 43
| 16
| 0
|
module Test.Lex where
import qualified Lexer
import Types.Token (Lexeme(..), Token(..))
import Test.Tasty
import Test.Tasty.HUnit
-- | Lexer unit tests: identifiers, operators and keywords.
lexerSpec :: TestTree
lexerSpec = testGroup "Lexer"
  [ testCase "detecting identifiers" $
      testLexer "foo bar baz" [Identifier "foo", Identifier "bar", Identifier "baz"]
  , testCase "detecting operators" $
      testLexer "foo -> bar" [Identifier "foo", Operator "->", Identifier "bar"]
  , testCase "detecting keywords" $
      testLexer "func -> bar" [Keyword "func", Operator "->", Identifier "bar"]
  ]
-- | Lex @input@ with no source name and assert the produced lexemes.
testLexer :: String -> [Lexeme] -> Assertion
testLexer input expected =
  (map _lexeme <$> Lexer.lex "" input) @?= Right expected
|
letsbreelhere/egg
|
test/Test/Lex.hs
|
bsd-3-clause
| 696
| 0
| 10
| 134
| 210
| 111
| 99
| 16
| 1
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Volume.ZH.Tests
( tests ) where
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Volume.ZH.Corpus
import Duckling.Testing.Asserts
-- | Corpus-driven tests for the Chinese (ZH) Volume dimension.
tests :: TestTree
tests = testGroup "ZH Tests"
  [ makeCorpusTest [Seal Volume] corpus
  ]
|
facebookincubator/duckling
|
tests/Duckling/Volume/ZH/Tests.hs
|
bsd-3-clause
| 582
| 0
| 9
| 93
| 77
| 49
| 28
| 10
| 1
|
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable, ScopedTypeVariables, NamedFieldPuns #-}
{-# LANGUAGE ViewPatterns, RecordWildCards, FlexibleInstances, TypeFamilies, ConstraintKinds #-}
module Development.Shake.Internal.Rules.File(
need, needHasChanged, needBS, needed, neededBS, want,
trackRead, trackWrite, trackAllow, produces,
defaultRuleFile,
(%>), (|%>), (?>), phony, (~>), phonys,
resultHasChanged,
-- * Internal only
FileQ(..), FileA(..), fileStoredValue, fileEqualValue, EqualCost(..), fileForward
) where
import Control.Monad.Extra
import Control.Monad.IO.Class
import Data.Typeable
import Data.List
import Data.Maybe
import qualified Data.ByteString.Char8 as BS
import qualified Data.HashSet as Set
import Foreign.Storable
import Data.Word
import Data.Monoid
import General.Binary
import General.Extra
import Development.Shake.Internal.Core.Types
import Development.Shake.Internal.Core.Rules
import Development.Shake.Internal.Core.Build
import Development.Shake.Internal.Core.Action
import Development.Shake.Internal.FileName
import Development.Shake.Internal.Rules.Rerun
import Development.Shake.Classes
import Development.Shake.FilePath(toStandard)
import Development.Shake.Internal.FilePattern
import Development.Shake.Internal.FileInfo
import Development.Shake.Internal.Options
import Development.Shake.Internal.Errors
import System.FilePath(takeDirectory) -- important that this is the system local filepath, or wrong slashes go wrong
import System.IO.Unsafe(unsafeInterleaveIO)
import Prelude
-- rule operators bind looser than function application but tighter than $
infix 1 %>, ?>, |%>, ~>
---------------------------------------------------------------------
-- TYPES
-- building a 'FileQ' key produces a 'FileR' result
type instance RuleResult FileQ = FileR
-- | The unique key we use to index File rules, to avoid name clashes.
-- All instances are derived from the underlying 'FileName' via
-- GeneralizedNewtypeDeriving, so the wrapper is zero-cost.
newtype FileQ = FileQ {fromFileQ :: FileName}
    deriving (Typeable,Eq,Hashable,Binary,BinaryEx,NFData)
-- | Raw information about a file: modification time, size and hash.
-- The first two fields are unpacked; the 'Storable' instance below relies
-- on the three fields laying out as three 32-bit words.
data FileA = FileA {-# UNPACK #-} !ModTime {-# UNPACK #-} !FileSize FileHash
    deriving (Typeable)
-- | Result of a File rule, may contain raw file information and whether the rule did run this build
data FileR = FileR { answer :: !(Maybe FileA) -- ^ Raw information about the file built by this rule.
                                              --   Set to 'Nothing' for 'phony' files.
                   , useLint :: !Bool -- ^ Should we lint the resulting file
                   }
    deriving (Typeable)
-- | The types of file rule that occur.
data Mode
    = ModePhony (Action ()) -- ^ An action with no file value
    | ModeDirect (Action ()) -- ^ An action that produces this file
    | ModeForward (Action (Maybe FileA)) -- ^ An action that looks up a file someone else produced
-- | The results of the various 'Mode' rules, as stored in the database.
-- See the 'BinaryEx' instance below for the on-disk encoding.
data Answer
    = AnswerPhony                -- ^ phony rule ran; no file information
    | AnswerDirect Ver FileA     -- ^ a direct rule produced the file
    | AnswerForward Ver FileA    -- ^ a forwarding rule looked the file up
-- | The file rules we use, first is the name (as pretty as you can get).
-- The function decides whether the rule applies to a path and in which 'Mode'.
data FileRule = FileRule String (FilePath -> Maybe Mode)
    deriving Typeable
---------------------------------------------------------------------
-- INSTANCES
-- render a key as its file name
instance Show FileQ where show (FileQ x) = fileNameToString x
-- serialise a list of keys via the underlying FileNames
instance BinaryEx [FileQ] where
    putEx = putEx . map fromFileQ
    getEx = map FileQ . getEx
instance NFData FileA where
    rnf (FileA a b c) = rnf a `seq` rnf b `seq` rnf c
instance NFData FileR where
    rnf (FileR a b) = rnf a `seq` rnf b
instance Show FileA where
    show (FileA m s h) = "File {mod=" ++ show m ++ ",size=" ++ show s ++ ",digest=" ++ show h ++ "}"
instance Show FileR where
    show FileR{..} = show answer
-- fixed 12-byte layout: three fields at byte offsets 0, 4 and 8
instance Storable FileA where
    sizeOf _ = 4 * 3 -- 4 Word32's
    alignment _ = alignment (undefined :: ModTime)
    peekByteOff p i = FileA <$> peekByteOff p i <*> peekByteOff p (i+4) <*> peekByteOff p (i+8)
    pokeByteOff p i (FileA a b c) = pokeByteOff p i a >> pokeByteOff p (i+4) b >> pokeByteOff p (i+8) c
instance BinaryEx FileA where
    putEx = putExStorable
    getEx = getExStorable
instance BinaryEx [FileA] where
    putEx = putExStorableList
    getEx = getExStorableList
-- | Project the raw file information out of an 'Answer', if it has any.
fromAnswer :: Answer -> Maybe FileA
fromAnswer ans = case ans of
    AnswerPhony       -> Nothing
    AnswerDirect  _ x -> Just x
    AnswerForward _ x -> Just x
-- The encoding distinguishes constructors by payload length rather than by
-- an explicit tag for the common case: AnswerPhony is empty, AnswerDirect is
-- exactly Ver+FileA bytes, and AnswerForward is one leading 0 byte longer.
instance BinaryEx Answer where
    putEx AnswerPhony = mempty
    putEx (AnswerDirect ver x) = putExStorable ver <> putEx x
    putEx (AnswerForward ver x) = putEx (0 :: Word8) <> putExStorable ver <> putEx x
    getEx x = case BS.length x of
        0 -> AnswerPhony
        i -> if i == sz then f AnswerDirect x else f AnswerForward $ BS.tail x
            where
                sz = sizeOf (undefined :: Ver) + sizeOf (undefined :: FileA)
                f ctor x = let (a,b) = binarySplit x in ctor a $ getEx b
---------------------------------------------------------------------
-- FILE CHECK QUERIES
-- | An equality check and a cost.
data EqualCost
    = EqualCheap -- ^ The equality check was cheap.
    | EqualExpensive -- ^ The equality check was expensive, as the results are not trivially equal.
    | NotEqual -- ^ The values are not equal.
      deriving (Eq,Ord,Show,Read,Typeable,Enum,Bounded)
-- | Query the file system for the current state of a key.  Returns Nothing
-- if the file is absent.  The hash is computed lazily (unsafeInterleaveIO)
-- so it is only paid for when a digest comparison actually needs it.
fileStoredValue :: ShakeOptions -> FileQ -> IO (Maybe FileA)
fileStoredValue ShakeOptions{shakeChange=c, shakeNeedDirectory=allowDir} (FileQ x) = do
    res <- getFileInfo allowDir x
    case res of
        Nothing -> pure Nothing
        -- modtime-only mode never needs a hash
        Just (time,size) | c == ChangeModtime -> pure $ Just $ FileA time size noFileHash
        Just (time,size) -> do
            hash <- unsafeInterleaveIO $ getFileHash x
            pure $ Just $ FileA time size hash
-- | Compare two file records under the configured change-detection mode,
-- reporting how expensive the equality evidence was to obtain.
fileEqualValue :: ShakeOptions -> FileA -> FileA -> EqualCost
fileEqualValue ShakeOptions{shakeChange=mode} (FileA m1 s1 h1) (FileA m2 s2 h2) =
    case mode of
        ChangeModtime         -> cheapIf (m1 == m2)
        ChangeDigest          -> cheapIf (s1 == s2 && h1 == h2)
        ChangeModtimeOrDigest -> cheapIf (m1 == m2 && s1 == s2 && h1 == h2)
        -- remaining modes: modtime match is cheap evidence, digest match is
        -- expensive evidence, otherwise the files differ
        _ | m1 == m2 -> EqualCheap
          | s1 == s2 && h1 == h2 -> EqualExpensive
          | otherwise -> NotEqual
    where cheapIf b = if b then EqualCheap else NotEqual
-- | Arguments: options; is the file an input; a message for failure if the file does not exist; filename
-- Like 'fileStoredValue', but errors (when creation checking applies) if the
-- file is missing.  For outputs under ChangeModtimeAndDigestInput, the digest
-- is downgraded to modtime-only before querying.
{-
storedValueError opts False msg x | False && not (shakeOutputCheck opts) = do
    when (shakeCreationCheck opts) $ do
        whenM (isNothing <$> (storedValue opts x :: IO (Maybe FileA))) $ error $ msg ++ "\n " ++ unpackU (fromFileQ x)
    pure $ FileA fileInfoEq fileInfoEq fileInfoEq
-}
storedValueError :: ShakeOptions -> Bool -> String -> FileQ -> IO (Maybe FileA)
storedValueError opts input msg x = maybe def Just <$> fileStoredValue opts2 x
    where def = if shakeCreationCheck opts || input then error err else Nothing
          err = msg ++ "\n " ++ fileNameToString (fromFileQ x)
          opts2 = if not input && shakeChange opts == ChangeModtimeAndDigestInput then opts{shakeChange=ChangeModtime} else opts
---------------------------------------------------------------------
-- THE DEFAULT RULE
-- | Install the built-in rule mapping 'FileQ' keys to 'FileR' results,
-- wiring up linting, identity and the run function.
defaultRuleFile :: Rules ()
defaultRuleFile = do
    opts@ShakeOptions{..} <- getShakeOptionsRules
    -- A rule from FileQ to (Maybe FileA). The result value is only useful for linting.
    addBuiltinRuleEx (ruleLint opts) (ruleIdentity opts) (ruleRun opts $ shakeRebuildApply opts)
-- | Lint check: after a build, verify that a lintable file result still
-- matches what is on disk.  Phony / non-lintable results are skipped.
ruleLint :: ShakeOptions -> BuiltinLint FileQ FileR
ruleLint opts key (FileR (Just stored) True) = do
    onDisk <- fileStoredValue opts key
    pure $ case onDisk of
        Nothing -> Just "<missing>"
        Just current
            | fileEqualValue opts stored current == EqualCheap -> Nothing
            | otherwise -> Just $ show current
ruleLint _ _ _ = pure Nothing
-- | Derive a stable identity (size + hash) for a built file.  Not available
-- in modtime-only mode, since no hash is tracked there (throws errorNoHash).
ruleIdentity :: ShakeOptions -> BuiltinIdentity FileQ FileR
ruleIdentity opts | shakeChange opts == ChangeModtime = throwImpure errorNoHash
ruleIdentity _ = \k v -> case answer v of
    Just (FileA _ size hash) -> Just $ runBuilder $ putExStorable size <> putExStorable hash
    Nothing -> Nothing
-- | The heart of the file rule: decide whether to reuse the stored 'Answer'
-- or rerun the user rule, honouring the per-file 'Rebuild' flags and the
-- history/cache machinery.
ruleRun :: ShakeOptions -> (FilePath -> Rebuild) -> BuiltinRun FileQ FileR
ruleRun opts@ShakeOptions{..} rebuildFlags o@(FileQ (fileNameToString -> xStr)) oldBin@(fmap getEx -> old :: Maybe Answer) mode = do
    -- for One, rebuild makes perfect sense
    -- for Forward, we expect the child will have already rebuilt - Rebuild just lets us deal with code changes
    -- for Phony, it doesn't make that much sense, but probably isn't harmful?
    let r = rebuildFlags xStr
    (ruleVer, ruleAct, ruleErr) <- getUserRuleInternal o (\(FileRule s _) -> Just s) $ \(FileRule _ f) -> f xStr
    -- a stored version matches if it equals the explicit rule version, or
    -- the single applicable rule's version (Ver 0 when no rule applies)
    let verEq v = Just v == ruleVer || case ruleAct of [] -> v == Ver 0; [(v2,_)] -> v == Ver v2; _ -> False
    let rebuild = do
            putWhen Verbose $ "# " ++ show o
            case ruleAct of
                [] -> rebuildWith Nothing
                [x] -> rebuildWith $ Just x
                _ -> throwM ruleErr
    case old of
        _ | r == RebuildNow -> rebuild
        _ | r == RebuildLater -> case old of
            Just _ ->
                -- ignoring the currently stored value, which may trigger lint has changed
                -- so disable lint on this file
                unLint <$> retOld ChangedNothing
            Nothing -> do
                -- i don't have a previous value, so assume this is a source node, and mark rebuild in future
                now <- liftIO $ fileStoredValue opts o
                case now of
                    Nothing -> rebuild
                    Just now -> do alwaysRerun; retNew ChangedStore $ AnswerDirect (Ver 0) now
        {-
        _ | r == RebuildNever -> do
            now <- liftIO $ fileStoredValue opts o
            case now of
                Nothing -> rebuild
                Just now -> do
                    let diff | Just (AnswerDirect old) <- old, fileEqualValue opts old now /= NotEqual = ChangedRecomputeSame
                             | otherwise = ChangedRecomputeDiff
                    retNew diff $ AnswerDirect now
        -}
        Just (AnswerDirect ver old) | mode == RunDependenciesSame, verEq ver -> do
            now <- liftIO $ fileStoredValue opts o
            let noHash (FileA _ _ x) = isNoFileHash x
            case now of
                Nothing -> rebuild
                Just now -> case fileEqualValue opts old now of
                    NotEqual ->
                        rebuild
                    -- if our last build used no file hashing, but this build should, then we must refresh the hash
                    EqualCheap | if noHash old then shakeChange == ChangeModtimeAndDigestInput || noHash now else True ->
                        retOld ChangedNothing
                    _ ->
                        retNew ChangedStore $ AnswerDirect ver now
        Just (AnswerForward ver _) | verEq ver, mode == RunDependenciesSame -> retOld ChangedNothing
        _ -> rebuild
    where
        -- no need to lint check forward files
        -- but more than that, it goes wrong if you do, see #427
        fileR (AnswerDirect _ x) = FileR (Just x) True
        fileR (AnswerForward _ x) = FileR (Just x) False
        fileR AnswerPhony = FileR Nothing False
        unLint (RunResult a b c) = RunResult a b c{useLint = False}
        -- package up a freshly computed Answer as a RunResult
        retNew :: RunChanged -> Answer -> Action (RunResult FileR)
        retNew c v = pure $ RunResult c (runBuilder $ putEx v) $ fileR v
        -- reuse the stored value; only reachable when old/oldBin are Just
        retOld :: RunChanged -> Action (RunResult FileR)
        retOld c = pure $ RunResult c (fromJust oldBin) $ fileR (fromJust old)
        -- actually run the rebuild
        rebuildWith act = do
            let answer ctor new = do
                    let b = case () of
                            _ | Just old <- old
                              , Just old <- fromAnswer old
                              , fileEqualValue opts old new /= NotEqual -> ChangedRecomputeSame
                            _ -> ChangedRecomputeDiff
                    retNew b $ ctor new
            case act of
                Nothing -> do
                    new <- liftIO $ storedValueError opts True "Error, file does not exist and no rule available:" o
                    answer (AnswerDirect $ Ver 0) $ fromJust new
                Just (ver, ModeForward act) -> do
                    new <- act
                    case new of
                        Nothing -> do
                            -- Not 100% sure how you get here, but I think it involves RebuildLater and multi-file rules
                            historyDisable
                            retNew ChangedRecomputeDiff AnswerPhony
                        Just new -> answer (AnswerForward $ Ver ver) new
                Just (ver, ModeDirect act) -> do
                    cache <- historyLoad ver
                    case cache of
                        Just encodedHash -> do
                            Just (FileA mod size _) <- liftIO $ storedValueError opts False "Error, restored the rule but did not produce file:" o
                            answer (AnswerDirect $ Ver ver) $ FileA mod size $ getExStorable encodedHash
                        Nothing -> do
                            act
                            new <- liftIO $ storedValueError opts False "Error, rule finished running but did not produce file:" o
                            case new of
                                Nothing -> do
                                    -- rule ran, but didn't compute an answer, because shakeCreationCheck=False
                                    -- I think it should probably not return phony, but return a different valid-but-no-file
                                    -- but it's just too rare to bother
                                    historyDisable
                                    retNew ChangedRecomputeDiff AnswerPhony
                                Just new@(FileA _ _ fileHash) -> do
                                    producesUnchecked [xStr]
                                    res <- answer (AnswerDirect $ Ver ver) new
                                    historySave ver $ runBuilder $
                                        if isNoFileHash fileHash then throwImpure errorNoHash else putExStorable fileHash
                                    pure res
                Just (_, ModePhony act) -> do
                    -- See #523 and #524
                    -- Shake runs the dependencies first, but stops when one has changed.
                    -- We don't want to run the existing deps first if someone changes the build system,
                    -- so insert a fake dependency that cuts the process dead.
                    alwaysRerun
                    act
                    retNew ChangedRecomputeDiff AnswerPhony
-- | Build a batch of 'FileQ' keys with @f@ and apply them all at once.
apply_ :: Partial => (a -> FileName) -> [a] -> Action [FileR]
apply_ f xs = apply [FileQ (f x) | x <- xs]
-- | Has a file changed. This function will only give the correct answer if called in the rule
--   producing the file, /before/ the rule has modified the file in question.
--   Best avoided, but sometimes necessary in conjunction with 'needHasChanged' to cause rebuilds
--   to happen if the result is deleted or modified.
resultHasChanged :: FilePath -> Action Bool
resultHasChanged file = do
    let key = FileQ $ fileNameFromString file
    stored <- getDatabaseValue key
    -- recover the previously recorded file info, whether it was stored
    -- in encoded or decoded form
    let previous = case result <$> stored of
            Nothing -> Nothing
            Just (Left bs) -> fromAnswer $ getEx bs
            Just (Right v) -> answer v
    case previous of
        Nothing -> pure True
        Just prev -> do
            opts <- getShakeOptions
            current <- liftIO $ fileStoredValue opts key
            pure $ case current of
                Nothing -> True
                Just cur -> fileEqualValue opts prev cur == NotEqual
---------------------------------------------------------------------
-- OPTIONS ON TOP
-- | Internal method for adding forwarding actions
fileForward :: String -> (FilePath -> Maybe (Action (Maybe FileA))) -> Rules ()
fileForward help act = addUserRule $ FileRule help $ \x -> ModeForward <$> act x
-- | Add a dependency on the file arguments, ensuring they are built before continuing.
-- The file arguments may be built in parallel, in any order. This function is particularly
-- necessary when calling 'Development.Shake.cmd' or 'Development.Shake.command'. As an example:
--
-- @
-- \"\/\/*.rot13\" '%>' \\out -> do
-- let src = 'Development.Shake.FilePath.dropExtension' out
-- 'need' [src]
-- 'Development.Shake.cmd' \"rot13\" [src] \"-o\" [out]
-- @
--
-- Usually @need [foo,bar]@ is preferable to @need [foo] >> need [bar]@ as the former allows greater
-- parallelism, while the latter requires @foo@ to finish building before starting to build @bar@.
--
-- This function should not be called with wildcards (e.g. @*.txt@ - use 'getDirectoryFiles' to expand them),
-- environment variables (e.g. @$HOME@ - use 'getEnv' to expand them) or directories (directories cannot be
-- tracked directly - track files within the directory instead).
need :: Partial => [FilePath] -> Action ()
need xs = withFrozenCallStack $ void $ apply_ fileNameFromString xs
-- | Like 'need' but returns a list of rebuilt dependencies since the calling rule last built successfully.
--
-- The following example writes a list of changed dependencies to a file as its action.
--
-- @
-- \"target\" '%>' \\out -> do
-- let sourceList = [\"source1\", \"source2\"]
-- rebuildList <- 'needHasChanged' sourceList
-- 'Development.Shake.writeFileLines' out rebuildList
-- @
--
-- This function can be used to alter the action depending on which dependency needed
-- to be rebuild.
--
-- Note that a rule can be run even if no dependency has changed, for example
-- because of 'shakeRebuild' or because the target has changed or been deleted.
-- To detect the latter case you may wish to use 'resultHasChanged'.
needHasChanged :: Partial => [FilePath] -> Action [FilePath]
needHasChanged paths = withFrozenCallStack $ do
    apply_ fileNameFromString paths
    me <- getCurrentKey
    myInfo <- maybe (pure Nothing) getDatabaseValueGeneric me
    case myInfo of
        -- never built before, or not running inside a key: everything has changed
        Nothing -> pure paths
        Just info -> filterM (rebuiltSince info) paths
  where
    -- a dependency counts as changed if its change time is newer than
    -- the time we were last built
    rebuiltSince info path = do
        dep <- getDatabaseValue (FileQ $ fileNameFromString path)
        pure $ case dep of
            Nothing -> False
            Just depInfo -> changed depInfo > built info
needBS :: Partial => [BS.ByteString] -> Action ()
needBS xs = withFrozenCallStack $ void $ apply_ fileNameFromByteString xs
-- | Like 'need', but if 'shakeLint' is set, check that the file does not rebuild.
-- Used for adding dependencies on files that have already been used in this rule.
needed :: Partial => [FilePath] -> Action ()
needed xs = withFrozenCallStack $ do
    opts <- getShakeOptions
    if isNothing (shakeLint opts)
        then need xs
        else neededCheck (map fileNameFromString xs)
neededBS :: Partial => [BS.ByteString] -> Action ()
neededBS xs = withFrozenCallStack $ do
    opts <- getShakeOptions
    if isNothing (shakeLint opts)
        then needBS xs
        else neededCheck (map fileNameFromByteString xs)
-- Shared worker for 'needed'/'neededBS' under lint: build the files, then
-- verify the build did not change what was already on disk.
neededCheck :: Partial => [FileName] -> Action ()
neededCheck xs = withFrozenCallStack $ do
    opts <- getShakeOptions
    -- snapshot the stored value of each file before building it
    pre <- liftIO $ mapM (fileStoredValue opts . FileQ) xs
    post <- apply_ id xs
    -- a file is bad when the rule produced an answer that differs from the
    -- pre-build snapshot ("File change"), or the file did not exist before
    -- the build at all ("File created")
    let bad = [ (x, if isJust a then "File change" else "File created")
              | (x, a, FileR (Just b) _) <- zip3 xs pre post
              , maybe NotEqual (\a -> fileEqualValue opts a b) a == NotEqual]
    case bad of
        [] -> pure ()
        -- report only the first offending file
        (file,msg):_ -> throwM $ errorStructured
            "Lint checking error - 'needed' file required rebuilding"
            [("File", Just $ fileNameToString file)
            ,("Error",Just msg)]
            ""
-- Either trackRead or trackWrite
track :: ([FileQ] -> Action ()) -> [FilePath] -> Action ()
track tracker paths = do
    ShakeOptions{shakeLintIgnore} <- getShakeOptions
    -- share the pattern matcher across all paths
    let ignored = (?==*) shakeLintIgnore
        kept = filter (not . ignored) paths
    unless (null kept) $
        tracker $ map (FileQ . fileNameFromString) kept
-- | Track that a file was read by the action preceding it. If 'shakeLint' is activated
-- then these files must be dependencies of this rule. Calls to 'trackRead' are
-- automatically inserted in 'LintFSATrace' mode.
trackRead :: [FilePath] -> Action ()
trackRead paths = track lintTrackRead paths
-- | Track that a file was written by the action preceding it. If 'shakeLint' is activated
-- then these files must either be the target of this rule, or never referred to by the build system.
-- Calls to 'trackWrite' are automatically inserted in 'LintFSATrace' mode.
trackWrite :: [FilePath] -> Action ()
trackWrite paths = track lintTrackWrite paths
-- | Allow accessing a file in this rule, ignoring any subsequent 'trackRead' \/ 'trackWrite' calls matching
-- the pattern.
trackAllow :: [FilePattern] -> Action ()
trackAllow ps = lintTrackAllow matcher
  where
    matcher (FileQ x) = ignored (fileNameToString x)
    -- bound once so the pattern set is shared across calls
    ignored = (?==*) ps
-- | This rule builds the following files, in addition to any defined by its target.
-- At the end of the rule these files must have been written.
-- These files must /not/ be tracked as part of the build system - two rules cannot produce
-- the same file and you cannot 'need' the files it produces.
produces :: [FilePath] -> Action ()
produces files = producesChecked files >> trackWrite files
-- | Require that the argument files are built by the rules, used to specify the target.
--
-- @
-- main = 'Development.Shake.shake' 'shakeOptions' $ do
-- 'want' [\"Main.exe\"]
-- ...
-- @
--
-- This program will build @Main.exe@, given sufficient rules. All arguments to all 'want' calls
-- may be built in parallel, in any order.
--
-- This function is defined in terms of 'action' and 'need', use 'action' if you need more complex
-- targets than 'want' allows.
want :: Partial => [FilePath] -> Rules ()
want files = case files of
    [] -> pure ()
    _ -> withFrozenCallStack $ action $ need files
-- Core worker for file-producing rules: when the predicate accepts a file,
-- create its parent directory and run the supplied action.
root :: String -> (FilePath -> Bool) -> (FilePath -> Action ()) -> Rules ()
root help test act = addUserRule $ FileRule help $ \x ->
    if test x
        then Just $ ModeDirect $ do
            liftIO $ createDirectoryRecursive $ takeDirectory x
            act x
        else Nothing
-- | Declare a Make-style phony action. A phony target does not name
-- a file (despite living in the same namespace as file rules);
-- rather, it names some action to be executed when explicitly
-- requested. You can demand 'phony' rules using 'want'. (And 'need',
-- although that's not recommended.)
--
-- Phony actions are intended to define recipes that can be executed
-- by the user. If you 'need' a phony action in a rule then every
-- execution where that rule is required will rerun both the rule and
-- the phony action. However, note that phony actions are never
-- executed more than once in a single build run.
--
-- In make, the @.PHONY@ attribute on non-file-producing rules has a
-- similar effect. However, while in make it is acceptable to omit
-- the @.PHONY@ attribute as long as you don't create the file in
-- question, a Shake rule which behaves this way will fail lint.
-- For file-producing rules which should be
-- rerun every execution of Shake, see 'Development.Shake.alwaysRerun'.
phony :: Located => String -> Action () -> Rules ()
phony oname@(toStandard -> name) act = do
    addTarget oname
    addPhony ("phony " ++ show oname ++ " at " ++ callStackTop) matcher
  where
    -- only the standardised name triggers this phony action
    matcher s
        | s == name = Just act
        | otherwise = Nothing
-- | A predicate version of 'phony', return 'Just' with the 'Action' for the matching rules.
phonys :: Located => (String -> Maybe (Action ())) -> Rules ()
phonys acts = addPhony ("phonys at " ++ callStackTop) acts
-- | Infix operator alias for 'phony', for sake of consistency with normal
-- rules.
(~>) :: Located => String -> Action () -> Rules ()
(~>) oname@(toStandard -> name) act = do
    addTarget oname
    addPhony (show oname ++ " ~> at " ++ callStackTop) $ \s ->
        if s == name then Just act else Nothing
addPhony :: String -> (String -> Maybe (Action ())) -> Rules ()
addPhony help act = addUserRule $ FileRule help $ \x -> ModePhony <$> act x
-- | Define a rule to build files. If the first argument returns 'True' for a given file,
-- the second argument will be used to build it. Usually '%>' is sufficient, but '?>' gives
-- additional power. For any file used by the build system, only one rule should return 'True'.
-- This function will create the directory for the result file, if necessary.
--
-- @
-- (all isUpper . 'Development.Shake.FilePath.takeBaseName') '?>' \\out -> do
-- let src = 'Development.Shake.FilePath.replaceBaseName' out $ map toLower $ takeBaseName out
-- 'Development.Shake.writeFile'' out . map toUpper =<< 'Development.Shake.readFile'' src
-- @
--
-- If the 'Action' completes successfully the file is considered up-to-date, even if the file
-- has not changed.
(?>) :: Located => (FilePath -> Bool) -> (FilePath -> Action ()) -> Rules ()
(?>) test act = priority 0.5 $ root helpText test act
  where helpText = "?> at " ++ callStackTop
-- | Define a set of patterns, and if any of them match, run the associated rule. Defined in terms of '%>'.
-- Think of it as the OR (@||@) equivalent of '%>'.
(|%>) :: Located => [FilePattern] -> (FilePath -> Action ()) -> Rules ()
(|%>) pats act = do
    mapM_ addTarget pats
    -- wildcard-free patterns are matched by exact (standardised) comparison,
    -- the rest via full pattern matching at reduced priority
    let (literals, wild) = partition simple pats
    case map toStandard literals of
        [] -> pure ()
        [single] -> root help (\x -> toStandard x == single) act
        many -> let known = Set.fromList many
                in root help (\x -> toStandard x `Set.member` known) act
    unless (null wild) $
        let tests = map (?==) wild
        in priority 0.5 $ root help (\x -> any ($ x) tests) act
  where help = show pats ++ " |%> at " ++ callStackTop
-- | Define a rule that matches a 'FilePattern', see '?==' for the pattern rules.
-- Patterns with no wildcards have higher priority than those with wildcards, and no file
-- required by the system may be matched by more than one pattern at the same priority
-- (see 'priority' and 'alternatives' to modify this behaviour).
-- This function will create the directory for the result file, if necessary.
--
-- @
-- \"*.asm.o\" '%>' \\out -> do
-- let src = 'Development.Shake.FilePath.dropExtension' out
-- 'need' [src]
-- 'Development.Shake.cmd' \"as\" [src] \"-o\" [out]
-- @
--
-- To define a build system for multiple compiled languages, we recommend using @.asm.o@,
-- @.cpp.o@, @.hs.o@, to indicate which language produces an object file.
-- I.e., the file @foo.cpp@ produces object file @foo.cpp.o@.
--
-- Note that matching is case-sensitive, even on Windows.
--
-- If the 'Action' completes successfully the file is considered up-to-date, even if the file
-- has not changed.
(%>) :: Located => FilePattern -> (FilePath -> Action ()) -> Rules ()
(%>) pat act = withFrozenCallStack $ levelled $ do
    addTarget pat
    root (show pat ++ " %> at " ++ callStackTop) (pat ?==) act
  where
    -- wildcard-free patterns keep the default priority,
    -- wildcard ones are registered at priority 0.5
    levelled = if simple pat then id else priority 0.5
|
ndmitchell/shake
|
src/Development/Shake/Internal/Rules/File.hs
|
bsd-3-clause
| 27,137
| 0
| 28
| 7,242
| 5,770
| 2,956
| 2,814
| -1
| -1
|
{-# Language OverloadedStrings #-}
{-|
Module : Client.Commands.Window
Description : Window command implementations
Copyright : (c) Eric Mertens, 2016-2020
License : ISC
Maintainer : emertens@gmail.com
-}
module Client.Commands.Window (windowCommands, parseFocus) where
import Client.Commands.Arguments.Spec
import Client.Commands.TabCompletion
import Client.Commands.Types
import Client.Commands.WordCompletion
import Client.Mask (buildMask)
import Client.State
import Client.State.Focus
import Client.State.Network
import Client.State.Window (windowClear, wlText, winMessages, winHidden, winSilent, winName)
import Control.Applicative
import Control.Exception
import Control.Lens
import Data.Foldable
import Data.List ((\\), nub)
import qualified Client.State.EditBox as Edit
import Data.HashSet (HashSet)
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.Lazy as LText
import qualified Data.Text.Lazy.IO as LText
import Irc.Identifier
-- | Commands for creating, arranging, clearing, and switching between
-- client windows. Each 'Command' pairs its argument spec and help text
-- with an implementation and tab-completion defined below.
windowCommands :: CommandSection
windowCommands = CommandSection "Window management"
  ------------------------------------------------------------------------
  [ Command
      (pure "focus")
      (liftA2 (,) (simpleToken "network") (optionalArg (simpleToken "[target]")))
      "Change the focused window.\n\
      \\n\
      \When only \^Bnetwork\^B is specified this switches to the network status window.\n\
      \When \^Bnetwork\^B and \^Btarget\^B are specified this switches to that chat window.\n\
      \\n\
      \Nickname and channels can be specified in the \^Btarget\^B parameter.\n\
      \See also: /query (aliased /c /channel) to switch to a target on the current network.\n"
    $ ClientCommand cmdFocus tabFocus
  , Command
      ("c" :| ["channel"])
      (simpleToken "focus")
      "\^BParameters:\^B\n\
      \\n\
      \ focuses: Focus name\n\
      \\n\
      \\^BDescription:\^B\n\
      \\n\
      \ This command sets the current window focus. When\n\
      \ no network is specified, the current network will\n\
      \ be used.\n\
      \\n\
      \ Client: *\n\
      \ Network: \^_network\^_:\n\
      \ Channel: \^_#channel\^_\n\
      \ Channel: \^_network\^_:\^_#channel\^_\n\
      \ User: \^_nick\^_\n\
      \ User: \^_network\^_:\^_nick\^_\n\
      \\n\
      \\^BExamples:\^B\n\
      \\n\
      \ /c fn:#haskell\n\
      \ /c #haskell\n\
      \ /c fn:\n\
      \ /c *:\n\
      \\n\
      \\^BSee also:\^B focus\n"
    $ ClientCommand cmdChannel tabChannel
  , Command
      (pure "clear")
      (optionalArg (liftA2 (,) (simpleToken "[network]") (optionalArg (simpleToken "[channel]"))))
      -- FIX: "provided the that" -> "provided then that"
      "Clear a window.\n\
      \\n\
      \If no arguments are provided the current window is cleared.\n\
      \If \^Bnetwork\^B is provided then that network window is cleared.\n\
      \If \^Bnetwork\^B and \^Bchannel\^B are provided that chat window is cleared.\n\
      \If \^Bnetwork\^B is provided and \^Bchannel\^B is \^B*\^O all windows for that network are cleared.\n\
      \\n\
      \If a window is cleared and no longer active that window will be removed from the client.\n"
    $ ClientCommand cmdClear tabFocus
  , Command
      (pure "windows")
      (optionalArg (simpleToken "[kind]"))
      "Show a list of all windows with an optional argument to limit the kinds of windows listed.\n\
      \\n\
      \\^Bkind\^O: one of \^Bnetworks\^O, \^Bchannels\^O, \^Busers\^O\n\
      \\n"
    $ ClientCommand cmdWindows tabWindows
  , Command
      (pure "splits")
      (remainingArg "focuses")
      -- FIX: "sents" -> "sets"
      "\^BParameters:\^B\n\
      \\n\
      \ focuses: List of focus names\n\
      \\n\
      \\^BDescription:\^B\n\
      \\n\
      \ This command sets the set of focuses that will always\n\
      \ be visible, even when unfocused. When the client is focused\n\
      \ to an active network, the network can be omitted when\n\
      \ specifying a focus. If no focuses are listed, they will\n\
      \ all be cleared.\n\
      \\n\
      \ Client: *\n\
      \ Network: \^_network\^_:\n\
      \ Channel: \^_#channel\^_\n\
      \ Channel: \^_network\^_:\^_#channel\^_\n\
      \ User: \^_nick\^_\n\
      \ User: \^_network\^_:\^_nick\^_\n\
      \\n\
      \\^BExamples:\^B\n\
      \\n\
      \ /splits * fn:#haskell fn:chanserv\n\
      \ /splits #haskell #haskell-lens nickserv\n\
      \ /splits\n\
      \\n\
      \\^BSee also:\^B splits+, splits-\n"
    $ ClientCommand cmdSplits tabSplits
  , Command
      (pure "splits+")
      (remainingArg "focuses")
      "Add windows to the splits list. Omit the list of focuses to add the\
      \ current window.\n\
      \\n\
      \\^Bfocuses\^B: space delimited list of focus names.\n\
      \\n\
      \Client: *\n\
      \Network: \^BNETWORK\^B\n\
      \Channel: \^BNETWORK\^B:\^B#CHANNEL\^B\n\
      \User: \^BNETWORK\^B:\^BNICK\^B\n\
      \\n\
      \If the network part is omitted, the current network will be used.\n"
    $ ClientCommand cmdSplitsAdd tabSplits
  , Command
      (pure "splits-")
      (remainingArg "focuses")
      "Remove windows from the splits list. Omit the list of focuses to\
      \ remove the current window.\n\
      \\n\
      \\^Bfocuses\^B: space delimited list of focus names.\n\
      \\n\
      \Client: *\n\
      \Network: \^BNETWORK\^B\n\
      \Channel: \^BNETWORK\^B:\^B#CHANNEL\^B\n\
      \User: \^BNETWORK\^B:\^BNICK\^B\n\
      \\n\
      \If the network part is omitted, the current network will be used.\n"
    $ ClientCommand cmdSplitsDel tabActiveSplits
  , Command
      (pure "ignore")
      (remainingArg "masks")
      "\^BParameters:\^B\n\
      \\n\
      \ masks: List of masks\n\
      \\n\
      \\^BDescription:\^B\n\
      \\n\
      \ Toggle the soft-ignore on each of the space-delimited given\n\
      \ nicknames. Ignores can use \^B*\^B (many) and \^B?\^B (one) wildcards.\n\
      \ Masks can be of the form: nick[[!user]@host]\n\
      \ Masks use a case-insensitive comparison.\n\
      \\n\
      \ If no masks are specified the current ignore list is displayed.\n\
      \\n\
      \\^BExamples:\^B\n\
      \\n\
      \ /ignore\n\
      \ /ignore nick1 nick2 nick3\n\
      \ /ignore nick@host\n\
      \ /ignore nick!user@host\n\
      \ /ignore *@host\n\
      \ /ignore *!baduser@*\n"
    $ ClientCommand cmdIgnore tabIgnore
  , Command
      (pure "grep")
      (remainingArg "regular-expression")
      "Set the persistent regular expression.\n\
      \\n\
      \\^BFlags:\^B\n\
      \ -A n Show n messages after match\n\
      \ -B n Show n messages before match\n\
      \ -C n Show n messages before and after match\n\
      \ -F Use plain-text match instead of regular expression\n\
      \ -i Case insensitive match\n\
      \ -v Invert pattern match\n\
      \ -m n Limit results to n matches\n\
      \ -- Stop processing flags\n\
      \\n\
      \Clear the regular expression by calling this without an argument.\n\
      \\n\
      \\^B/grep\^O is case-sensitive.\n"
    $ ClientCommand cmdGrep simpleClientTab
  , Command
      (pure "dump")
      (simpleToken "filename")
      "Dump current buffer to file.\n"
    $ ClientCommand cmdDump simpleClientTab
  , Command
      (pure "mentions")
      (pure ())
      "Show a list of all message that were highlighted as important.\n\
      \\n\
      \When using \^B/grep\^B the important messages are those matching\n\
      \the regular expression instead.\n"
    $ ClientCommand cmdMentions noClientTab
  , Command
      (pure "setwindow")
      (simpleToken "hide|show|loud|silent")
      -- FIX: the blank-line separator was written "\n\" (a string gap
      -- followed by a LITERAL 'n'), not "\\n\" like every other separator
      -- in this section, so the help text rendered a stray "n".
      "Set window property.\n\
      \\n\
      \\^Bloud\^B / \^Bsilent\^B\n\
      \ Toggles if window activity appears in the status bar.\n\
      \\n\
      \\^Bshow\^B / \^Bhide\^B\n\
      \ Toggles if window appears in window command shortcuts.\n"
    $ ClientCommand cmdSetWindow tabSetWindow
  , Command
      (pure "setname")
      (optionalArg (simpleToken "[letter]"))
      "Set window shortcut letter. If no letter is provided the next available\n\
      \letter will automatically be assigned.\n\
      \\n\
      \Available letters are configured in the 'window-names' configuration setting.\n"
    $ ClientCommand cmdSetWindowName noClientTab
  ]
-- | Implementation of @/setname@: assign a shortcut letter to the focused
-- window, picking the next free configured letter when none is supplied.
cmdSetWindowName :: ClientCommand (Maybe String)
cmdSetWindowName st arg =
  -- unset current name so that it becomes available
  let mbSt1 = failover (clientWindows . ix (view clientFocus st) . winName) (\_ -> Nothing) st in
  case mbSt1 of
    -- failover yields Nothing when the traversal matched no window
    Nothing -> commandFailureMsg "no current window" st
    Just st1 ->
      let next = clientNextWindowName st
          mbName =
            case arg of
              -- a single-letter argument must be one of the configured names
              Just [n] | n `elem` clientWindowNames st -> Right n
              Just _ -> Left "invalid name"
              Nothing
                -- '\0' is the sentinel for "no letter available"
                | next /= '\0' -> Right next
                | otherwise -> Left "no free names" in
      case mbName of
        Left e -> commandFailureMsg e st
        Right name ->
          -- steal the letter from any other window currently holding it
          let unset n = if n == Just name then Nothing else n in
          commandSuccess
            $ set (clientWindows . ix (view clientFocus st) . winName) (Just name)
            $ over (clientWindows . each . winName) unset
            $ st1
-- | Implementation of @/setwindow@: toggle the hide\/show\/loud\/silent
-- properties on the currently focused window.
cmdSetWindow :: ClientCommand String
cmdSetWindow st cmd =
  case lookup cmd settings of
    Nothing -> commandFailureMsg "bad window setting" st
    Just f ->
      maybe (commandFailureMsg "no such window" st) commandSuccess
            (failover (clientWindows . ix (view clientFocus st)) f st)
  where
    -- valid settings paired with their window updates
    settings =
      [ ("show"  , set winHidden False)
      , ("hide"  , set winName Nothing . set winHidden True)
      , ("loud"  , set winSilent False)
      , ("silent", set winSilent True)
      ]
tabSetWindow :: Bool {- ^ reversed -} -> ClientCommand String
tabSetWindow isReversed st _ =
  simpleTabCompletion plainWordCompleteMode [] options isReversed st
  where
    options = ["hide", "show", "loud", "silent"] :: [Text]
-- | Implementation of @/grep@
cmdGrep :: ClientCommand String
cmdGrep st str
  | null str = commandSuccess (set clientRegex Nothing st)  -- empty argument clears the filter
  | otherwise =
      maybe (commandFailureMsg "bad grep" st)
            (\r -> commandSuccess (set clientRegex (Just r) st))
            (buildMatcher str)
-- | Implementation of @/windows@ command. Set subfocus to Windows.
cmdWindows :: ClientCommand (Maybe String)
cmdWindows st arg =
  case lookup arg table of
    Just kind -> commandSuccess (changeSubfocus (FocusWindows kind) st)
    Nothing -> commandFailureMsg "/windows expected networks, channels, or users" st
  where
    -- recognised filters; no argument means every window
    table =
      [ (Nothing        , AllWindows)
      , (Just "networks", NetworkWindows)
      , (Just "channels", ChannelWindows)
      , (Just "users"   , UserWindows)
      ]
-- | Implementation of @/mentions@ command. Set subfocus to Mentions.
cmdMentions :: ClientCommand ()
cmdMentions st _ = commandSuccess st'
  where
    st' = changeSubfocus FocusMentions st
-- | Implementation of @/ignore@: with no arguments show the ignore list,
-- otherwise toggle each listed mask in the ignore set and rebuild the mask.
cmdIgnore :: ClientCommand String
cmdIgnore st rest =
  case mkId <$> Text.words (Text.pack rest) of
    [] -> commandSuccess (changeSubfocus FocusIgnoreList st)
    xs -> commandSuccess st2
      where
        -- toggle the given identifiers, capturing the updated set
        (newIgnores, st1) = (clientIgnores <%~ updateIgnores) st
        -- recompile the combined ignore mask from the new set
        st2 = set clientIgnoreMask (buildMask (toList newIgnores)) st1
        updateIgnores :: HashSet Identifier -> HashSet Identifier
        updateIgnores s = foldl' updateIgnore s xs
        -- membership toggle: present -> removed, absent -> added
        updateIgnore s x = over (contains x) not s
-- | Complete the nickname at the current cursor position using the
-- userlist for the currently focused channel (if any)
tabIgnore :: Bool {- ^ reversed -} -> ClientCommand String
tabIgnore isReversed st _ =
  simpleTabCompletion completionMode nickHint candidates isReversed st
  where
    completionMode = currentNickCompletionMode st
    nickHint = activeNicks st
    -- complete from the channel user list plus existing ignore entries
    candidates = currentCompletionList st ++ views clientIgnores toList st
-- | Implementation of @/splits@
cmdSplits :: ClientCommand String
cmdSplits st str =
  withSplitFocuses st str $ \foci ->
    commandSuccess (setExtraFocus (nub foci) st)
-- | Implementation of @/splits+@. When no focuses are provided
-- the current focus is used instead.
cmdSplitsAdd :: ClientCommand String
cmdSplitsAdd st str =
  withSplitFocuses st str $ \foci ->
    -- no arguments: add the currently focused window
    let chosen = if null foci
                   then [(view clientFocus st, view clientSubfocus st)]
                   else foci
    in commandSuccess (setExtraFocus (nub (chosen ++ view clientExtraFocus st)) st)
-- | Implementation of @/splits-@. When no focuses are provided
-- the current focus is used instead.
cmdSplitsDel :: ClientCommand String
cmdSplitsDel st str =
  withSplitFocuses st str $ \foci ->
    -- no arguments: remove the currently focused window
    let chosen = if null foci
                   then [(view clientFocus st, view clientSubfocus st)]
                   else foci
    in commandSuccess (setExtraFocus (view clientExtraFocus st \\ chosen) st)
-- Parse each word of the argument string as a focus name; run the
-- continuation only when every word parses.
withSplitFocuses ::
  ClientState ->
  String ->
  ([(Focus, Subfocus)] -> IO CommandResult) ->
  IO CommandResult
withSplitFocuses st str k =
  case traverse parse1 (words str) of
    Nothing -> commandFailureMsg "unable to parse arguments" st
    Just foci -> k [(f, FocusMessages) | f <- foci]
  where
    parse1 = parseFocus (views clientFocus focusNetwork st)
-- | Parses a single focus name given a default network.
parseFocus ::
  Maybe Text {- ^ default network -} ->
  String {- ^ @[network:]target@ -} ->
  Maybe Focus
parseFocus mbNet x =
  -- NOTE: alternative order matters; a bare "*" must be checked before
  -- the general no-colon case below.
  case break (==':') x of
    -- bare "*" (no colon): the unfocused client window
    ("*","") -> pure Unfocused
    -- "net:" with empty target: that network's status window
    (net,_:"") -> pure (NetworkFocus (Text.pack net))
    -- "net:target": chat window on the named network
    (net,_:chan) -> pure (ChannelFocus (Text.pack net) (mkId (Text.pack chan)))
    -- no colon: target on the default network, failing if there is none
    (chan,"") -> mbNet <&> \net ->
      ChannelFocus net (mkId (Text.pack chan))
cmdFocus :: ClientCommand (String, Maybe String)
cmdFocus st (network, mbChannel)
  | network == "*" = commandSuccess (changeFocus Unfocused st)
  | otherwise = commandSuccess (changeFocus focus st)
  where
    -- without a target, land on the network status window
    focus =
      case mbChannel of
        Nothing -> NetworkFocus (Text.pack network)
        Just channel -> ChannelFocus (Text.pack network) (mkId (Text.pack channel))
tabWindows :: Bool -> ClientCommand String
tabWindows isReversed st _ =
  simpleTabCompletion plainWordCompleteMode [] kinds isReversed st
  where
    kinds = ["networks","channels","users"] :: [Text]
-- | Tab completion for @/splits-@. This completes only from the list of active
-- entries in the splits list.
tabActiveSplits :: Bool -> ClientCommand String
tabActiveSplits isReversed st _ =
  simpleTabCompletion plainWordCompleteMode [] (shortNames <> fullNames) isReversed st
  where
    -- only message-view entries in the splits list are offered
    active = [x | (x, FocusMessages) <- view clientExtraFocus st]
    fullNames = map renderSplitFocus active
    -- channels on the current network can also be completed bare
    shortNames =
      [ idText chan
      | ChannelFocus net chan <- active
      , views clientFocus focusNetwork st == Just net
      ]
-- | When used on a channel that the user is currently
-- joined to this command will clear the messages but
-- preserve the window. When used on a window that the
-- user is not joined to this command will delete the window.
cmdClear :: ClientCommand (Maybe (String, Maybe String))
cmdClear st args =
  case args of
    Nothing -> clearFocus (view clientFocus st)
    Just ("*", Nothing ) -> clearFocus Unfocused
    Just (network, Nothing ) -> clearFocus (NetworkFocus (Text.pack network))
    -- channel argument "*" clears every window on the network
    Just (network, Just "*" ) -> clearNetworkWindows network
    Just (network, Just channel) -> clearFocus (ChannelFocus (Text.pack network) (mkId (Text.pack channel)))
  where
    -- clear each window whose focus belongs to the named network, one at a time
    clearNetworkWindows network
      = commandSuccess
      $ foldl' (flip clearFocus1) st
      $ filter (\x -> focusNetwork x == Just (Text.pack network))
      $ views clientWindows Map.keys st
    clearFocus focus = commandSuccess (clearFocus1 focus st)
    -- clear a single window: active windows are emptied but kept,
    -- inactive windows are deleted; focus is then moved off a deleted window
    clearFocus1 focus st' = focusEffect (windowEffect st')
      where
        windowEffect = over (clientWindows . at focus)
                            (if isActive then fmap windowClear else const Nothing)
        focusEffect
          | noChangeNeeded = id
          | prevExists = changeFocus prev
          | otherwise = advanceFocus
          where
            -- no focus change when the window survives or wasn't focused
            noChangeNeeded = isActive || view clientFocus st' /= focus
            prevExists = has (clientWindows . ix prev) st'
            -- NOTE(review): reads the outer 'st', not the accumulator 'st'';
            -- this only differs when several windows are cleared in one fold
            -- (clearNetworkWindows) -- confirm this is intentional
            prev = view clientPrevFocus st
        -- a window is "active" while its connection/channel still exists
        isActive =
          case focus of
            Unfocused -> False
            NetworkFocus network -> has (clientConnection network) st'
            ChannelFocus network channel -> has (clientConnection network
                                                .csChannels . ix channel) st'
-- | Tab completion for @/splits[+]@. When given no arguments this
-- populates the current list of splits, otherwise it tab completes
-- all of the currently available windows.
tabSplits :: Bool -> ClientCommand String
tabSplits isReversed st rest
  -- no arguments yet: replace the input line with the current splits
  | all (' '==) rest =
      commandSuccess (set (clientTextBox . Edit.line) newline st)
  -- otherwise complete window names; channels on the current network may
  -- be given without the "network:" prefix
  | otherwise =
      simpleTabCompletion plainWordCompleteMode [] (currentNet <> allWindows) isReversed st
  where
    newline = Edit.endLine $ unwords
            $ "/splits"
            : [Text.unpack (renderSplitFocus x) | (x, FocusMessages) <- view clientExtraFocus st]
    allWindows = map renderSplitFocus (views clientWindows Map.keys st)
    currentNet =
      case views clientFocus focusNetwork st of
        Just net -> map idText (channelWindowsOnNetwork net st)
        Nothing -> []
-- | Render a entry from splits back to the textual format.
renderSplitFocus :: Focus -> Text
renderSplitFocus focus =
  case focus of
    Unfocused -> "*"
    NetworkFocus net -> net <> ":"
    ChannelFocus net chan -> net <> ":" <> idText chan
-- | When tab completing the first parameter of the focus command
-- the current networks are used.
tabFocus :: Bool -> ClientCommand String
tabFocus isReversed st _ =
  simpleTabCompletion plainWordCompleteMode [] completions isReversed st
  where
    -- choose completions by how many words precede the cursor:
    -- second word completes networks, third completes channels
    completions =
      case words (uncurry take (clientLine st)) of
        [_cmd, _net] -> map mkId (HashMap.keys (view clientConnections st))
        [_cmd, net, _chan] -> channelWindowsOnNetwork (Text.pack net) st
        _ -> []
-- | @/channel@ command. Takes a channel or nickname and switches
-- focus to that target on the current network.
cmdChannel :: ClientCommand String
cmdChannel st channel =
  maybe (commandFailureMsg "No current network" st)
        (\focus -> commandSuccess (changeFocus focus st))
        (parseFocus (views clientFocus focusNetwork st) channel)
-- | Tab completion for @/channel@. Tab completion uses pre-existing
-- windows.
tabChannel ::
  Bool {- ^ reversed order -} ->
  ClientCommand String
tabChannel isReversed st _ =
  simpleTabCompletion plainWordCompleteMode [] (onNetwork <> everyWindow) isReversed st
  where
    everyWindow = map renderSplitFocus (views clientWindows Map.keys st)
    -- channels on the current network complete without a network prefix
    onNetwork =
      case views clientFocus focusNetwork st of
        Just net -> map idText (channelWindowsOnNetwork net st)
        Nothing -> []
-- | Return the list of identifiers for open channel windows on
-- the given network name.
channelWindowsOnNetwork ::
  Text {- ^ network -} ->
  ClientState {- ^ client state -} ->
  [Identifier] {- ^ open channel windows -}
channelWindowsOnNetwork network st =
  [ chan
  | ChannelFocus net chan <- Map.keys (view clientWindows st)
  , network == net
  ]
-- | Implementation of @/dump@. Writes detailed contents of focused buffer
-- to the given filename.
cmdDump :: ClientCommand String
cmdDump st fp = do
  outcome <- try (LText.writeFile fp (LText.unlines dumpLines))
  case outcome of
    -- surface any IO failure as a command error message
    Left err -> commandFailureMsg (Text.pack (displayException (err :: SomeException))) st
    Right _ -> commandSuccess st
  where
    -- focused window's messages, filtered like the UI, oldest first
    dumpLines
      = reverse
      $ clientFilter st id
      $ toListOf (clientWindows . ix (view clientFocus st) . winMessages . each . wlText) st
|
glguy/irc-core
|
src/Client/Commands/Window.hs
|
isc
| 21,231
| 3
| 21
| 5,732
| 4,011
| 2,028
| 1,983
| 329
| 8
|
{-# LANGUAGE TypeApplications #-}
module Test.Pos.Chain.Ssc.CborSpec
( spec
) where
import Universum
import Test.Hspec (Spec, describe)
import Pos.Chain.Ssc (VssCertificate)
import Test.Pos.Binary.Helpers (binaryTest)
import Test.Pos.Chain.Ssc.Arbitrary ()
-- | Exercises the binary (CBOR) 'Bi' instances for 'VssCertificate'
-- via 'binaryTest'.
spec :: Spec
spec = describe "Cbor Bi instances" $ binaryTest @VssCertificate
|
input-output-hk/pos-haskell-prototype
|
chain/test/Test/Pos/Chain/Ssc/CborSpec.hs
|
mit
| 405
| 0
| 7
| 107
| 87
| 55
| 32
| 10
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.