code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
-- | A binary tree that carries values only at its leaves.
data Tree a = Leaf a | Branch (Tree a) (Tree a)

-- | Collect the leaf values of a tree in left-to-right order.
fringe :: Tree a -> [a]
fringe t = case t of
  Leaf v     -> [v]
  Branch l r -> fringe l ++ fringe r
-- e.g. fringe (Branch (Leaf 1) (Branch (Leaf 2) (Branch (Leaf 3) (Leaf 4)))) == [1,2,3,4]
| maxtangli/sonico | language/haskell/tree.hs | mit | 226 | 0 | 8 | 48 | 89 | 46 | 43 | 4 | 1 |
{-# LANGUAGE CPP,GeneralizedNewtypeDeriving,RankNTypes #-}
{-|
Module : Graphics.Rendering.Cairo.Canvas
Copyright : Copyright (c) 2015 Anton Pirogov
License : MIT
Maintainer : anton.pirogov@gmail.com
This module defines the 'Canvas' monad, which is a convenience wrapper around
the underlying Cairo rendering and can be used with the same textures.
You can also mix both APIs, if the need arises.
The Canvas API imitates most of the drawing functions
of the Processing language. See <http://processing.org/reference> for comparison.
While having the Processing spirit, this module does not aim for a perfect
mapping and deviates where necessary or appropriate. Nevertheless most
Processing examples should be trivial to port to the Canvas API. Example:
@
\{\-\# LANGUAGE OverloadedStrings \#\-\}
import SDL
import SDL.Cairo
import Linear.V2 (V2(..))
import Graphics.Rendering.Cairo.Canvas
main :: IO ()
main = do
initializeAll
window <- createWindow "cairo-canvas using SDL2" defaultWindow
renderer <- createRenderer window (-1) defaultRenderer
texture <- createCairoTexture' renderer window
withCairoTexture' texture $ runCanvas $ do
background $ gray 102
fill $ red 255 !\@ 128
noStroke
rect $ D 200 200 100 100
stroke $ green 255 !\@ 128
fill $ blue 255 !\@ 128
rect $ D 250 250 100 100
triangle (V2 400 300) (V2 350 400) (V2 400 400)
copy renderer texture Nothing Nothing
present renderer
delay 5000
@
-}
module Graphics.Rendering.Cairo.Canvas (
-- * Entry point
Canvas, runCanvas, withRenderer, getCanvasSize,
-- * Color and Style
Color, Byte, gray, red, green, blue, rgb, (!@),
stroke, fill, noStroke, noFill, strokeWeight, strokeJoin, strokeCap,
-- * Coordinates
Dim(..), toD, dimPos, dimSize, Anchor(..), aligned, centered, corners,
-- * Primitives
background, point, line, triangle, rect, polygon, shape, ShapeMode(..),
-- * Arcs and Curves
circle, circle', arc, ellipse, bezier, bezierQ,
-- * Transformations
resetMatrix, pushMatrix, popMatrix, translate, rotate, scale,
-- * Images
Image(imageSize), createImage, loadImagePNG, saveImagePNG, image, image', blend, grab,
-- * Text
Font(..), textFont, textSize, textExtents, text, text',
-- * Math
mapRange, radians, degrees,
-- * Misc
randomSeed, random, getTime, Time(..),
LineCap(..), LineJoin(..)
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Control.Monad.State
import Data.Word (Word8)
import Data.Time.Clock (UTCTime(..),getCurrentTime)
import Data.Time.LocalTime (timeToTimeOfDay,TimeOfDay(..))
import Data.Time.Calendar (toGregorian)
import System.Random (mkStdGen,setStdGen,randomRIO,Random)
import Linear.V2 (V2(..))
import Linear.V4 (V4(..))
import qualified Graphics.Rendering.Cairo as C
import Graphics.Rendering.Cairo (Render,LineJoin(..),LineCap(..),Format(..),Operator(..))
-- | For values from 0 to 255
type Byte = Word8
-- | RGBA Color is just a byte vector. Colors can be added, subtracted, etc.
type Color = V4 Byte
-- | Internal drawing state threaded through the 'Canvas' monad.
data CanvasState = CanvasState{ csSize :: V2 Double, -- ^ reported size
                                csFG :: Maybe Color, -- ^ stroke color ('Nothing' = stroking disabled)
                                csBG :: Maybe Color, -- ^ fill color ('Nothing' = filling disabled)
                                csImages :: [Image]  -- ^ keep track of images to free later
                              }
-- | get size of the canvas (Processing: @width(), height()@)
getCanvasSize :: Canvas (V2 Double)
getCanvasSize = gets csSize
-- | Newtype stacking 'CanvasState' on top of an arbitrary monad;
-- the 'Canvas' constructor simply wraps a 'StateT' computation.
newtype RenderWrapper m a = Canvas { unCanvas :: StateT CanvasState m a }
  deriving (Functor, Applicative, Monad, MonadTrans, MonadIO, MonadState CanvasState)
-- | wrapper around the Cairo 'Render' monad, providing a Processing-style API
type Canvas = RenderWrapper Render
-- | draw on a Cairo surface using the 'Canvas' monad
-- (argument-flipped convenience around 'withCairoSurface').
-- Fix: this exported binding previously had no explicit type signature.
runCanvas :: Canvas a -> C.Surface -> IO a
runCanvas = flip withCairoSurface

-- | draw on a Cairo surface using the 'Canvas' monad; the reported canvas
-- size is taken from the surface's own width and height
withCairoSurface :: C.Surface -> Canvas a -> IO a
withCairoSurface s m = do
  w <- fromIntegral <$> C.imageSurfaceGetWidth s
  h <- fromIntegral <$> C.imageSurfaceGetHeight s
  withRenderer (C.renderWith s) (V2 w h) m
-- | draw on a Cairo surface using the 'Canvas' monad
withRenderer :: (forall a. Render a -> IO a) -- ^ the renderer to use (e.g. 'Graphics.Rendering.Cairo.renderWith' surface)
             -> V2 Double -- ^ reported canvas size
             -> Canvas a -> IO a
withRenderer renderer size c = do
  -- Processing-style defaults: 1px round-capped black stroke, white fill
  let defaults = strokeWeight 1 >> strokeCap C.LineCapRound
      initstate = CanvasState{ csSize = size
                             , csFG = Just $ gray 0
                             , csBG = Just $ gray 255
                             , csImages = []
                             }
  (ret, result) <- renderer $ runStateT (unCanvas $ defaults >> c) initstate
  -- free every image surface that was 'track'ed during the Canvas action
  forM_ (csImages result) $ \(Image s' _ _) -> C.surfaceFinish s'
  return ret
----
-- | set current stroke color
stroke :: Color -> Canvas ()
stroke clr = modify $ \cs -> cs{csFG=Just clr}
-- | set current fill color
fill :: Color -> Canvas ()
fill clr = modify $ \cs -> cs{csBG=Just clr}
-- | disable stroke (-> shapes without borders!), reenabled by using 'stroke'
noStroke :: Canvas ()
noStroke = modify $ \cs -> cs{csFG=Nothing}
-- | disable fill (-> shapes are not filled!), reenabled by using 'fill'
noFill :: Canvas ()
noFill = modify $ \cs -> cs{csBG=Nothing}
-- | create opaque gray color
gray :: Byte -> Color
gray c = V4 c c c 255
-- | create opaque red color
red :: Byte -> Color
red c = V4 c 0 0 255
-- | create opaque green color
green :: Byte -> Color
green c = V4 0 c 0 255
-- | create opaque blue color
blue :: Byte -> Color
blue c = V4 0 0 c 255
-- | create opaque mixed color
rgb :: Byte -> Byte -> Byte -> Color
rgb r g b = V4 r g b 255
-- | set transparency of color (half red would be: @red 255 !\@ 128@);
-- keeps the RGB channels and replaces only the alpha byte
(!@) :: Color -> Byte -> Color
(V4 r g b _) !@ a = V4 r g b a
-- | set line width for shape borders etc.
-- (applied immediately to the underlying Cairo context, not to CanvasState)
strokeWeight :: Double -> Canvas ()
strokeWeight d = lift $ C.setLineWidth d
-- | set the style of connections between lines of shapes
strokeJoin :: C.LineJoin -> Canvas ()
strokeJoin l = lift $ C.setLineJoin l
-- | set the style of the line caps
strokeCap :: C.LineCap -> Canvas ()
strokeCap l = lift $ C.setLineCap l
----
-- | position (canonically, top-left corner) and size representation (X Y W H)
data Dim = D Double Double Double Double deriving (Show,Eq)

-- | indicates where a position coordinate is located in a rectangle
data Anchor = NW | N | NE | E | SE | S | SW | W | Center | Baseline deriving (Show,Eq)

-- Fix: the exported helpers below previously had no explicit type
-- signatures; they are part of the public API and get them here.

-- | create dimensions from position and size vector
toD :: V2 Double -> V2 Double -> Dim
toD (V2 a b) (V2 c d) = D a b c d

-- | get position vector from dimensions
dimPos :: Dim -> V2 Double
dimPos (D a b _ _) = V2 a b

-- | get size vector from dimensions
dimSize :: Dim -> V2 Double
dimSize (D _ _ c d) = V2 c d

-- | takes dimensions with bottom-right corner instead of size, returns normalized (with size)
corners :: Dim -> Dim
corners (D xl yl xh yh) = D xl yl (xh-xl) (yh-yl)

-- | takes dimensions with centered position, returns normalized (top-left corner)
centered :: Dim -> Dim
centered = aligned Center
-- | takes dimensions with non-standard position coordinate,
-- returns dimensions normalized to top-left corner coordinate
aligned :: Anchor -> Dim -> Dim
aligned anchor (D x y w h) = D (x - dx) (y - dy) w h
  where
    -- offset of the given anchor point from the top-left corner
    (dx, dy) = case anchor of
      NW       -> (0,   0)
      N        -> (w/2, 0)
      NE       -> (w,   0)
      W        -> (0,   h/2)
      Center   -> (w/2, h/2)
      E        -> (w,   h/2)
      SW       -> (0,   h)
      S        -> (w/2, h)
      SE       -> (w,   h)
      Baseline -> (0,   h)  -- treated exactly like SW
----
-- | replace current matrix with identity
resetMatrix :: Canvas ()
resetMatrix = lift C.identityMatrix
-- | push current matrix onto the stack
-- (NOTE(review): Cairo's @save@ snapshots the entire render state, not
-- only the matrix -- confirm that is intended)
pushMatrix :: Canvas ()
pushMatrix = lift C.save
-- | pop a matrix
popMatrix :: Canvas ()
popMatrix = lift C.restore
-- | translate coordinate system
translate :: V2 Double -> Canvas ()
translate (V2 x y) = lift $ C.translate x y
-- | scale coordinate system
scale :: V2 Double -> Canvas ()
scale (V2 x y) = lift $ C.scale x y
-- | rotate coordinate system (angle in radians, see 'radians')
rotate :: Double -> Canvas ()
rotate a = lift $ C.rotate a
----
-- | clear the canvas with given color
background :: Color -> Canvas ()
background c = do
  (V2 w h) <- gets csSize
  -- paint the whole reported canvas area, independent of current fill color
  lift $ setColor c >> C.rectangle 0 0 w h >> C.fill
-- | draw a point with stroke color (cairo emulates this with 1x1 rects!)
point :: V2 Double -> Canvas ()
point (V2 x y) = ifColor csFG $ \c -> do
  C.rectangle x y 1 1
  setColor c
  C.fill
-- | draw a line between two points with stroke color
line :: V2 Double -> V2 Double -> Canvas ()
line (V2 x1 y1) (V2 x2 y2) = ifColor csFG $ \c -> do
  C.moveTo x1 y1
  C.lineTo x2 y2
  setColor c
  C.stroke
-- | draw a triangle connecting three points
triangle :: V2 Double -> V2 Double -> V2 Double -> Canvas ()
triangle (V2 x1 y1) (V2 x2 y2) (V2 x3 y3) = drawShape $ do
  C.moveTo x1 y1
  C.lineTo x2 y2
  C.lineTo x3 y3
  C.lineTo x1 y1  -- close the outline by returning to the first point
-- | draw a rectangle
rect :: Dim -> Canvas ()
rect (D x y w h) = drawShape $ C.rectangle x y w h
-- | draw a polygon connecting given points (equivalent to @'shape' ('ShapeRegular' True)@)
polygon :: [V2 Double] -> Canvas ()
polygon = shape (ShapeRegular True)
-- | Shape mode to use
data ShapeMode = ShapeRegular Bool -- ^regular path. flag decides whether the first and last point are connected
               | ShapePoints -- ^just draw the points, no lines
               | ShapeLines -- ^interpret points as pairs, draw lines
               | ShapeTriangles -- ^interpret points as triples, draw triangles
               | ShapeTriangleStrip -- ^draw triangle for every neighborhood of 3 points
               | ShapeTriangleFan -- ^fix first point, draw triangles with every neighboring pair and first point
               deriving (Show,Eq)
-- | draw shape along a given path using given @'ShapeMode'@.
-- (Processing: @beginShape(),vertex(),endShape()@)
shape :: ShapeMode -> [V2 Double] -> Canvas ()
-- regular path: one moveTo, then a lineTo per remaining point
shape (ShapeRegular closed) ((V2 x y):ps) = drawShape $ do
  C.moveTo x y
  forM_ ps $ \(V2 x' y') -> C.lineTo x' y'
  when closed $ C.closePath
shape (ShapeRegular _) _ = return ()  -- empty point list: nothing to draw
shape ShapePoints ps = forM_ ps point
-- consecutive pairs become separate line segments; a leftover point is dropped
shape ShapeLines (p1:p2:ps) = do
  line p1 p2
  shape ShapeLines ps
shape ShapeLines _ = return ()
-- consecutive triples become separate triangles; leftovers are dropped
shape ShapeTriangles (p1:p2:p3:ps) = do
  triangle p1 p2 p3
  shape ShapeTriangles ps
shape ShapeTriangles _ = return ()
-- every sliding window of three points becomes a triangle
shape ShapeTriangleStrip (p1:p2:p3:ps) = do
  triangle p1 p2 p3
  shape ShapeTriangleStrip (p2:p3:ps)
shape ShapeTriangleStrip _ = return ()
-- the first point is kept and shared by every generated triangle
shape ShapeTriangleFan (p1:p2:p3:ps) = do
  triangle p1 p2 p3
  shape ShapeTriangleFan (p1:p3:ps)
shape ShapeTriangleFan _ = return ()
----
-- | draw arc: @arc dimensions startAngle endAngle@ (angles in radians)
arc :: Dim -> Double -> Double -> Canvas ()
arc (D x y w h) sa ea = drawShape $ do
  C.save
  -- draw a unit-circle arc in a coordinate system translated to the centre
  -- of the bounding box and scaled to its half-axes, so the arc fits it
  C.translate (x+(w/2)) (y+(h/2))
  C.scale (w/2) (h/2)
  C.arc 0 0 1 sa ea
  C.restore
-- | draw ellipse
ellipse :: Dim -> Canvas ()
ellipse dim = arc dim 0 (2*pi)
-- | draw circle: @circle leftCorner diameter@
circle :: V2 Double -> Double -> Canvas ()
circle (V2 x y) d = ellipse (D x y d d)
-- | draw circle: @circle centerPoint diameter@
circle' :: V2 Double -> Double -> Canvas ()
circle' (V2 x y) d = ellipse $ centered (D x y d d)
-- | draw cubic bezier spline: @bezier fstAnchor fstControl sndControl sndAnchor@
bezier :: V2 Double -> V2 Double -> V2 Double -> V2 Double -> Canvas ()
bezier (V2 x1 y1) (V2 x2 y2) (V2 x3 y3) (V2 x4 y4) = drawShape $ do
  C.moveTo x1 y1
  C.curveTo x2 y2 x3 y3 x4 y4
-- | draw quadratic bezier spline: @bezier fstAnchor control sndAnchor@
-- the quadratic curve is rendered as the equivalent cubic: each cubic
-- control point lies 2/3 of the way from an anchor towards the single
-- quadratic control point
bezierQ :: V2 Double -> V2 Double -> V2 Double -> Canvas ()
bezierQ p0 p12 p3 = bezier p0 p1 p2 p3
  where p1 = p0 + 2/3*(p12-p0)
        p2 = p3 + 2/3*(p12-p3)
----
-- | map a value from one range onto another (linear interpolation;
-- values outside the source range extrapolate linearly)
mapRange :: Double -> (Double,Double) -> (Double,Double) -> Double
mapRange x (srcLo, srcHi) (dstLo, dstHi) =
  let slope = (dstHi - dstLo) / (srcHi - srcLo)
  in (x - srcLo) * slope + dstLo
-- | convert degrees to radians
radians :: Double -> Double
radians degs = degs * pi / 180
-- | convert radians to degrees
degrees :: Double -> Double
degrees rads = rads / pi * 180
-- | force value v into given range: clamp to the upper bound first,
-- then to the lower bound
constrain :: Double -> (Double,Double) -> Double
constrain value (lower, upper) = max lower (min upper value)
-- | set new random seed (replaces the global standard generator)
randomSeed :: Int -> Canvas ()
randomSeed s = liftIO $ setStdGen $ mkStdGen s
-- | get new random number
-- (uses the global standard generator, so 'randomSeed' makes runs repeatable)
random :: (Random a) => (a,a) -> Canvas a
random = liftIO . randomRIO
-- | date and time as returned by getTime
data Time = Time { year :: Int, month :: Int, day :: Int
                 , hour :: Int, minute :: Int, second :: Int } deriving (Show,Eq)
-- | get current system time. Use the 'Time' accessors for specific components.
-- (Processing: @year(),month(),day(),hour(),minute(),second()@)
-- NOTE(review): this reports UTC -- it decomposes 'getCurrentTime' directly
-- without applying a local time zone; confirm that is intended
getTime :: IO Time
getTime = do
  (UTCTime day time) <- getCurrentTime
  let (y,m,d) = toGregorian day
      (TimeOfDay h mins s) = timeToTimeOfDay time
  return $ Time (fromIntegral y::Int) m d h mins (round s :: Int)
----
-- | Stores an image surface with additional information
data Image = Image {imageSurface::C.Surface, imageSize::V2 Int, imageFormat::Format}
-- | create a new empty image of given size
createImage :: V2 Int -> Canvas Image
createImage (V2 w h) = do
  s <- liftIO $ C.createImageSurface FormatARGB32 w h
  let img = Image s (V2 w h) FormatARGB32
  -- register for cleanup when the Canvas computation finishes
  track img
  return img
--TODO: add checks (file exists, correct format, etc.)
-- | load a PNG image from given path.
loadImagePNG :: FilePath -> Canvas Image
loadImagePNG path = do
  s <- liftIO $ C.imageSurfaceCreateFromPNG path
  -- record the loaded surface's actual size and pixel format
  w <- C.imageSurfaceGetWidth s
  h <- C.imageSurfaceGetHeight s
  f <- C.imageSurfaceGetFormat s
  let img = Image s (V2 w h) f
  track img
  return img
-- | Save an image as PNG to given file path
saveImagePNG :: Image -> FilePath -> Canvas ()
saveImagePNG (Image s _ _) fp = liftIO (C.surfaceWriteToPNG s fp)
-- | Render complete image on given coordinates (at its natural size)
image :: Image -> V2 Double -> Canvas ()
image img@(Image _ (V2 w h) _) (V2 x y) =
  image' img (D x y (fromIntegral w) (fromIntegral h))
-- | Render complete image inside given dimensions
-- (the whole source image is used; it is scaled to the destination 'Dim')
image' :: Image -> Dim -> Canvas ()
image' img@(Image _ (V2 ow oh) _) =
  blend OperatorSource img (D 0 0 (fromIntegral ow) (fromIntegral oh))
-- | Copy given part of image to given part of screen, using given blending
-- operator and resizing when necessary. Use 'OperatorSource' to copy without
-- blending effects. (Processing: @copy(),blend()@)
blend :: Operator -> Image -> Dim -> Dim -> Canvas ()
blend op (Image s _ _) sdim ddim = lift $ C.withTargetSurface $ \surf ->
  copyFromToSurface op s sdim surf ddim
-- | get a copy of the image from current window (Processing: @get()@)
grab :: Dim -> Canvas Image
grab dim@(D _ _ w h) = do
  -- allocate a target image of the requested (rounded) size, then copy
  -- the region out of the current render target into it
  i@(Image s _ _) <- createImage (V2 (round w) (round h))
  lift $ C.withTargetSurface $ \surf ->
    copyFromToSurface OperatorSource surf dim s (D 0 0 w h)
  return i
----
-- | Font definition
data Font = Font{fontFace::String
                ,fontSize::Double
                ,fontBold::Bool
                ,fontItalic::Bool} deriving (Show,Eq)
-- | set current font for text rendering
textFont :: Font -> Canvas ()
textFont f = lift $ setFont f
-- | get the size of the text when rendered in current font
textSize :: String -> Canvas (V2 Double)
textSize = return . dimSize . fst <=< textExtents
-- | get information about given text when rendered in current font.
-- returns tuple with location of top-left corner relative to
-- the origin and size of rendered text in the first component,
-- cursor advancement relative to origin in the second component
-- (also see 'Graphics.Rendering.Cairo.TextExtents').
textExtents :: String -> Canvas (Dim, V2 Double)
textExtents s = do
  (C.TextExtents xb yb w h xa ya) <- lift $ C.textExtents s
  return ((D xb yb w h),(V2 xa ya))
-- | render text. returns cursor advancement (@text = text' Baseline@)
text :: String -> V2 Double -> Canvas (V2 Double)
text = text' Baseline
-- | render text with specified alignment. returns cursor advancement
text' :: Anchor -> String -> V2 Double -> Canvas (V2 Double)
text' a str pos = do
  (C.TextExtents xb yb w h xa ya) <- lift $ C.textExtents str
  -- normalize the anchored position to a top-left corner, then shift by
  -- the text bearing (skipped entirely when rendering at the baseline)
  let (D xn yn _ _) = (if a==Baseline then id else aligned a) $ toD pos $ V2 w h
      (V2 x' y') = (V2 xn yn) - if a/=Baseline then (V2 xb yb) else 0
  -- text is drawn with the stroke color; nothing happens under 'noStroke'
  ifColor csFG $ \c -> C.moveTo x' y' >> setColor c >> C.showText str
  return $ V2 xa ya
-- helpers --
-- | draw a shape - first fill with bg color, then draw border with stroke color
-- (the path-building action @m@ is executed twice, once per pass, so it
-- must be repeatable)
drawShape :: Render a -> Canvas ()
drawShape m = do
  ifColor csBG $ \c -> m >> setColor c >> C.fill
  ifColor csFG $ \c -> m >> setColor c >> C.stroke
-- | if color (csFG/csBG) is set, perform given render block
-- (no-op when the selected color slot is 'Nothing')
ifColor :: (CanvasState -> Maybe Color) -> (Color -> Render a) -> Canvas ()
ifColor cf m = get >>= \cs -> case cf cs of
  Just c -> lift (m c) >> return ()
  Nothing -> return ()
-- | convert from byte-valued RGBA to Double representation, set color
setColor :: Color -> Render ()
setColor (V4 r g b a) = C.setSourceRGBA (conv r) (conv g) (conv b) (conv a)
  -- Fix: map the byte range 0..255 onto 0.0..1.0 by dividing by 255.
  -- The previous (*(1.0/256)) made 255 come out as ~0.996, so fully
  -- saturated channels (pure white, pure red, ...) were never reached;
  -- Cairo's setSourceRGBA expects each channel in [0,1].
  where conv = (/ 255) . fromIntegral
-- | Add to garbage collection list
-- (surfaces recorded here are finished by 'withRenderer' when the
-- Canvas computation ends)
track :: Image -> Canvas ()
track img = modify $ \cs -> cs{csImages=img:csImages cs}
-- cairo helpers --
-- | helper: returns new surface with scaled content. does NOT cleanup!
createScaledSurface :: C.Surface -> (V2 Double) -> Render C.Surface
createScaledSurface s (V2 w h) = do
  ow <- C.imageSurfaceGetWidth s
  oh <- C.imageSurfaceGetHeight s
  s' <- liftIO $ C.createSimilarSurface s C.ContentColorAlpha (round w) (round h)
  C.renderWith s' $ do
    -- scale factor maps the original size onto the requested size
    C.scale (w/fromIntegral ow) (h/fromIntegral oh)
    C.setSourceSurface s 0 0
    pat <- C.getSource
    -- extend edge pixels outward so scaling does not bleed in transparency
    C.patternSetExtend pat C.ExtendPad
    C.setOperator C.OperatorSource
    C.paint
  return s'
-- | helper: returns new surface with only part of original content. does NOT cleanup!
createTrimmedSurface :: C.Surface -> Dim -> Render C.Surface
createTrimmedSurface s (D x y w h) = do
  s' <- liftIO $ C.createSimilarSurface s C.ContentColorAlpha (round w) (round h)
  C.renderWith s' $ do
    -- shift the source so the requested region starts at the origin
    C.setSourceSurface s (-x) (-y)
    C.setOperator C.OperatorSource
    C.rectangle 0 0 w h
    C.fill
  return s'
-- | copy a region of one surface onto a region of another with the given
-- operator, going through temporary trimmed/scaled surfaces only when the
-- source region or size actually requires it
copyFromToSurface :: Operator -> C.Surface -> Dim -> C.Surface -> Dim -> Render ()
copyFromToSurface op src sdim@(D sx sy sw sh) dest (D x y w h) = do
  ow <- C.imageSurfaceGetWidth src
  oh <- C.imageSurfaceGetHeight src
  -- trim if the source region is not the whole surface;
  -- rescale if source and destination sizes differ (after rounding)
  let needsTrim = sx/=0 || sy/=0 || round sw/=ow || round sh/=oh
      needsRescale = round sw/=round w || round sh/=round h
  s' <- if needsTrim then createTrimmedSurface src sdim else return src
  s'' <- if needsRescale then createScaledSurface s' (V2 w h) else return s'
  C.renderWith dest $ do
    C.save
    C.setSourceSurface s'' x y
    C.setOperator op
    C.rectangle x y w h
    C.fill
    C.restore
  -- free only the temporary surfaces created above, never 'src' itself
  when needsTrim $ C.surfaceFinish s'
  when needsRescale $ C.surfaceFinish s''
-- | Set the current font (face, slant, weight and size) on the Cairo context
setFont :: Font -> Render ()
setFont (Font face sz bold italic) = do
  C.selectFontFace face
    (if italic then C.FontSlantItalic else C.FontSlantNormal )
    (if bold then C.FontWeightBold else C.FontWeightNormal)
  C.setFontSize sz
| apirogov/cairo-canvas | src/Graphics/Rendering/Cairo/Canvas.hs | mit | 19,376 | 0 | 16 | 4,297 | 6,151 | 3,169 | 2,982 | -1 | -1 |
-- | Return every element that is strictly greater than both of its
-- immediate neighbours. The first and last element never qualify, since
-- they each have only one neighbour.
--
-- Fixes: the old @length list > 2@ guard traversed the whole list and was
-- redundant ('neighborList' already yields @[]@ for lists shorter than 3);
-- the manual recursion in 'neighborList' is replaced by 'zip3'.
localMaxima :: [Integer] -> [Integer]
localMaxima list = [b | (a, b, c) <- neighborList list, a < b && b > c]

-- | All overlapping triples of neighbouring elements, left to right;
-- empty for lists with fewer than three elements.
neighborList :: [Integer] -> [(Integer, Integer, Integer)]
neighborList xs = zip3 xs (drop 1 xs) (drop 2 xs)
{-# LANGUAGE MultiParamTypeClasses #-}
module Text.Documentalist.Writer ( module Text.Documentalist.Types.DocBlock
, module Text.Documentalist.Types.Package
, Writer(..)
) where
import Control.Monad.IO.Class
import Text.Documentalist.Types.DocBlock
import Text.Documentalist.Types.Package
-- | Generates formatted output from 'DocBlock's.
-- The 'MonadIO' superclass lets instances perform whatever IO is needed
-- to emit their output.
class MonadIO w => Writer w where
    -- | Writes formatted documentation to a destination determined by the specific 'Writer' used.
    --
    -- Any errors will be indicated with a thrown 'Exception'.
    write :: Package (Maybe DocBlock) -> w ()
| jspahrsummers/documentalist | Text/Documentalist/Writer.hs | mit | 691 | 0 | 10 | 188 | 97 | 61 | 36 | 9 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
module JSDOM.Custom.XMLHttpRequest (
module Generated
, XHRError(..)
, send
, sendString
, sendArrayBuffer
, sendBlob
, sendDocument
, sendFormData
) where
import Prelude ()
import Prelude.Compat
import Data.Typeable (Typeable)
import Control.Concurrent.MVar (takeMVar, newEmptyMVar, putMVar)
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Catch (onException, bracket, throwM)
import Control.Exception (Exception(..))
import Control.Lens.Operators ((^.))
import Language.Javascript.JSaddle
(js0, js1, ToJSString, ToJSVal(..), JSVal)
import JSDOM.Types
(DOM, MonadDOM, liftDOM, FormData(..), IsDocument, IsBlob, IsArrayBufferView)
import JSDOM.EventM (onAsync)
import JSDOM.Generated.XMLHttpRequest as Generated hiding (send)
import JSDOM.Generated.XMLHttpRequestEventTarget as Generated
-- | Ways an XHR request can fail (thrown by the @send*@ functions).
data XHRError = XHRError   -- ^ the request's @error@ event fired
              | XHRAborted -- ^ the request's @abort@ event fired
              deriving (Show, Eq, Typeable)

instance Exception XHRError
-- | Rethrow a recorded request failure, if any; succeed quietly otherwise.
throwXHRError :: MonadDOM m => Maybe XHRError -> m ()
throwXHRError Nothing    = return ()
throwXHRError (Just err) = liftDOM (throwM err)
-- | Run an action while an event listener is installed. The acquire step
-- returns the listener's release action, which 'bracket' guarantees to run
-- afterwards even if the inner action throws.
withEvent :: DOM (DOM ()) -> DOM a -> DOM a
withEvent aquire = bracket aquire id . const
-- | Shared implementation of the @send*@ functions: installs listeners for
-- the @error@, @abort@ and @load@ events, calls the JavaScript @send@
-- method (with or without a payload) and blocks on an 'MVar' until one of
-- the three events fires. If an exception escapes, the request is aborted.
send' :: (MonadDOM m) => XMLHttpRequest -> Maybe JSVal -> m ()
send' self mbVal = liftDOM $ (`onException` abort self) $ do
    result <- liftIO newEmptyMVar
    -- each listener records the outcome: Just an error, or Nothing on load
    r <- withEvent (onAsync self Generated.error . liftIO $ putMVar result (Just XHRError)) $
         withEvent (onAsync self abortEvent . liftIO $ putMVar result (Just XHRAborted)) $
         withEvent (onAsync self load . liftIO $ putMVar result Nothing) $ do
            void $
              case mbVal of
                Nothing  -> self ^. js0 "send"
                Just val -> self ^. js1 "send" val
            liftIO $ takeMVar result
    throwXHRError r
-- | Send the request with no body; blocks until it finishes.
-- <https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest#send() Mozilla XMLHttpRequest.send documentation>
send :: (MonadDOM m) => XMLHttpRequest -> m ()
send self = send' self Nothing
-- | Send the request with a string body.
-- <https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest#send() Mozilla XMLHttpRequest.send documentation>
sendString :: (MonadDOM m, ToJSString str) => XMLHttpRequest -> str -> m ()
sendString self str = liftDOM $ toJSVal str >>= send' self . Just
-- | Send the request with an array-buffer view body.
-- <https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest#send() Mozilla XMLHttpRequest.send documentation>
sendArrayBuffer :: (MonadDOM m, IsArrayBufferView view) => XMLHttpRequest -> view -> m ()
sendArrayBuffer self view = liftDOM $ toJSVal view >>= send' self . Just
-- | Send the request with a 'Blob' body.
-- <https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest#send() Mozilla XMLHttpRequest.send documentation>
sendBlob :: (MonadDOM m, IsBlob blob) => XMLHttpRequest -> blob -> m ()
sendBlob self blob = liftDOM $ toJSVal blob >>= send' self . Just
-- | Send the request with a document body.
-- <https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest#send() Mozilla XMLHttpRequest.send documentation>
sendDocument :: (MonadDOM m, IsDocument doc) => XMLHttpRequest -> doc -> m ()
sendDocument self doc = liftDOM $ toJSVal doc >>= send' self . Just
-- | Send the request with a 'FormData' body.
-- <https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest#send() Mozilla XMLHttpRequest.send documentation>
sendFormData :: (MonadDOM m) => XMLHttpRequest -> FormData -> m ()
sendFormData self formData = liftDOM $ toJSVal formData >>= send' self . Just
| ghcjs/jsaddle-dom | src/JSDOM/Custom/XMLHttpRequest.hs | mit | 3,450 | 0 | 17 | 638 | 937 | 500 | 437 | 58 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Views.Index (render) where
import Text.Blaze.Html5
import qualified Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes
import Text.Blaze.Html.Renderer.Text
-- | Static demo page: a heading, a two-item list and a puppy image.
-- Fixes: the exported binding previously had no type signature, and every
-- level was wrapped in a redundant single-statement @do@ block.
render :: Html
render =
  html $
    body $ do
      h1 "wow, such puppy"
      ul $ do
        li "much wag"
        li "woof"
      img ! src "puppy.jpg" ! alt "puppy!"
| Pholey/place-puppy | Placepuppy/Views/Index.hs | mit | 369 | 0 | 16 | 90 | 103 | 55 | 48 | 14 | 1 |
{-
(*) Modified run-length encoding.
Modify the result of problem 10 in such a way that if an element has no
duplicates it is simply copied into the result list. Only elements with
duplicates are transferred as (N E) lists.
Example in Haskell:
ghci> encodeModified "aaaabccaadeeee"
[Multiple 4 'a', Single 'b', Multiple 2 'c',
Multiple 2 'a', Single 'd', Multiple 4 'e']
-}
import Data.List
-- | A run-length item: either a single element or a counted repetition.
data Item a = Multiple Int a | Single a deriving (Show)

-- | Run-length encode a list, but keep elements without duplicates as
-- plain 'Single' items instead of one-element runs.
encodeModified :: (Eq a) => [a] -> [Item a]
encodeModified = map toItem . group
  where
    -- group never produces empty runs, so these patterns cover all inputs
    toItem [x]       = Single x
    toItem run@(x:_) = Multiple (length run) x
    toItem []        = error "encodeModified: impossible, group yields non-empty runs"
| gaoce/haskell_99 | 11.hs | mit | 638 | 0 | 12 | 147 | 119 | 64 | 55 | 4 | 2 |
-- (c) The University of Glasgow 2006
{-# LANGUAGE CPP, ScopedTypeVariables #-}
-- For Functor SCC. ToDo: Remove me when 7.10 is released
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Gen2.GHC.Digraph(
Graph, graphFromEdgedVertices,
graphFromVerticesAndAdjacency,
SCC(..), Node, flattenSCC, flattenSCCs,
stronglyConnCompG,
topologicalSortG, dfsTopSortG,
verticesG, edgesG, hasVertexG,
reachableG, reachablesG, transposeG,
outdegreeG, indegreeG,
vertexGroupsG, emptyG,
componentsG,
findCycle,
-- For backwards compatability with the simpler version of Digraph
stronglyConnCompFromEdgedVertices, stronglyConnCompFromEdgedVerticesR,
) where
-- #include "HsVersions.h"
------------------------------------------------------------------------------
-- A version of the graph algorithms described in:
--
-- ``Lazy Depth-First Search and Linear IntGraph Algorithms in Haskell''
-- by David King and John Launchbury
--
-- Also included is some additional code for printing tree structures ...
--
-- If you ever find yourself in need of algorithms for classifying edges,
-- or finding connected/biconnected components, consult the history; Sigbjorn
-- Finne contributed some implementations in 1997, although we've since
-- removed them since they were not used anywhere in GHC.
------------------------------------------------------------------------------
import Prelude
import Util ( minWith, count )
import Outputable
import Maybes ( expectJust )
import MonadUtils ( allM )
-- Extensions
import Control.Monad ( filterM, liftM, liftM2 )
import Control.Monad.ST
-- std interfaces
import Data.Maybe
import Data.Array
import Data.List hiding (transpose)
import Data.Ord
import Data.Array.ST
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Graph as G
import Data.Graph hiding (Graph, Edge, transposeG, reachable)
import Data.Tree
{-
************************************************************************
* *
* Graphs and Graph Construction
* *
************************************************************************
Note [Nodes, keys, vertices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* A 'node' is a big blob of client-stuff
* Each 'node' has a unique (client) 'key', but the latter
is in Ord and has fast comparison
* Digraph then maps each 'key' to a Vertex (Int) which is
arranged densely in 0.n
-}
-- | A graph over client nodes, backed by a dense integer graph plus the
-- two mappings needed to translate between vertices and nodes.
data Graph node = Graph {
    gr_int_graph      :: IntGraph,                 -- ^ underlying adjacency structure over dense vertices
    gr_vertex_to_node :: Vertex -> node,           -- ^ total mapping from dense vertex to client node
    gr_node_to_vertex :: node -> Maybe Vertex      -- ^ partial inverse; 'Nothing' for unknown nodes
  }

data Edge node = Edge node node

type Node key payload = (payload, key, [key])
  -- The payload is user data, just carried around in this module
  -- The keys are ordered
  -- The [key] are the dependencies of the node;
  --   it's ok to have extra keys in the dependencies that
  --   are not the key of any Node in the graph

-- | A graph with no vertices. Looking up any node yields 'Nothing';
-- the vertex-to-node function can never be called on a valid vertex,
-- hence the 'error'.
emptyGraph :: Graph a
emptyGraph = Graph (array (1, 0) []) (error "emptyGraph") (const Nothing)
-- | Build a 'Graph' from nodes that carry their own dependency key lists.
graphFromEdgedVertices
        :: Ord key
        => [Node key payload]           -- The graph; its ok for the
                                        -- out-list to contain keys which arent
                                        -- a vertex key, they are ignored
        -> Graph (Node key payload)
graphFromEdgedVertices []              = emptyGraph
graphFromEdgedVertices edged_vertices  = Graph graph vertex_fn (key_vertex . key_extractor)
  where key_extractor (_, k, _) = k
        (bounds, vertex_fn, key_vertex, numbered_nodes) = reduceNodesIntoVertices edged_vertices key_extractor
        -- adjacency array keeps only dependencies that resolve to known keys
        graph = array bounds [(v, mapMaybe key_vertex ks) | (v, (_, _, ks)) <- numbered_nodes]
-- | Build a 'Graph' from a vertex list and a separate edge list.
graphFromVerticesAndAdjacency
        :: Ord key
        => [(node, key)]
        -> [(key, key)]  -- First component is source vertex key,
                         -- second is target vertex key (thing depended on)
                         -- Unlike the other interface I insist they correspond to
                         -- actual vertices because the alternative hides bugs. I can't
                         -- do the same thing for the other one for backcompat reasons.
        -> Graph (node, key)
graphFromVerticesAndAdjacency []       _     = emptyGraph
graphFromVerticesAndAdjacency vertices edges = Graph graph vertex_node (key_vertex . key_extractor)
  where key_extractor = snd
        (bounds, vertex_node, key_vertex, _) = reduceNodesIntoVertices vertices key_extractor
        -- edges must reference existing vertices; a missing key is a caller
        -- bug and fails loudly via expectJust
        key_vertex_pair (a, b) = (expectJust "graphFromVerticesAndAdjacency" $ key_vertex a,
                                  expectJust "graphFromVerticesAndAdjacency" $ key_vertex b)
        reduced_edges = map key_vertex_pair edges
        graph = buildG bounds reduced_edges
-- | Assign each node a dense 'Vertex' number (0..n-1) in key order, and
-- return the array bounds, the vertex->node lookup, the key->vertex lookup
-- and the numbered node list.
reduceNodesIntoVertices
        :: Ord key
        => [node]
        -> (node -> key)
        -> (Bounds, Vertex -> node, key -> Maybe Vertex, [(Int, node)])
reduceNodesIntoVertices nodes key_extractor = (bounds, (!) vertex_map, key_vertex, numbered_nodes)
  where
    max_v          = length nodes - 1
    bounds         = (0, max_v) :: (Vertex, Vertex)

    -- nodes are numbered in sorted key order so key->vertex lookup
    -- can binary-search over key_map below
    sorted_nodes   = sortBy (comparing key_extractor) nodes
    numbered_nodes = zipWith (,) [0..] sorted_nodes

    key_map    = array bounds [(i, key_extractor node) | (i, node) <- numbered_nodes]
    vertex_map = array bounds numbered_nodes

    --key_vertex :: key -> Maybe Vertex
    -- binary search over key_map;
    -- returns Nothing for non-interesting vertices
    key_vertex k = find 0 max_v
      where
        find a b | a > b = Nothing
                 | otherwise = let mid = (a + b) `div` 2
                               in case compare k (key_map ! mid) of
                                    LT -> find a (mid - 1)
                                    EQ -> Just mid
                                    GT -> find (mid + 1) b
{-
************************************************************************
* *
* SCC
* *
************************************************************************
-}
-- | A breadth-first search work item: the node at the tip of a path plus
-- the payloads already walked to reach it.
type WorkItem key payload
  = (Node key payload,  -- Tip of the path
     [payload])         -- Rest of the path;
                        --  [a,b,c] means c depends on b, b depends on a

-- | Find a reasonably short cycle a->b->c->a, in a strongly
-- connected component.  The input nodes are presumed to be
-- a SCC, so you can start anywhere.
findCycle :: forall payload key. Ord key
          => [Node key payload]     -- The nodes.  The dependencies can
                                    -- contain extra keys, which are ignored
          -> Maybe [payload]        -- A cycle, starting with node
                                    -- so each depends on the next
findCycle graph
  = go Set.empty (new_work root_deps []) []
  where
    env :: Map.Map key (Node key payload)
    env = Map.fromList [ (key, node) | node@(_, key, _) <- graph ]

    -- Find the node with fewest dependencies among the SCC modules
    -- This is just a heuristic to find some plausible root module
    root :: Node key payload
    root = fst (minWith snd [ (node, count (`Map.member` env) deps)
                            | node@(_,_,deps) <- graph ])
    (root_payload,root_key,root_deps) = root

    -- 'go' implements Dijkstra's algorithm, more or less
    go :: Set.Set key      -- Visited
       -> [WorkItem key payload]  -- Work list, items length n
       -> [WorkItem key payload]  -- Work list, items length n+1
       -> Maybe [payload]         -- Returned cycle
    -- Invariant: in a call (go visited ps qs),
    --            visited = union (map tail (ps ++ qs))
    go _       [] [] = Nothing   -- No cycles
    go visited [] qs = go visited qs []   -- move to the next path length
    go visited (((payload,key,deps), path) : ps) qs
       | key == root_key          = Just (root_payload : reverse path)
       | key `Set.member` visited = go visited ps qs
       | key `Map.notMember` env  = go visited ps qs  -- dependency outside the SCC
       | otherwise                = go (Set.insert key visited)
                                       ps (new_qs ++ qs)
       where
         new_qs = new_work deps (payload : path)

    -- Extend the path by one step into each in-scope dependency.
    new_work :: [key] -> [payload] -> [WorkItem key payload]
    new_work deps path = [ (n, path) | Just n <- map (`Map.lookup` env) deps ]
{-
************************************************************************
* *
* Strongly Connected Component wrappers for Graph
* *
************************************************************************
Note: the components are returned topologically sorted: later components
depend on earlier ones, but not vice versa i.e. later components only have
edges going from them to earlier ones.
-}
-- | Strongly connected components of a 'Graph', topologically sorted
-- (see the module note above on ordering).
stronglyConnCompG :: Graph node -> [SCC node]
stronglyConnCompG graph = decodeSccs graph forest
  where forest = {-# SCC "Digraph.scc" #-} scc (gr_int_graph graph)

-- | Translate a depth-first forest of SCCs back into node terms.  A
-- singleton tree is only a genuine cycle when the vertex mentions itself.
decodeSccs :: Graph node -> Forest Vertex -> [SCC node]
decodeSccs Graph { gr_int_graph = graph, gr_vertex_to_node = vertex_fn } forest
  = map decode forest
  where
    decode (Node v []) | mentions_itself v = CyclicSCC [vertex_fn v]
                       | otherwise         = AcyclicSCC (vertex_fn v)
    decode other = CyclicSCC (dec other [])
      where dec (Node v ts) vs = vertex_fn v : foldr dec vs ts
    mentions_itself v = v `elem` (graph ! v)
-- The following two versions are provided for backwards compatibility:
-- | SCCs of a list of edged vertices, keeping only the payloads.
stronglyConnCompFromEdgedVertices
        :: Ord key
        => [Node key payload]
        -> [SCC payload]
stronglyConnCompFromEdgedVertices
  = map (fmap get_node) . stronglyConnCompFromEdgedVerticesR
  where get_node (n, _, _) = n  -- drop key and dependencies

-- The "R" interface is used when you expect to apply SCC to
-- (some of) the result of SCC, so you don't want to lose the dependency info
stronglyConnCompFromEdgedVerticesR
        :: Ord key
        => [Node key payload]
        -> [SCC (Node key payload)]
stronglyConnCompFromEdgedVerticesR = stronglyConnCompG . graphFromEdgedVertices
{-
************************************************************************
* *
* Misc wrappers for Graph
* *
************************************************************************
-}
-- | Vertices of the graph in topological order, mapped back to nodes.
topologicalSortG :: Graph node -> [node]
topologicalSortG graph = map (gr_vertex_to_node graph) result
  where result = {-# SCC "Digraph.topSort" #-} topSort (gr_int_graph graph)

-- | Topological sort refined into depth-first spanning trees: one list of
-- nodes per tree of the DFS forest started from the topological order.
dfsTopSortG :: Graph node -> [[node]]
dfsTopSortG graph =
  map (map (gr_vertex_to_node graph) . flatten) $ dfs g (topSort g)
  where
    g = gr_int_graph graph
-- | All nodes reachable from the given node, which must be in the graph
-- (otherwise the 'expectJust' panics).
reachableG :: Graph node -> node -> [node]
reachableG graph from = map (gr_vertex_to_node graph) result
  where from_vertex = expectJust "reachableG" (gr_node_to_vertex graph from)
        result = {-# SCC "Digraph.reachable" #-} reachable (gr_int_graph graph) [from_vertex]

-- | All nodes reachable from any of the given nodes; start nodes that are
-- not in the graph are silently ignored (note the pattern-match filter).
reachablesG :: Graph node -> [node] -> [node]
reachablesG graph froms = map (gr_vertex_to_node graph) result
  where result = {-# SCC "Digraph.reachable" #-}
                 reachable (gr_int_graph graph) vs
        vs = [ v | Just v <- map (gr_node_to_vertex graph) froms ]
-- | Whether the given node is a vertex of the graph.
hasVertexG :: Graph node -> node -> Bool
hasVertexG graph = isJust . gr_node_to_vertex graph
-- | All nodes of the graph.
verticesG :: Graph node -> [node]
verticesG graph = map (gr_vertex_to_node graph) $ vertices (gr_int_graph graph)

-- | All edges of the graph, translated back from vertex ids to nodes.
edgesG :: Graph node -> [Edge node]
edgesG graph = map (\(v1, v2) -> Edge (v2n v1) (v2n v2)) $ edges (gr_int_graph graph)
  where v2n = gr_vertex_to_node graph

-- | Reverse every edge; the vertex/node mappings are reused unchanged.
transposeG :: Graph node -> Graph node
transposeG graph = Graph (G.transposeG (gr_int_graph graph))
                         (gr_vertex_to_node graph)
                         (gr_node_to_vertex graph)
-- | Out-degree of a node; 'Nothing' when the node is not in the graph.
outdegreeG :: Graph node -> node -> Maybe Int
outdegreeG = degreeG outdegree

-- | In-degree of a node; 'Nothing' when the node is not in the graph.
indegreeG :: Graph node -> node -> Maybe Int
indegreeG = degreeG indegree

-- | Shared implementation: build the requested degree table once, then
-- look the node's vertex up in it.
degreeG :: (G.Graph -> Table Int) -> Graph node -> node -> Maybe Int
degreeG degree graph node = let table = degree (gr_int_graph graph)
                            in fmap ((!) table) $ gr_node_to_vertex graph node
-- | Dependency groups of the graph (see 'vertexGroups'), mapped to nodes.
vertexGroupsG :: Graph node -> [[node]]
vertexGroupsG graph = map (map (gr_vertex_to_node graph)) result
  where result = vertexGroups (gr_int_graph graph)

-- | Whether the graph has no vertices at all.
emptyG :: Graph node -> Bool
emptyG g = graphEmpty (gr_int_graph g)

-- | Weakly connected components, each as a list of nodes.
componentsG :: Graph node -> [[node]]
componentsG graph = map (map (gr_vertex_to_node graph) . flatten)
                  $ components (gr_int_graph graph)
{-
************************************************************************
* *
* Showing Graphs
* *
************************************************************************
-}
-- | Render a graph as its vertex list followed by its edge list.
instance Outputable node => Outputable (Graph node) where
    ppr graph = vcat [
                  hang (text "Vertices:") 2 (vcat (map ppr $ verticesG graph)),
                  hang (text "Edges:") 2 (vcat (map ppr $ edgesG graph))
                ]

-- | Render an edge as @from -> to@.
instance Outputable node => Outputable (Edge node) where
    ppr (Edge from to) = ppr from <+> text "->" <+> ppr to
-- | True iff the underlying array has an empty index range, i.e. the graph
-- contains no vertices at all.
graphEmpty :: G.Graph -> Bool
graphEmpty g = case bounds g of
                 (lo, hi) -> lo > hi
{-
************************************************************************
* *
* IntGraphs
* *
************************************************************************
-}
-- | Alias for the underlying vertex-indexed graph from "Data.Graph".
type IntGraph = G.Graph

-- Functor instance was added in 7.8, in containers 0.5.3.2 release
-- ToDo: Drop me when 7.10 is released.
#if __GLASGOW_HASKELL__ < 708
instance Functor SCC where
    fmap f (AcyclicSCC v) = AcyclicSCC (f v)
    fmap f (CyclicSCC vs) = CyclicSCC (fmap f vs)
#endif
{-
------------------------------------------------------------
-- Depth first search numbering
------------------------------------------------------------
-}
-- Data.Tree has flatten for Tree, but nothing for Forest
-- | Pre-order traversal of a whole 'Forest'.  Data.Tree has 'flatten' for a
-- single 'Tree' but nothing for a 'Forest'; 'concatMap' replaces the
-- original's @concat (map ...)@ (same result, standard idiom).
preorderF :: Forest a -> [a]
preorderF = concatMap flatten
{-
------------------------------------------------------------
-- Finding reachable vertices
------------------------------------------------------------
-}
-- This generalizes reachable which was found in Data.Graph
-- | Vertices reachable from any of the given start vertices, collected in
-- pre-order from the DFS forest.
reachable :: IntGraph -> [Vertex] -> [Vertex]
reachable g vs = preorderF (dfs g vs)
{-
------------------------------------------------------------
-- Total ordering on groups of vertices
------------------------------------------------------------
The plan here is to extract a list of groups of elements of the graph
such that each group has no dependence except on nodes in previous
groups (i.e. in particular they may not depend on nodes in their own
group) and is maximal such group.
Clearly we cannot provide a solution for cyclic graphs.
We proceed by iteratively removing elements with no outgoing edges
and their associated edges from the graph.
This probably isn't very efficient and certainly isn't very clever.
-}
-- | A mutable set of vertices, represented as a boolean ST array.
type Set s = STArray s Vertex Bool

-- | A set (over the given bounds) containing no vertices.
mkEmpty :: Bounds -> ST s (Set s)
mkEmpty bnds = newArray bnds False

-- | Membership test.
contains :: Set s -> Vertex -> ST s Bool
contains m v = readArray m v

-- | Add a vertex to the set, in place.
include :: Set s -> Vertex -> ST s ()
include m v = writeArray m v True
-- | Partition the vertices into groups such that every group depends only
-- on vertices in earlier groups (see the note above); 'error's on cyclic
-- graphs, via 'vertexGroupsS'.
vertexGroups :: IntGraph -> [[Vertex]]
vertexGroups g = runST (mkEmpty (bounds g) >>= \provided -> vertexGroupsS provided g next_vertices)
  where next_vertices = noOutEdges g

-- | Vertices with no outgoing edges: the seeds of the first group.
noOutEdges :: IntGraph -> [Vertex]
noOutEdges g = [ v | v <- vertices g, null (g!v)]
-- | Worker for 'vertexGroups'.  @to_provide@ is the next group: vertices
-- whose dependencies have all been provided already.  When it is empty,
-- either every vertex has been provided (done) or the remainder of the
-- graph is cyclic, which is an error.
vertexGroupsS :: Set s -> IntGraph -> [Vertex] -> ST s [[Vertex]]
vertexGroupsS provided g to_provide
  = if null to_provide
    then do {
            all_provided <- allM (provided `contains`) (vertices g)
          ; if all_provided
            then return []
            else error "vertexGroup: cyclic graph"
          }
    else do {
            -- Mark this whole group as provided, then rescan for vertices
            -- that just became ready.
            mapM_ (include provided) to_provide
          ; to_provide' <- filterM (vertexReady provided g) (vertices g)
          ; rest <- vertexGroupsS provided g to_provide'
          ; return $ to_provide : rest
          }
-- | A vertex is ready when it has not been provided yet but every vertex it
-- points at has been.
vertexReady :: Set s -> IntGraph -> Vertex -> ST s Bool
vertexReady provided g v = liftM2 (&&) (liftM not $ provided `contains` v) (allM (provided `contains`) (g!v))
| ghcjs/ghcjs | src/Gen2/GHC/Digraph.hs | mit | 16,860 | 0 | 17 | 4,754 | 3,594 | 1,923 | 1,671 | 219 | 3 |
-- List length. Tail Recursion with "foldl".
module Length where
import Prelude hiding (length)
import Data.List (foldl)
-- | List length as a left fold: the accumulator counts elements; the element
-- itself is ignored.  NOTE: lazy 'foldl' builds an O(n) chain of (+1) thunks
-- before anything is evaluated -- that space behaviour is the whole point of
-- this example and is analysed in the comments below.
length :: [t] -> Integer
length = foldl (\lengthAccumulator item -> lengthAccumulator + 1) 0
{- GHCi>
length ""
length "1"
length "12"
-}
-- 0
-- 1
-- 2
-- foldl :: (b -> a -> b) -> b -> [a] -> b
-- foldl f z0 xs0
--
-- = lgo z0 xs0
--
-- where
--
-- lgo z [] = z
-- lgo z (x:xs) = lgo (z `f` x) xs
-- length "123"
-- = foldl (\lengthAccumulator item -> lengthAccumulator + 1) 0 "123"
-- = lgo 0 "123"
--
-- where f = \lengthAccumulator item -> lengthAccumulator + 1
--
-- = lgo 0 ('1' : "23")
-- ~> lgo ( 0 `f` '1' ) "23"
-- = lgo ( 0 `f` '1' ) ('2' : "3")
-- ~> lgo ( (0 `f` '1') `f` '2' ) "3"
-- = lgo ( (0 `f` '1') `f` '2' ) ('3' : [])
-- ~> lgo ( ( (0 `f` '1') `f` '2' ) `f` '3' ) []
--
-- ~> ( ( (0 `f` '1') `f` '2' ) `f` '3' )
-- ~> ( ( (0 + 1) `f` '2' ) `f` '3' )
--
-- ~> ( ( 1 `f` '2' ) `f` '3' )
-- ~> ( ( 1 + 1 ) `f` '3' )
--
-- ~> ( 2 `f` '3' )
-- ~> ( 2 + 1 )
--
-- ~> 3
-- Results of "length" on a list that increases at each element by a power of 10 in an infinite list:
-- | Infinite list of results of 'length' applied to lists of 10^1, 10^2, ...
-- elements.
samples :: [Integer]
samples = map (\power -> length [1 .. 10 ^ power]) [1 ..]
-- "sample n" represents the application of "length" to a list that contains 10^n elements:
-- | @sample n@ is the n-th entry of 'samples', i.e. the result of 'length'
-- on a list of 10^(n+1) elements.
sample :: Int -> Integer
sample = (samples !!)
-- Helper, that formats numbers in scientific notation with powers of 10:
-- | Format a power of ten in scientific-style notation: "10", "10^2", ...
-- Uses 'Double' with 'logBase' and 'round' instead of the original
-- 'Float'/'truncate': floating-point log can land just below an integer
-- (e.g. @logBase 10 1000 == 2.9999999999999996@ in Double), and 'truncate'
-- then reports an exponent one too small for an exact power of ten.
-- 'round' is correct for all exact powers.
format :: Integer -> String
format 10 = "10"
format integer =
  "10^" ++ show (round (logBase 10 (fromIntegral integer :: Double)) :: Integer)
{- GHCi>
:{
let count n = do putStrLn $ format $ sample n
count (n + 1)
in count 0
:}
-}
-- 10
-- 10^2
-- 10^3
-- 10^4
-- 10^5
-- 10^6
-- 10^7
--
-- Note: After little time there will be serious indications (system becomes slower) that
-- the program runs out of memory. And we know why.
-- The problem is the growing 'accumulator thunk' - the bigger the
-- input size, the more memory is needed to store this not yet evaluated expression:
--
-- ( ( ... ( (0 `f` "1") `f` "2" ) `f` ... ) `f` "N" )
--
-- N denotes the last element in a list of length N. Well on my computer this approach
-- came with an improvement: 10^7 is in the result list of tail recursion, while
-- primitive recursion got until 10^6.
-- Profiling time and allocation for "length" at an input list that contains 10^5 elements:
-------------------------------------------------------------------------------------------
-- | Entry point for time/allocation profiling: evaluate (and print) the
-- length of a list with 10^5 elements.
profile :: IO ()
profile = print $ length [1 .. 10^5]
--
-- System : Windows 8.1 Pro 64 Bit (6.3, Build 9600)
-- Intel(R) Core(TM) i5 CPU M 460 @ 2.53 GHz (4 CPUs), ~2.5 GHz
-- 4096 MB RAM
--
-- Antivirus / ... : deactivated
--
-- Compiler : GHC 7.8.3
--
--
-- Result:
--
-------------------------------------------------------------------------------------------
| pascal-knodel/haskell-craft | Examples/· Folds/length/foldl/Length.hs | mit | 3,389 | 0 | 11 | 1,180 | 286 | 191 | 95 | 14 | 1 |
import Data.List
import Data.Numbers.Primes
import System.IO
-- | The n-th triangle number, 1 + 2 + ... + n, via the closed form.
sum' n = (n * (n + 1)) `div` 2
-- | Exponent-plus-one of each prime in n's factorisation; the product of
-- this list is the number of divisors of n (for n >= 2).
nbFactors n = map (\x -> length x + 1) $ group $ primeFactors n
-- | First triangle number with more than 500 divisors (Project Euler 12).
-- The original scanned plain integers starting at 5000000 and never used
-- 'sum'' -- but the problem (and this file's name) is about triangle
-- numbers, so candidates are generated with 'sum'' here.
triangleNbs = head [t | t <- map sum' [1 ..], product (nbFactors t) > 500]
main = putStrLn $ show triangleNbs | t00n/ProjectEuler | triangleNbs.hs | epl-1.0 | 258 | 0 | 11 | 48 | 130 | 68 | 62 | 7 | 1 |
{-
Copyright (C) 2017 WATANABE Yuki <magicant@wonderwand.net>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE Safe #-}
{-|
Copyright : (C) 2017 WATANABE Yuki
License : GPL-2
Portability : non-portable (flexible instances)
This module defines elements for positioning source code characters and
describing origin of them.
-}
module Flesh.Source.Position (
Situation(..), Fragment(..), Position(..), dummyPosition, next,
Positioned, PositionedList(..), PositionedString, unposition,
headPosition, spread, dropP) where
import qualified Flesh.Language.Alias.Core as Alias
-- | Function definition.  A function definition is identified here by the
-- position at which it was defined.
type FunctionDefinition = Position

-- | Situation in which a code fragment is executed/evaluated.  This is the
-- "where did this code come from" half of a 'Fragment'.
data Situation =
    -- | Standard input.
    StandardInput
    -- | Script file.
  | File {
      -- | Path to the script file (for informative purposes only).
      path :: !String,
      -- | Position of the dot built-in that sourced this file. (Nothing if
      -- not sourced by the dot built-in.)
      dotBuiltinPosition :: !(Maybe Position)}
    -- | Command string evaluated by the eval built-in.
  | Eval {
      -- | Position of the eval built-in that evaluated this code.
      evalBuiltinPosition :: !Position}
    -- | Command Substitution.
  | CommandSubstitution {
      -- | Position at which substitution/expansion/function call occurred.
      position :: !Position}
    -- | Part of code that resulted from alias substitution.
  | Alias {
      position :: !Position,
      -- | Definition of the alias substituted.
      aliasDefinition :: !(Alias.Definition Position)}
    -- | Arithmetic expansion.
  | ArithmeticExpansion {
      position :: !Position}
    -- | Function call.
  | FunctionCall {
      position :: !Position,
      -- | Definition of the function called.
      functionDefinition :: !FunctionDefinition}
  deriving (Eq, Show)
-- | Source code fragment, typically a single line of code.
-- NOTE: the manual 'Eq' instance below deliberately ignores 'code'.
data Fragment = Fragment {
    -- | Source code.
    code :: String,
    -- | Situation in which the source code occurred.
    situation :: !Situation,
    -- | Line number (starts from 0).
    lineNo :: !Int}
  deriving (Show)
-- | Fragments compare equal when their situation and line number agree; the
-- 'code' text itself is deliberately not compared.
instance Eq Fragment where
  a == b = (situation a, lineNo a) == (situation b, lineNo b)
-- | Position of a character that occurs in a source code fragment.
data Position = Position {
    -- | Fragment whose code the index is to.
    fragment :: !Fragment,
    -- | Index to the character in the code of the fragment (starts from 0).
    index :: !Int}
  deriving (Eq, Show)
-- | A meaningless position wrapping the given code string; intended for
-- testing only (standard input, line 0, index 0).
dummyPosition :: String -> Position
dummyPosition c =
  Position { fragment = Fragment { code = c
                                 , situation = StandardInput
                                 , lineNo = 0 }
           , index = 0 }
-- | Advance a position to the following character of the same fragment.
next :: Position -> Position
next p = p { index = index p + 1 }
-- | Something with a record of position from which it originated.
type Positioned a = (Position, a)

-- | Like @['Positioned' a]@, but the last nil also has its position (the
-- position just past the final element).
data PositionedList a = Nil Position | (:~) (Positioned a) (PositionedList a)
  deriving Eq

infixr 5 :~

-- | Like @['Positioned' 'Char']@, but the last nil also has its position.
type PositionedString = PositionedList Char
-- | Forget the trailing nil's position, keeping only the positioned
-- elements.
unposition :: PositionedList a -> [Positioned a]
unposition = go
  where
    go (Nil _)   = []
    go (p :~ ps) = p : go ps
-- | Position of the first element, or of the nil for an empty list.
headPosition :: PositionedList a -> Position
headPosition l = case l of
  Nil p       -> p
  (p, _) :~ _ -> p
-- | Showing a positioned list ignores the positions entirely: it renders
-- just the plain list of elements.
instance Show a => Show (PositionedList a) where
  show = show . map snd . unposition
-- | Attach successive positions, starting at the given one, to each element
-- of the list; the final nil receives the position just past the last
-- element.
spread :: Position -> [a] -> PositionedList a
spread start = go start
  where
    go p []       = Nil p
    go p (y : ys) = (p, y) :~ go (next p) ys
-- | Drop the given number of leading items from a positioned list.  A count
-- larger than the list yields the final nil; a negative count never reaches
-- zero, so it also consumes the whole list down to the nil.
dropP :: Int -> PositionedList a -> PositionedList a
dropP count list = case (count, list) of
  (0, _)         -> list
  (_, Nil _)     -> list
  (n, _ :~ rest) -> (dropP $! n - 1) rest
-- vim: set et sw=2 sts=2 tw=78:
| magicant/flesh | src/Flesh/Source/Position.hs | gpl-2.0 | 4,800 | 2 | 12 | 981 | 814 | 467 | 347 | 89 | 1 |
-- moviendo_un_circulo.hs
-- Moviendo un círculo.
-- José A. Alonso Jiménez <jalonso@us.es>
-- Sevilla, 21 de Mayo de 2013
-- ---------------------------------------------------------------------
import Graphics.Gloss
-- | Open a 1800x820 window at screen offset (90,90) with a green background
-- and run 'animacion'; gloss feeds it the elapsed time.
main :: IO ()
main = animate (InWindow "Moviendo un circulo" (1800,820) (90,90)) green animacion
-- | A solid red circle of radius 25 moving rightwards at 50 units per
-- second, starting 900 units to the left of the window centre.
animacion :: Float -> Picture
animacion t = translate (50 * t - 900) 0 (color red (circleSolid 25))
-- Note: the variable t holds the elapsed time.  Its values are
-- [0,0.5..], so the displacements are [0,25,..].
| jaalonso/I1M-Cod-Temas | src/Tema_27/moviendo_un_circulo.hs | gpl-2.0 | 561 | 0 | 9 | 83 | 106 | 59 | 47 | 5 | 1 |
#!/usr/bin/env runghc
import WJR.Application
-- | Thin executable wrapper: delegate to the application's 'defaultMain'.
main :: IO ()
main = defaultMain
| drpowell/Prokka-web | prokka-web.hs | gpl-3.0 | 80 | 0 | 6 | 13 | 22 | 12 | 10 | 3 | 1 |
{-|
Module : Lipid.Parsers.CombinedRadyl.GlycerophospholipidSpec
Description :
Copyright : Michael Thomas
License : GPL-3
Maintainer : Michael Thomas <Michaelt293@gmail.com>
Stability : Experimental
-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeApplications #-}
module Lipid.Parsers.CombinedRadyl.GlycerophospholipidSpec where
import Lipid.Blocks
import Test.Hspec
import Lipid.CombinedRadyl.Glycerophospholipid
import Lipid.Parsers.CombinedRadyl.Glycerophospholipid
-- | Round-trip tests: each quasiquoted lipid shorthand must render back to
-- the same string via 'shorthand' (delta nomenclature) or 'nNomenclature'
-- (omega/n- nomenclature).  Type applications pin the position
-- representation where the quoter alone would leave it ambiguous.
spec :: Spec
spec = do
  describe "Test for quasiquoters and Shorthand instances" $ do
    it "QuasiQuoter for PA 34:0" $
      shorthand @ (PA (Maybe DeltaPosition)) [paMaybeDelta|PA 34:0|] `shouldBe` "PA 34:0"
    it "QuasiQuoter for PA 34:0" $
      shorthand @ (PA DeltaPosition) [paDelta|PA 34:0|] `shouldBe` "PA 34:0"
    it "QuasiQuoter for PA 34:1(9)(6)" $
      shorthand [paMaybeDelta|PA 34:1(9)(6)|] `shouldBe` "PA 34:1(9)(6)"
    it "QuasiQuoter for PA 34:1(9)(6)" $
      shorthand [paDelta|PA 34:1(9)(6)|] `shouldBe` "PA 34:1(9)(6)"
    it "QuasiQuoter for PA 34:1(9Z)(6Z)" $
      shorthand [paMaybeDelta|PA 34:1(9Z)(6Z)|] `shouldBe` "PA 34:1(9Z)(6Z)"
    it "QuasiQuoter for PA 34:1(9Z)(6Z)" $
      shorthand [paDelta|PA 34:1(9Z)(6Z)|] `shouldBe` "PA 34:1(9Z)(6Z)"
    it "QuasiQuoter for PA 34:1" $
      shorthand @ (PA (Maybe DeltaPosition)) [paMaybeDelta|PA 34:1|] `shouldBe` "PA 34:1"
    it "QuasiQuoter for PE 34:0" $
      shorthand @ (PE (Maybe DeltaPosition)) [peMaybeDelta|PE 34:0|] `shouldBe` "PE 34:0"
    it "QuasiQuoter for PE 34:0" $
      shorthand @ (PE DeltaPosition) [peDelta|PE 34:0|] `shouldBe` "PE 34:0"
    it "QuasiQuoter for PE 34:1(9)(6)" $
      shorthand [peMaybeDelta|PE 34:1(9)(6)|] `shouldBe` "PE 34:1(9)(6)"
    it "QuasiQuoter for PE 34:1(9)(6)" $
      shorthand [peDelta|PE 34:1(9)(6)|] `shouldBe` "PE 34:1(9)(6)"
    it "QuasiQuoter for PE 34:1(9Z)(6Z)" $
      shorthand [peMaybeDelta|PE 34:1(9Z)(6Z)|] `shouldBe` "PE 34:1(9Z)(6Z)"
    it "QuasiQuoter for PE 34:1(9Z)(6Z)" $
      shorthand [peDelta|PE 34:1(9Z)(6Z)|] `shouldBe` "PE 34:1(9Z)(6Z)"
    it "QuasiQuoter for PE 34:1" $
      shorthand @ (PE (Maybe DeltaPosition)) [peMaybeDelta|PE 34:1|] `shouldBe` "PE 34:1"
    it "QuasiQuoter for PG 34:1(9Z)(6Z)" $
      shorthand [pgMaybeDelta|PG 34:1(9Z)(6Z)|] `shouldBe` "PG 34:1(9Z)(6Z)"
    it "QuasiQuoter for PG 34:1(9Z)(6Z)" $
      shorthand [pgDelta|PG 34:1(9Z)(6Z)|] `shouldBe` "PG 34:1(9Z)(6Z)"
    it "QuasiQuoter for PG 34:1" $
      shorthand @ (PG (Maybe DeltaPosition)) [pgMaybeDelta|PG 34:1|] `shouldBe` "PG 34:1"
    it "QuasiQuoter for PS 34:1(9Z)(6Z)" $
      shorthand [psMaybeDelta|PS 34:1(9Z)(6Z)|] `shouldBe` "PS 34:1(9Z)(6Z)"
    it "QuasiQuoter for PS 34:1(9Z)(6Z)" $
      shorthand [psDelta|PS 34:1(9Z)(6Z)|] `shouldBe` "PS 34:1(9Z)(6Z)"
    it "QuasiQuoter for PS 34:1" $
      shorthand @ (PS (Maybe DeltaPosition)) [psMaybeDelta|PS 34:1|] `shouldBe` "PS 34:1"
    it "QuasiQuoter for PI 34:1(9Z)(6Z)" $
      shorthand [piMaybeDelta|PI 34:1(9Z)(6Z)|] `shouldBe` "PI 34:1(9Z)(6Z)"
    it "QuasiQuoter for PI 34:1(9Z)(6Z)" $
      shorthand [piDelta|PI 34:1(9Z)(6Z)|] `shouldBe` "PI 34:1(9Z)(6Z)"
    it "QuasiQuoter for PI 34:1" $
      shorthand @ (PI (Maybe DeltaPosition)) [piMaybeDelta|PI 34:1|] `shouldBe` "PI 34:1"
    it "QuasiQuoter for PGP 34:1(9Z)(6Z)" $
      shorthand [pgpMaybeDelta|PGP 34:1(9Z)(6Z)|] `shouldBe` "PGP 34:1(9Z)(6Z)"
    it "QuasiQuoter for PGP 34:1(9Z)(6Z)" $
      shorthand [pgpDelta|PGP 34:1(9Z)(6Z)|] `shouldBe` "PGP 34:1(9Z)(6Z)"
    it "QuasiQuoter for PGP 34:1" $
      shorthand @ (PGP (Maybe DeltaPosition)) [pgpMaybeDelta|PGP 34:1|] `shouldBe` "PGP 34:1"
    it "QuasiQuoter for PC 34:1(9Z)(6Z)" $
      shorthand [pcMaybeDelta|PC 34:1(9Z)(6Z)|] `shouldBe` "PC 34:1(9Z)(6Z)"
    it "QuasiQuoter for PC 34:1(9Z)(6Z)" $
      shorthand [pcDelta|PC 34:1(9Z)(6Z)|] `shouldBe` "PC 34:1(9Z)(6Z)"
    it "QuasiQuoter for PC 34:1" $
      shorthand @ (PC (Maybe DeltaPosition)) [pcMaybeDelta|PC 34:1|] `shouldBe` "PC 34:1"
    it "QuasiQuoter for PIP 34:1(9Z)(6Z)" $
      shorthand [pipMaybeDelta|PIP 34:1(9Z)(6Z)|] `shouldBe` "PIP 34:1(9Z)(6Z)"
    it "QuasiQuoter for PIP 34:1(9Z)(6Z)" $
      shorthand [pipDelta|PIP 34:1(9Z)(6Z)|] `shouldBe` "PIP 34:1(9Z)(6Z)"
    it "QuasiQuoter for PIP 34:1" $
      shorthand @ (PIP (Maybe DeltaPosition)) [pipMaybeDelta|PIP 34:1|] `shouldBe` "PIP 34:1"
    it "QuasiQuoter for PIP2 34:1(9Z)(6Z)" $
      shorthand [pip2MaybeDelta|PIP2 34:1(9Z)(6Z)|] `shouldBe` "PIP2 34:1(9Z)(6Z)"
    it "QuasiQuoter for PIP2 34:1(9Z)(6Z)" $
      shorthand [pip2Delta|PIP2 34:1(9Z)(6Z)|] `shouldBe` "PIP2 34:1(9Z)(6Z)"
    it "QuasiQuoter for PIP2 34:1" $
      shorthand @ (PIP2 (Maybe DeltaPosition)) [pip2MaybeDelta|PIP2 34:1|] `shouldBe` "PIP2 34:1"
  describe "Test for quasiquoters and nNomenclature instances" $ do
    it "QuasiQuoter for PA 34:0" $
      nNomenclature @ (PA (Maybe OmegaPosition)) [paMaybeOmega|PA 34:0|] `shouldBe` "PA 34:0"
    it "QuasiQuoter for PA 34:0" $
      -- NOTE(review): this case uses the paDelta quoter under a
      -- (PA OmegaPosition) type application while every sibling uses
      -- paOmega -- likely a copy-paste slip; confirm it typechecks/was
      -- intended.
      nNomenclature @ (PA OmegaPosition) [paDelta|PA 34:0|] `shouldBe` "PA 34:0"
    it "QuasiQuoter for PA 34:1(n-9)(n-6)" $
      nNomenclature [paMaybeOmega|PA 34:1(n-9)(n-6)|] `shouldBe` "PA 34:1(n-9)(n-6)"
    it "QuasiQuoter for PA 34:1(n-9)(n-6)" $
      nNomenclature [paOmega|PA 34:1(n-9)(n-6)|] `shouldBe` "PA 34:1(n-9)(n-6)"
    it "QuasiQuoter for PA 34:1(n-9Z)(n-6Z)" $
      nNomenclature [paMaybeOmega|PA 34:1(n-9)(n-6)|] `shouldBe` "PA 34:1(n-9)(n-6)"
    it "QuasiQuoter for PA 34:1(n-9)(n-6)" $
      nNomenclature [paOmega|PA 34:1(n-9)(n-6)|] `shouldBe` "PA 34:1(n-9)(n-6)"
    it "QuasiQuoter for PA 34:1" $
      nNomenclature @ (PA (Maybe OmegaPosition)) [paMaybeOmega|PA 34:1|] `shouldBe` "PA 34:1"
    it "QuasiQuoter for PE 34:1(n-9Z)(n-6Z)" $
      nNomenclature [peMaybeOmega|PE 34:1(n-9)(n-6)|] `shouldBe` "PE 34:1(n-9)(n-6)"
    it "QuasiQuoter for PE 34:1(n-9)(n-6)" $
      nNomenclature [peOmega|PE 34:1(n-9)(n-6)|] `shouldBe` "PE 34:1(n-9)(n-6)"
    it "QuasiQuoter for PE 34:1" $
      nNomenclature @ (PE (Maybe OmegaPosition)) [peMaybeOmega|PE 34:1|] `shouldBe` "PE 34:1"
    it "QuasiQuoter for PC 34:1(n-9Z)(n-6Z)" $
      nNomenclature [pcMaybeOmega|PC 34:1(n-9)(n-6)|] `shouldBe` "PC 34:1(n-9)(n-6)"
    it "QuasiQuoter for PC 34:1(n-9)(n-6)" $
      nNomenclature [pcOmega|PC 34:1(n-9)(n-6)|] `shouldBe` "PC 34:1(n-9)(n-6)"
    it "QuasiQuoter for PC 34:1" $
      nNomenclature @ (PC (Maybe OmegaPosition)) [pcMaybeOmega|PC 34:1|] `shouldBe` "PC 34:1"
    it "QuasiQuoter for PS 34:1(n-9Z)(n-6Z)" $
      nNomenclature [psMaybeOmega|PS 34:1(n-9)(n-6)|] `shouldBe` "PS 34:1(n-9)(n-6)"
    it "QuasiQuoter for PS 34:1(n-9)(n-6)" $
      nNomenclature [psOmega|PS 34:1(n-9)(n-6)|] `shouldBe` "PS 34:1(n-9)(n-6)"
    it "QuasiQuoter for PS 34:1" $
      nNomenclature @ (PS (Maybe OmegaPosition)) [psMaybeOmega|PS 34:1|] `shouldBe` "PS 34:1"
    it "QuasiQuoter for PG 34:1(n-9Z)(n-6Z)" $
      nNomenclature [pgMaybeOmega|PG 34:1(n-9)(n-6)|] `shouldBe` "PG 34:1(n-9)(n-6)"
    it "QuasiQuoter for PG 34:1(n-9)(n-6)" $
      nNomenclature [pgOmega|PG 34:1(n-9)(n-6)|] `shouldBe` "PG 34:1(n-9)(n-6)"
    it "QuasiQuoter for PG 34:1" $
      nNomenclature @ (PG (Maybe OmegaPosition)) [pgMaybeOmega|PG 34:1|] `shouldBe` "PG 34:1"
    it "QuasiQuoter for PGP 34:1(n-9Z)(n-6Z)" $
      nNomenclature [pgpMaybeOmega|PGP 34:1(n-9)(n-6)|] `shouldBe` "PGP 34:1(n-9)(n-6)"
    it "QuasiQuoter for PGP 34:1(n-9)(n-6)" $
      nNomenclature [pgpOmega|PGP 34:1(n-9)(n-6)|] `shouldBe` "PGP 34:1(n-9)(n-6)"
    it "QuasiQuoter for PGP 34:1" $
      nNomenclature @ (PGP (Maybe OmegaPosition)) [pgpMaybeOmega|PGP 34:1|] `shouldBe` "PGP 34:1"
    it "QuasiQuoter for PI 34:1(n-9Z)(n-6Z)" $
      nNomenclature [piMaybeOmega|PI 34:1(n-9)(n-6)|] `shouldBe` "PI 34:1(n-9)(n-6)"
    it "QuasiQuoter for PI 34:1(n-9)(n-6)" $
      nNomenclature [piOmega|PI 34:1(n-9)(n-6)|] `shouldBe` "PI 34:1(n-9)(n-6)"
    it "QuasiQuoter for PI 34:1" $
      nNomenclature @ (PI (Maybe OmegaPosition)) [piMaybeOmega|PI 34:1|] `shouldBe` "PI 34:1"
    it "QuasiQuoter for PIP 34:1(n-9Z)(n-6Z)" $
      nNomenclature [pipMaybeOmega|PIP 34:1(n-9)(n-6)|] `shouldBe` "PIP 34:1(n-9)(n-6)"
    it "QuasiQuoter for PIP 34:1(n-9)(n-6)" $
      nNomenclature [pipOmega|PIP 34:1(n-9)(n-6)|] `shouldBe` "PIP 34:1(n-9)(n-6)"
    it "QuasiQuoter for PIP 34:1" $
      nNomenclature @ (PIP (Maybe OmegaPosition)) [pipMaybeOmega|PIP 34:1|] `shouldBe` "PIP 34:1"
    it "QuasiQuoter for PIP2 34:1(n-9Z)(n-6Z)" $
      nNomenclature [pip2MaybeOmega|PIP2 34:1(n-9)(n-6)|] `shouldBe` "PIP2 34:1(n-9)(n-6)"
    it "QuasiQuoter for PIP2 34:1(n-9)(n-6)" $
      nNomenclature [pip2Omega|PIP2 34:1(n-9)(n-6)|] `shouldBe` "PIP2 34:1(n-9)(n-6)"
    it "QuasiQuoter for PIP2 34:1" $
      nNomenclature @ (PIP2 (Maybe OmegaPosition)) [pip2MaybeOmega|PIP2 34:1|] `shouldBe` "PIP2 34:1"
| Michaelt293/Lipid-Haskell | test/Lipid/Parsers/CombinedRadyl/GlycerophospholipidSpec.hs | gpl-3.0 | 8,898 | 0 | 17 | 1,649 | 1,948 | 1,079 | 869 | 143 | 1 |
-- Check whether a given term represents a binary tree
-- Typechecker does it.
-- | A binary tree carrying values of type @a@.
data Tree a = Empty | Node a (Tree a) (Tree a)

-- | Whether a term represents a binary tree.  Trivially 'True': the type
-- checker already guarantees any value of type @Tree a@ is one.
istree :: Tree a -> Bool
istree _ = True
| dannywillems/99-problems | haskell/p54.hs | gpl-3.0 | 170 | 0 | 8 | 39 | 52 | 28 | 24 | 3 | 1 |
{-# LANGUAGE PatternSynonyms #-}
{-|
Module : ASCII
Description : ASCII Control codes
Copyright : (c) Frédéric BISSON, 2015
License : GPL-3
Maintainer : zigazou@free.fr
Stability : experimental
Portability : POSIX
ASCII control codes.
-}
module Minitel.Constants.ASCII where
import Minitel.Type.MNatural (MNat)
default (MNat)
-- * ASCII control codes (0x00-0x1f)
pattern NUL = 0x00 -- Null
pattern SOH = 0x01 -- Start Of Heading
pattern STX = 0x02 -- Start of TeXt
pattern ETX = 0x03 -- End of TeXt
pattern EOT = 0x04 -- End Of Transmission
pattern ENQ = 0x05 -- ENQuiry
pattern ACK = 0x06 -- ACKnowledge
pattern BEL = 0x07 -- BELl (terminal alert)
pattern BS  = 0x08 -- BackSpace
pattern HT  = 0x09 -- Horizontal Tab
pattern LF  = 0x0a -- Line Feed / new line
pattern VT  = 0x0b -- Vertical Tab
pattern FF  = 0x0c -- Form Feed
pattern CR  = 0x0d -- Carriage Return
pattern SO  = 0x0e -- Shift Out
pattern SI  = 0x0f -- Shift In
pattern DLE = 0x10 -- Data Link Escape
pattern DC1 = 0x11 -- Device Control 1
pattern DC2 = 0x12 -- Device Control 2
pattern DC3 = 0x13 -- Device Control 3
pattern DC4 = 0x14 -- Device Control 4
pattern NAK = 0x15 -- Negative AcKnowledge
pattern SYN = 0x16 -- SYNchronous idle
pattern ETB = 0x17 -- End of Transmission Block
pattern CAN = 0x18 -- CANcel
pattern EM  = 0x19 -- End of Medium
pattern SUB = 0x1a -- SUBstitute
pattern ESC = 0x1b -- ESCape
pattern FS  = 0x1c -- File Separator
pattern GS  = 0x1d -- Group Separator
pattern RS  = 0x1e -- Record Separator
pattern US  = 0x1f -- Unit Separator
-- * ASCII visible characters (0x20-0x7e, plus DEL)
pattern Space        = 0x20 -- ' '
pattern Exclamation  = 0x21 -- '!'
pattern DoubleQuotes = 0x22 -- '"'
pattern Hash         = 0x23 -- '#'
pattern Dollar       = 0x24 -- '$'
pattern Percent      = 0x25 -- '%'
pattern Ampersand    = 0x26 -- '&'
pattern Quote        = 0x27 -- '\''
pattern LeftParen    = 0x28 -- '('
pattern RightParen   = 0x29 -- ')'
pattern Asterisk     = 0x2a -- '*'
pattern Plus         = 0x2b -- '+'
pattern Comma        = 0x2c -- ','
pattern Minus        = 0x2d -- '-'
pattern Period       = 0x2e -- '.'
pattern Slash        = 0x2f -- '/'
-- Digits '0'..'9'
pattern Digit0 = 0x30
pattern Digit1 = 0x31
pattern Digit2 = 0x32
pattern Digit3 = 0x33
pattern Digit4 = 0x34
pattern Digit5 = 0x35
pattern Digit6 = 0x36
pattern Digit7 = 0x37
pattern Digit8 = 0x38
pattern Digit9 = 0x39
pattern Colon        = 0x3a -- ':'
pattern SemiColon    = 0x3b -- ';'
pattern LessThan     = 0x3c -- '<'
pattern Equal        = 0x3d -- '='
pattern GreaterThan  = 0x3e -- '>'
pattern Question     = 0x3f -- '?'
pattern At           = 0x40 -- '@'
-- Uppercase letters 'A'..'Z'
pattern UpperA = 0x41
pattern UpperB = 0x42
pattern UpperC = 0x43
pattern UpperD = 0x44
pattern UpperE = 0x45
pattern UpperF = 0x46
pattern UpperG = 0x47
pattern UpperH = 0x48
pattern UpperI = 0x49
pattern UpperJ = 0x4a
pattern UpperK = 0x4b
pattern UpperL = 0x4c
pattern UpperM = 0x4d
pattern UpperN = 0x4e
pattern UpperO = 0x4f
pattern UpperP = 0x50
pattern UpperQ = 0x51
pattern UpperR = 0x52
pattern UpperS = 0x53
pattern UpperT = 0x54
pattern UpperU = 0x55
pattern UpperV = 0x56
pattern UpperW = 0x57
pattern UpperX = 0x58
pattern UpperY = 0x59
pattern UpperZ = 0x5a
pattern LeftSqBra    = 0x5b -- '['
pattern BackSlash    = 0x5c -- '\\'
pattern RightSqBra   = 0x5d -- ']'
pattern Circumflex   = 0x5e -- '^'
pattern UnderScore   = 0x5f -- '_'
pattern BackTick     = 0x60 -- '`'
-- Lowercase letters 'a'..'z'
pattern LowerA = 0x61
pattern LowerB = 0x62
pattern LowerC = 0x63
pattern LowerD = 0x64
pattern LowerE = 0x65
pattern LowerF = 0x66
pattern LowerG = 0x67
pattern LowerH = 0x68
pattern LowerI = 0x69
pattern LowerJ = 0x6a
pattern LowerK = 0x6b
pattern LowerL = 0x6c
pattern LowerM = 0x6d
pattern LowerN = 0x6e
pattern LowerO = 0x6f
pattern LowerP = 0x70
pattern LowerQ = 0x71
pattern LowerR = 0x72
pattern LowerS = 0x73
pattern LowerT = 0x74
pattern LowerU = 0x75
pattern LowerV = 0x76
pattern LowerW = 0x77
pattern LowerX = 0x78
pattern LowerY = 0x79
pattern LowerZ = 0x7a
pattern LeftCuBra    = 0x7b -- '{'
pattern Pipe         = 0x7c -- '|'
pattern RightCuBra   = 0x7d -- '}'
pattern Tilde        = 0x7e -- '~'
pattern Del          = 0x7f -- DELete (a control code, despite its position)
| Zigazou/HaMinitel | src/Minitel/Constants/ASCII.hs | gpl-3.0 | 4,536 | 0 | 5 | 1,526 | 1,084 | 564 | 520 | 132 | 0 |
{-| Este modulo contiene la declaracion de los posibles tipos para
los términos de las (pre-)expresiones. Como en las pre-expresiones,
declaramos un tipo de datos general que nos permite utilizar muchas
funciones e idiomas estándares de Haskell. -}
{-# Language TypeSynonymInstances,FlexibleInstances #-}
module Equ.Types where
import Data.Text (Text, pack, unpack)
import qualified Data.Text as T (head)
import Data.Char
import Control.Applicative
import Test.QuickCheck(Arbitrary, arbitrary, elements, oneof)
import Data.Monoid
import qualified Data.Foldable as F
import Data.Traversable
import Data.Serialize(Serialize, get, getWord8, put, putWord8)
-- | Atomic (non-composite) types.
data AtomTy = ATyNum  -- ^ The real numbers.
            | ATyInt  -- ^ The integers.
            | ATyNat  -- ^ The naturals.
            | ATyBool -- ^ Corresponds to propositional formulas.
    deriving (Eq)

-- | Concrete names used when rendering atomic types.
instance Show AtomTy where
    show t = case t of
               ATyNum  -> "Num"
               ATyInt  -> "Int"
               ATyNat  -> "Nat"
               ATyBool -> "Bool"
-- | Type variables.
type TyVarName = Text

infixr 8 :->

-- | A polymorphic type so we get the standard Functor/Monad instances and
-- idioms; type variables are assumed universally quantified.
data Type' v = TyUnknown           -- ^ Represents missing type information.
             | TyVar v             -- ^ Type variable.
             | TyList (Type' v)    -- ^ Lists.
             | TyAtom AtomTy       -- ^ Atomic types.
             | Type' v :-> Type' v -- ^ Function spaces.
    deriving (Eq)
-- | Map over the type variables; atoms and unknowns are left untouched.
instance Functor Type' where
    fmap g ty = case ty of
        TyVar v      -> TyVar (g v)
        TyList inner -> TyList (fmap g inner)
        dom :-> cod  -> fmap g dom :-> fmap g cod
        TyAtom a     -> TyAtom a
        TyUnknown    -> TyUnknown
-- | Applicative structure over type variables.
-- NOTE(review): this instance is unusual -- '<*>' pattern-matches on the
-- *function* side and cases such as @TyAtom a <*> TyVar _@ discard the
-- argument entirely, so the Applicative laws (homomorphism, interchange)
-- are unlikely to hold in general. Left byte-identical; confirm intended
-- semantics before relying on it generically.
instance Applicative Type' where
    pure = TyVar
    _ <*> TyUnknown = TyUnknown
    _ <*> TyAtom a = TyAtom a
    TyVar f <*> TyVar v = TyVar $ f v
    TyAtom a <*> TyVar _ = TyAtom a
    TyUnknown <*> TyVar _ = TyUnknown
    TyList f <*> TyVar v = TyList $ f <*> TyVar v
    (f :-> f') <*> TyVar v = (f <*> TyVar v) :-> (f' <*> TyVar v)
    f <*> TyList t = TyList $ (f <*> t)
    f <*> t :-> t' = (f <*> t) :-> (f <*> t')
-- | Fold over the type variables, left to right.
instance F.Foldable Type' where
    foldMap f (TyVar e) = f e
    foldMap f (TyList t) = F.foldMap f t
    foldMap f (t :-> t') = F.foldMap f t `mappend` F.foldMap f t'
    foldMap _ _ = mempty

-- TODO: does this make sense?
-- | Traverse the type variables with effects, rebuilding the structure.
instance Traversable Type' where
    traverse f (TyVar e) = TyVar <$> f e
    traverse f (TyList t) = TyList <$> traverse f t
    traverse f (t :-> t') = liftA2 (:->) (traverse f t) (traverse f t')
    traverse _ TyUnknown = pure TyUnknown
    traverse _ (TyAtom a) = pure (TyAtom a)
-- | Bind is substitution on type variables (used by 'tyreplace').
-- 'return' agrees with 'pure' from the Applicative instance (both TyVar).
instance Monad Type' where
    return a = TyVar a
    TyUnknown >>= _ = TyUnknown
    TyAtom t >>= _ = TyAtom t
    TyVar v >>= f = f v
    TyList t >>= f = TyList $ t >>= f
    t :-> t' >>= f = (:->) (t >>= f) (t' >>= f)
-- | Serialise type-variable names through their 'String' representation.
instance Serialize TyVarName where
    put = put . unpack
    -- 'm >>= return . f' is just 'fmap f m'; use the applicative form.
    get = pack <$> get
-- | Binary encoding of atomic types.
-- Tag bytes: 0 = Num, 1 = Int, 2 = Nat, 3 = Bool; 'get' must stay in
-- sync with 'put'.
instance Serialize AtomTy where
    put ATyNum = putWord8 0
    put ATyInt = putWord8 1
    put ATyNat = putWord8 2
    put ATyBool = putWord8 3

    get = do
      tag_ <- getWord8
      case tag_ of
        0 -> return ATyNum
        1 -> return ATyInt
        2 -> return ATyNat
        3 -> return ATyBool
        _ -> fail $ "SerializeErr AtomTy " ++ show tag_

-- | Binary encoding of types.
-- Tag bytes: 0 = TyUnknown, 1 = TyVar, 2 = TyList, 3 = TyAtom, 4 = (:->).
instance (Serialize a) => Serialize (Type' a) where
    put TyUnknown = putWord8 0
    put (TyVar v) = putWord8 1 >> put v
    put (TyList t) = putWord8 2 >> put t
    put (TyAtom a) = putWord8 3 >> put a
    put (t :-> t') = putWord8 4 >> put t >> put t'

    get = do
      tag_ <- getWord8
      case tag_ of
        0 -> return TyUnknown
        1 -> TyVar <$> get
        2 -> TyList <$> get
        3 -> TyAtom <$> get
        4 -> (:->) <$> get <*> get
        _ -> fail $ "SerializeErr (Type' a) " ++ show tag_
-- | The concrete type of our expressions.
type Type = Type' TyVarName

-- | Pretty-printing of types.
instance Show Type where
    show TyUnknown = "?"
    show (TyVar v) = unpack v
    show (TyList t) = "[" ++ show t ++ "]"
    show (TyAtom t) = show t
    -- NOTE(review): no parentheses are emitted, so nested arrows render
    -- ambiguously: @(a :-> b) :-> c@ and @a :-> (b :-> c)@ print the same.
    show (t :-> t') = show t ++ " -> " ++ show t'
-- | Build a type variable from a 'String'.
tyVar :: String -> Type
tyVar name = TyVar (pack name)

-- | The type of booleans ('ATyBool').
tyBool :: Type
tyBool = TyAtom ATyBool

-- | The type of integers ('ATyInt').
tyInt :: Type
tyInt = TyAtom ATyInt

-- | Does the given variable occur anywhere in the type?
occurs :: TyVarName -> Type -> Bool
occurs v ty = F.elem v ty
-- | Replace every occurrence of the type-variable @v@ in @t'@ by @t@.
tyreplace :: TyVarName -> Type -> Type -> Type
tyreplace v t t' = t' >>= subst
    where subst w | w == v    = t
                  | otherwise = TyVar w

-- | Make type variables that the parser can never produce
-- (their names start with an upper-case letter).
tyVarInternal :: Int -> Type
tyVarInternal n = tyVar ("V" ++ show n)

-- | A type variable is internal iff its name starts upper-case.
-- NOTE(review): 'T.head' is partial; an empty variable name would crash.
isTyVarInternal :: TyVarName -> Bool
isTyVarInternal name = isUpper (T.head name)

-- | Is the type a bare type variable?
isTyVar :: Type -> Bool
isTyVar ty = case ty of
    TyVar _ -> True
    _       -> False
-- | Arbitrary instance for variable names: draws from t0..t100.
instance Arbitrary TyVarName where
    arbitrary =
        elements [(pack . ("t"++) . show) n | n <- [(0::Int)..100]]

-- | Arbitrary instance for atomic types.
instance Arbitrary AtomTy where
    arbitrary = elements [ATyNum, ATyInt, ATyNat, ATyBool]

-- | Arbitrary instance for general types.
-- NOTE(review): the recursion is not size-bounded ('oneof' picks the
-- recursive ':->'/'TyList' branches with fixed probability), so generated
-- types can be large; 'TyUnknown' is never generated.
instance Arbitrary Type where
    arbitrary =
        oneof [ TyVar <$> arbitrary
              , TyList <$> arbitrary
              , TyAtom <$> arbitrary
              , (:->) <$> arbitrary <*> arbitrary
              ]
-- | The arity of a type: the number of top-level arrows.
arity :: Type -> Int
arity (_ :-> cod) = 1 + arity cod
arity _           = 0

-- | Argument types of a functional type, leftmost argument first.
argsTypes :: Type -> [Type]
argsTypes = reverse . collect []
    where collect :: [Type] -> Type -> [Type]
          collect acc (dom :-> cod) = collect (dom : acc) cod
          collect acc _             = acc

-- | Result type of a functional type; 'Nothing' for non-arrow types.
resType :: Type -> Maybe Type
resType (_ :-> cod) = Just cod
resType _           = Nothing

-- | Build a functional type from a result type and its argument types.
exponential :: Type -> [Type] -> Type
exponential res args = foldr (:->) res args
| miguelpagano/equ | Equ/Types.hs | gpl-3.0 | 6,385 | 0 | 12 | 1,745 | 1,987 | 1,025 | 962 | -1 | -1 |
{-# LANGUAGE CPP, PackageImports #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Safe #-}
#endif
module Data.List (
-- * Basic functions
(++) -- :: [a] -> [a] -> [a]
, head -- :: [a] -> a
, last -- :: [a] -> a
, tail -- :: [a] -> [a]
, init -- :: [a] -> [a]
, null -- :: [a] -> Bool
, length -- :: [a] -> Int
-- * List transformations
, map -- :: (a -> b) -> [a] -> [b]
, reverse -- :: [a] -> [a]
, intersperse -- :: a -> [a] -> [a]
, intercalate -- :: [a] -> [[a]] -> [a]
, transpose -- :: [[a]] -> [[a]]
, subsequences -- :: [a] -> [[a]]
, permutations -- :: [a] -> [[a]]
-- * Reducing lists (folds)
, foldl -- :: (a -> b -> a) -> a -> [b] -> a
, foldl' -- :: (a -> b -> a) -> a -> [b] -> a
, foldl1 -- :: (a -> a -> a) -> [a] -> a
, foldl1' -- :: (a -> a -> a) -> [a] -> a
, foldr -- :: (a -> b -> b) -> b -> [a] -> b
, foldr1 -- :: (a -> a -> a) -> [a] -> a
-- ** Special folds
, concat -- :: [[a]] -> [a]
, concatMap -- :: (a -> [b]) -> [a] -> [b]
, and -- :: [Bool] -> Bool
, or -- :: [Bool] -> Bool
, any -- :: (a -> Bool) -> [a] -> Bool
, all -- :: (a -> Bool) -> [a] -> Bool
, sum -- :: (Num a) => [a] -> a
, product -- :: (Num a) => [a] -> a
, maximum -- :: (Ord a) => [a] -> a
, minimum -- :: (Ord a) => [a] -> a
-- * Building lists
-- ** Scans
, scanl -- :: (a -> b -> a) -> a -> [b] -> [a]
, scanl1 -- :: (a -> a -> a) -> [a] -> [a]
, scanr -- :: (a -> b -> b) -> b -> [a] -> [b]
, scanr1 -- :: (a -> a -> a) -> [a] -> [a]
-- ** Accumulating maps
, mapAccumL -- :: (a -> b -> (a,c)) -> a -> [b] -> (a,[c])
, mapAccumR -- :: (a -> b -> (a,c)) -> a -> [b] -> (a,[c])
-- ** Infinite lists
, iterate -- :: (a -> a) -> a -> [a]
, repeat -- :: a -> [a]
, replicate -- :: Int -> a -> [a]
, cycle -- :: [a] -> [a]
-- ** Unfolding
, unfoldr -- :: (b -> Maybe (a, b)) -> b -> [a]
-- * Sublists
-- ** Extracting sublists
, take -- :: Int -> [a] -> [a]
, drop -- :: Int -> [a] -> [a]
, splitAt -- :: Int -> [a] -> ([a], [a])
, takeWhile -- :: (a -> Bool) -> [a] -> [a]
, dropWhile -- :: (a -> Bool) -> [a] -> [a]
, span -- :: (a -> Bool) -> [a] -> ([a], [a])
, break -- :: (a -> Bool) -> [a] -> ([a], [a])
, stripPrefix -- :: Eq a => [a] -> [a] -> Maybe [a]
, group -- :: Eq a => [a] -> [[a]]
, inits -- :: [a] -> [[a]]
, tails -- :: [a] -> [[a]]
-- ** Predicates
, isPrefixOf -- :: (Eq a) => [a] -> [a] -> Bool
, isSuffixOf -- :: (Eq a) => [a] -> [a] -> Bool
, isInfixOf -- :: (Eq a) => [a] -> [a] -> Bool
-- * Searching lists
-- ** Searching by equality
, elem -- :: a -> [a] -> Bool
, notElem -- :: a -> [a] -> Bool
, lookup -- :: (Eq a) => a -> [(a,b)] -> Maybe b
-- ** Searching with a predicate
, find -- :: (a -> Bool) -> [a] -> Maybe a
, filter -- :: (a -> Bool) -> [a] -> [a]
, partition -- :: (a -> Bool) -> [a] -> ([a], [a])
-- * Indexing lists
-- | These functions treat a list @xs@ as a indexed collection,
-- with indices ranging from 0 to @'length' xs - 1@.
, (!!) -- :: [a] -> Int -> a
, elemIndex -- :: (Eq a) => a -> [a] -> Maybe Int
, elemIndices -- :: (Eq a) => a -> [a] -> [Int]
, findIndex -- :: (a -> Bool) -> [a] -> Maybe Int
, findIndices -- :: (a -> Bool) -> [a] -> [Int]
-- * Zipping and unzipping lists
, zip -- :: [a] -> [b] -> [(a,b)]
, zip3
, zip4, zip5, zip6, zip7
, zipWith -- :: (a -> b -> c) -> [a] -> [b] -> [c]
, zipWith3
, zipWith4, zipWith5, zipWith6, zipWith7
, unzip -- :: [(a,b)] -> ([a],[b])
, unzip3
, unzip4, unzip5, unzip6, unzip7
-- * Special lists
-- ** Functions on strings
, lines -- :: String -> [String]
, words -- :: String -> [String]
, unlines -- :: [String] -> String
, unwords -- :: [String] -> String
-- ** \"Set\" operations
, nub -- :: (Eq a) => [a] -> [a]
, delete -- :: (Eq a) => a -> [a] -> [a]
, (\\) -- :: (Eq a) => [a] -> [a] -> [a]
, union -- :: (Eq a) => [a] -> [a] -> [a]
, intersect -- :: (Eq a) => [a] -> [a] -> [a]
-- ** Ordered lists
, sort -- :: (Ord a) => [a] -> [a]
, insert -- :: (Ord a) => a -> [a] -> [a]
-- * Generalized functions
-- ** The \"@By@\" operations
-- | By convention, overloaded functions have a non-overloaded
-- counterpart whose name is suffixed with \`@By@\'.
-- *** User-supplied equality (replacing an @Eq@ context)
-- | The predicate is assumed to define an equivalence.
, nubBy -- :: (a -> a -> Bool) -> [a] -> [a]
, deleteBy -- :: (a -> a -> Bool) -> a -> [a] -> [a]
, deleteFirstsBy -- :: (a -> a -> Bool) -> [a] -> [a] -> [a]
, unionBy -- :: (a -> a -> Bool) -> [a] -> [a] -> [a]
, intersectBy -- :: (a -> a -> Bool) -> [a] -> [a] -> [a]
, groupBy -- :: (a -> a -> Bool) -> [a] -> [[a]]
-- *** User-supplied comparison (replacing an @Ord@ context)
-- | The function is assumed to define a total ordering.
, sortBy -- :: (a -> a -> Ordering) -> [a] -> [a]
, insertBy -- :: (a -> a -> Ordering) -> a -> [a] -> [a]
, maximumBy -- :: (a -> a -> Ordering) -> [a] -> a
, minimumBy -- :: (a -> a -> Ordering) -> [a] -> a
-- ** The \"@generic@\" operations
-- | The prefix \`@generic@\' indicates an overloaded function that
-- is a generalized version of a "Prelude" function.
, genericLength -- :: (Integral a) => [b] -> a
, genericTake -- :: (Integral a) => a -> [b] -> [b]
, genericDrop -- :: (Integral a) => a -> [b] -> [b]
, genericSplitAt -- :: (Integral a) => a -> [b] -> ([b], [b])
, genericIndex -- :: (Integral a) => [b] -> a -> b
, genericReplicate -- :: (Integral a) => a -> b -> [b]
) where
import "base" Data.List hiding ( splitAt )
| jwiegley/ghc-release | libraries/haskell2010/Data/List.hs | gpl-3.0 | 6,771 | 0 | 5 | 2,708 | 493 | 375 | 118 | 105 | 0 |
module Chap03.Data.TaggedBinaryTree where
import Data.Foldable
import Prelude hiding (foldr)
-- | A binary tree whose interior nodes carry a tag @m@ next to the
-- element @a@; 'E' is the empty tree.
data TaggedBinaryTree m a = E
                          | T m a (TaggedBinaryTree m a) (TaggedBinaryTree m a)
                          deriving (Eq)

-- | Render as @(tag elem left right)@; the empty tree shows as @E@.
instance (Show m, Show a) => Show (TaggedBinaryTree m a) where
    show E = "E"
    show (T tg x l r) =
        "(" ++ show tg ++ " " ++ show x ++ " " ++ show l ++ " " ++ show r ++ ")"

-- | The tag of a tree, or the supplied default for the empty tree.
tag :: m -> TaggedBinaryTree m a -> m
tag fallback E        = fallback
tag _ (T tg _ _ _)    = tg

-- | In-order fold: left subtree first, then the node element, then the
-- right subtree.
instance Foldable (TaggedBinaryTree m) where
    foldr _ z E = z
    foldr f z (T _ x l r) =
        let rest = f x (foldr f z r)
        in foldr f rest l

-- | Map over the elements; tags are kept as they are.
instance Functor (TaggedBinaryTree m) where
    fmap _ E = E
    fmap f (T tg x l r) = T tg (f x) (fmap f l) (fmap f r)
| stappit/okasaki-pfds | src/Chap03/Data/TaggedBinaryTree.hs | gpl-3.0 | 728 | 0 | 10 | 233 | 359 | 184 | 175 | 18 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
-- | Parse and process phylogeny data
module Bio.PhylogenyTools (
module Bio.PhylogenyData,
drawPhylogeneticGraph,
pathLengths,
pathLengthsIndexed,
averagePathLengthperNodes,
compareAveragePathLengths,
minimumAveragePathLength,
maximumAveragePathLength,
getLabel
) where
import Prelude
import System.IO
import Bio.PhylogenyData
import Data.Maybe
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Token
import Text.ParserCombinators.Parsec.Language (emptyDef)
import Control.Monad
import Data.Tree
import Data.List
import Data.Either
import Data.Tuple
import qualified Data.Text.Lazy as TL
import Data.Graph.Inductive
import qualified Data.Either.Unwrap as E
import qualified Data.GraphViz as GV
import qualified Data.GraphViz.Printing as GVP
import qualified Data.GraphViz.Attributes.Colors as GVAC
import qualified Data.GraphViz.Attributes.Complete as GVA
--------------------------------------------------------
--draw Graph
-- | Render the phylogenetic graph in Graphviz dot format.
-- Fix: the original bound @dotFormat@ twice in a row; the first binding
-- (with 'GV.nonClusteredParams') was dead code shadowed by the second.
drawPhylogeneticGraph :: Gr String Double -> String
drawPhylogeneticGraph inputGraph = TL.unpack dottext
  where params = GV.nonClusteredParams
                   { GV.isDirected = True
                   , GV.globalAttributes = [GV.GraphAttrs [GVA.Size (GVA.GSize (20 :: Double) (Just (20 :: Double)) False)]]
                   , GV.isDotCluster = const True
                   , GV.fmtNode = nodeFormat
                   , GV.fmtEdge = edgeFormat
                   }
        dotFormat = GV.graphToDot params inputGraph
        dottext = GVP.renderDot $ GVP.toDot dotFormat
-- | Internal nodes render as points; all other nodes show their label.
nodeFormat :: (t,String) -> [GVA.Attribute]
nodeFormat (_, lbl) =
    if lbl == "internal"
       then [GV.shape GVA.PointShape]
       else [GV.textLabel (TL.pack lbl)]

-- | Edges are labelled with the quoted branch length.
edgeFormat :: (t,t, Double) -> [GVA.Attribute]
edgeFormat (_, _, weight) =
    [GV.textLabel (TL.pack ("\"" ++ show weight ++ "\""))]
--Paths
-- | Computes distance between all nodes in the graph
-- NOTE(review): the local binding 'pathLengths' shadows the top-level
-- name, and this logic is duplicated (modulo 'spLength' call style) in
-- 'pathLengthsIndexed' below -- candidates for consolidation.
pathLengths :: Gr String Double -> [Double]
pathLengths inputGraph = pathLengths
  where nonInternalLabeledNodes = filter (\(_,label) -> label /= "internal") (labNodes inputGraph)
        nonInternalNodes = map fst nonInternalLabeledNodes
        pairs = map toPair (sequence [nonInternalNodes,nonInternalNodes])
        --we are not considering distance to self and the upper triangular part of the distance matrix
        -- NOTE(review): taking the first half of the lexicographically
        -- ordered non-self pairs is not exactly the upper triangle; for
        -- symmetric distances the resulting lengths coincide, but verify
        -- this holds for the graphs supplied here.
        nonselfPairs = filter (\pair -> uncurry (/=) pair) pairs
        upperTriangularNonselfPairs = take (length nonselfPairs `div` 2) nonselfPairs
        pathLengths = map (\pair -> spLength (fst pair) (snd pair) inputGraph) upperTriangularNonselfPairs
-- | Computes distance between all nodes in the graph including the corresponding node indices
-- NOTE(review): duplicates the pair-selection logic of 'pathLengths';
-- only the 'spLength' call style differs ('uncurry' here vs explicit
-- fst/snd above). Same caveat about the "upper triangular" selection.
pathLengthsIndexed :: Gr String Double -> [(Double,(Node,Node))]
pathLengthsIndexed inputGraph = pathLengthsIndexed
  where nonInternalLabeledNodes = filter (\(_,label) -> label /= "internal") (labNodes inputGraph)
        nonInternalNodes = map fst nonInternalLabeledNodes
        pairs = map toPair (sequence [nonInternalNodes,nonInternalNodes])
        --we are not considering distance to self and the upper triangular part of the distance matrix
        nonselfPairs = filter (\pair -> uncurry (/=) pair) pairs
        upperTriangularNonselfPairs = take (length nonselfPairs `div` 2) nonselfPairs
        pathLengths = map (\pair -> uncurry spLength pair inputGraph) upperTriangularNonselfPairs
        pathLengthsIndexed = zip pathLengths upperTriangularNonselfPairs
-- | Average path length for every node mentioned in the indexed lengths.
averagePathLengthperNodes :: [(Double,(Node,Node))] -> [(Node,Double)]
averagePathLengthperNodes indexedLengths =
    map (averagePathLengthperNode indexedLengths) allNodes
  where allNodes = nub [n | (_, (i, j)) <- indexedLengths, n <- [i, j]]

-- | Average length over all path entries that touch the given node.
averagePathLengthperNode :: [(Double,(Node,Node))] -> Node -> (Node,Double)
averagePathLengthperNode indexedLengths node = (node, total / fromIntegral count)
  where touching = [len | (len, (i, j)) <- indexedLengths, i == node || j == node]
        total    = sum touching
        count    = length touching
-- | Entry selected as "minimum" under 'compareAveragePathLengths'.
-- NOTE(review): that comparator orders *descending* (greater length
-- compares LT), so 'minimumBy' here actually returns the node with the
-- largest average path length, and 'maximumBy' the smallest -- confirm
-- this inversion is intended before renaming or changing anything.
minimumAveragePathLength :: [(Node,Double)] -> (Node,Double)
minimumAveragePathLength = minimumBy compareAveragePathLengths

-- | Entry selected as "maximum" under 'compareAveragePathLengths'.
maximumAveragePathLength :: [(Node,Double)] -> (Node,Double)
maximumAveragePathLength = maximumBy compareAveragePathLengths
-- | Label of the given node in the graph.
-- NOTE(review): partial -- 'fromJust' crashes if the node is absent.
getLabel :: Gr String Double -> (Node,a) -> String
getLabel parsedNewick inputNode =
    -- 'labNodes' yields an association list keyed by node index, so plain
    -- 'lookup' replaces the hand-rolled 'find' on the first component.
    fromJust (lookup (fst inputNode) (labNodes parsedNewick))
--auxiliary functions
-- NOTE(review): partial -- only matches two-element lists; every call
-- site builds its argument via 'sequence [xs,xs]', which guarantees that.
toPair [a,b] = (a,b)

-- | Ordering used by the minimum/maximum helpers above.
-- NOTE(review): orders *descending* -- a greater length compares LT.
compareAveragePathLengths :: (Node,Double) -> (Node,Double) -> Ordering
compareAveragePathLengths (_,length1) (_,length2)
  | length1 > length2 = LT
  | length1 < length2 = GT
  -- in case of equal evalues the first hit is selected
  | length1 == length2 = EQ
| eggzilla/Phylogeny | src/Bio/PhylogenyTools.hs | gpl-3.0 | 5,506 | 0 | 20 | 1,161 | 1,386 | 786 | 600 | 88 | 1 |
module Assembler.Instruction
( Instruction(..)
, Symbol(getSymbol)
, Value(getValue)
, ValueOrSymbol
, symbol
, value
) where
import Data.Char (isDigit)
import Data.Either
import Data.List as List
import Data.Maybe
-- Core data types for the assembler: symbols, literal values, and the
-- instruction forms, plus validating smart constructors.

-- | A user-defined symbol (label or variable name).
newtype Symbol = Symbol { getSymbol :: String } deriving (Eq, Ord, Show)

-- | A numeric literal, kept in its textual form.
newtype Value = Value { getValue :: String } deriving (Eq, Show)

type ValueOrSymbol = Either Value Symbol

-- | One line of assembly.
data Instruction a = AS a -- ^ A-instruction; @a@ should be either Value or Variable
                   | CS { getDst :: String    -- ^ destination part
                        , getComp :: String   -- ^ computation part
                        , getJmp :: String }  -- ^ jump part
                   | LS Symbol                -- ^ label pseudo-instruction
                   | EmptyLine deriving (Eq, Show)

-- | Characters allowed anywhere in a user-defined symbol.
allowedSymbols :: String
allowedSymbols = ['0'..'9'] ++ ['a'..'z'] ++ ['A'..'Z'] ++ ['_', '.', '$', ':']

-- | Largest 16-bit value, as a decimal string.
maxInt :: String
maxInt = "65535"

-- | 'Nothing' when the check passed, otherwise 'Just' the error message.
maybeError :: Bool -> String -> Maybe String
maybeError ok msg = if ok then Nothing else Just msg

-- | Validate a symbol: non-empty, does not start with a digit, and uses
-- only characters from 'allowedSymbols'.
symbol :: String -> Either String Symbol
symbol s
  | isValid   = Right (Symbol s)
  | otherwise = Left ("Invalid symbol: " ++ s)
  where
    -- '&&' short-circuits, so 'head' is only reached on non-empty input.
    isValid = not (null s)
           && not (isDigit (head s))
           && all (`elem` allowedSymbols) s

-- | Validate a numeric value: all digits and at most 'maxInt'.
-- NOTE(review): as in the original, the empty string is accepted, and the
-- size check compares strings lexicographically, so values with leading
-- zeros (e.g. "065535") are rejected as too large -- confirm if intended.
value :: String -> Either String Value
value s
  | not (all isDigit s) = Left ("Invalid value: " ++ s)
  | not fits            = Left ("Value too large: " ++ s)
  | otherwise           = Right (Value s)
  where
    fits = length s < length maxInt
        || (length s == length maxInt && s < maxInt)
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.MapsEngine.Maps.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Mutate a map asset.
--
-- /See:/ <https://developers.google.com/maps-engine/ Google Maps Engine API Reference> for @mapsengine.maps.patch@.
module Network.Google.Resource.MapsEngine.Maps.Patch
(
-- * REST Resource
MapsPatchResource
-- * Creating a Request
, mapsPatch
, MapsPatch
-- * Request Lenses
, mpPayload
, mpId
) where
import Network.Google.MapsEngine.Types
import Network.Google.Prelude
-- | A resource alias for @mapsengine.maps.patch@ method which the
-- 'MapsPatch' request conforms to.
--
-- Servant-style route: PATCH /mapsengine/v1/maps/{id} with a JSON 'Map'
-- request body and an empty JSON response.
type MapsPatchResource =
     "mapsengine" :>
       "v1" :>
         "maps" :>
           Capture "id" Text :>
             QueryParam "alt" AltJSON :>
               ReqBody '[JSON] Map :> Patch '[JSON] ()
-- | Mutate a map asset.
--
-- /See:/ 'mapsPatch' smart constructor.
data MapsPatch = MapsPatch'
    { _mpPayload :: !Map  -- ^ Request body: the map asset to patch.
    , _mpId :: !Text      -- ^ Asset identifier captured in the URL path.
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'MapsPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mpPayload'
--
-- * 'mpId'
mapsPatch
    :: Map -- ^ 'mpPayload'
    -> Text -- ^ 'mpId'
    -> MapsPatch
mapsPatch pMpPayload_ pMpId_ =
    MapsPatch'
    { _mpPayload = pMpPayload_
    , _mpId = pMpId_
    }
-- | Multipart request metadata.
mpPayload :: Lens' MapsPatch Map
mpPayload
  = lens _mpPayload (\ s a -> s{_mpPayload = a})

-- | The ID of the map.
mpId :: Lens' MapsPatch Text
mpId = lens _mpId (\ s a -> s{_mpId = a})

-- | Execution: PATCH with JSON body, unit response, mapsengine OAuth scope.
instance GoogleRequest MapsPatch where
        type Rs MapsPatch = ()
        type Scopes MapsPatch =
             '["https://www.googleapis.com/auth/mapsengine"]
        requestClient MapsPatch'{..}
          = go _mpId (Just AltJSON) _mpPayload
              mapsEngineService
          where go
                  = buildClient (Proxy :: Proxy MapsPatchResource)
                      mempty
| rueshyna/gogol | gogol-maps-engine/gen/Network/Google/Resource/MapsEngine/Maps/Patch.hs | mpl-2.0 | 2,736 | 0 | 13 | 684 | 386 | 232 | 154 | 59 | 1 |
-- NOTE(review): this module is formatter test data (brittany fixture);
-- the long identifier is deliberately undefined and the multi-line 'case'
-- exists only to exercise layout. Do not "simplify" it.
func =
  case
      lakjsdlajsdljasdlkjasldjasldjasldjalsdjlaskjd
        lakjsdlajsdljasdlkjasldjasldjasldjalsdjlaskjd
    of
      False -> False
      True -> True
| lspitzner/brittany | data/Test115.hs | agpl-3.0 | 168 | 0 | 7 | 45 | 25 | 12 | 13 | 6 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Api.Core where
------------------------------------------------------------------------------
import Control.Lens
import Snap.Core
import Snap.Snaplet
import qualified Data.ByteString.Char8 as B
import Api.Services.LokaService
------------------------------------------------------------------------------
-- | State of the core API snaplet: currently just the Loka service
-- sub-snaplet.
data Api = Api { _lokaService :: Snaplet LokaService }

-- Generates the 'lokaService' lens used by 'apiInit'.
makeLenses ''Api
------------------------------------------------------------------------------
-- | Routes served by the core API: GET /status answers with 200.
apiRoutes :: [(B.ByteString, Handler b Api ())]
apiRoutes = [("status", method GET respondOk)]

------------------------------------------------------------------------------
-- | Set an empty 200 OK response.
respondOk :: Handler b Api ()
respondOk = modifyResponse (setResponseCode 200)
------------------------------------------------------------------------------
-- | Initialise the API snaplet: nest the Loka service at the snaplet root
-- ("") and register 'apiRoutes'.
apiInit :: SnapletInit b Api
apiInit = makeSnaplet "api" "Core Api" Nothing $ do
    ls <- nestSnaplet "" lokaService lokaServiceInit
    addRoutes apiRoutes
    return $ Api ls
| jakespringer/loka | Server/src/api/Core.hs | lgpl-3.0 | 1,051 | 0 | 9 | 110 | 199 | 109 | 90 | 20 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, TypeFamilies, EmptyDataDecls, ScopedTypeVariables #-}
module Arithm where
-- Start Zero Succ
-- Type-level Peano naturals: empty data declarations, so these types
-- have no values and exist only at the type level.
data Zero
data Succ n
-- Stop Zero Succ

-- Start Nat
-- | Reflect a type-level natural back to a runtime 'Int'.
class Nat n where
  toInt :: n -> Int

instance Nat Zero where
  toInt _ = 0

-- Needs ScopedTypeVariables so the 'n' in the body refers to the 'n'
-- of the instance head.
instance (Nat n) => Nat (Succ n) where
  toInt _ = 1 + toInt (undefined :: n)
-- Stop Nat
-- Named abbreviations for the first ten naturals.
type One = Succ Zero
type Two = Succ One
type Three = Succ Two
type Four = Succ Three
type Five = Succ Four
type Six = Succ Five
type Seven = Succ Six
type Eight = Succ Seven
type Nine = Succ Eight
type Ten = Succ Nine

-- Start wrappers
-- Phantom-typed wrappers: the @n@ parameter records an alignment factor
-- at the type level; the runtime representation is just an 'Int'.
newtype Pointer n = MkPointer Int
newtype Offset n = MkOffset Int
-- Stop wrappers

-- Start multiple
-- | An offset that is a multiple of @n@; the factor is obtained by
-- reflecting @n@ with 'toInt' (hence the explicit forall).
multiple :: forall n. (Nat n) => Int -> Offset n
multiple i = MkOffset (i * toInt (undefined :: n))
-- Stop multiple
-- Start GCD
-- | Type-level GCD by repeated subtraction: @d@ is an accumulator that is
-- incremented while both @m@ and @n@ are decremented; when one side
-- reaches 'Zero' the recursion restarts on the accumulator and the rest.
class (Nat d, Nat m, Nat n) => HasGCD d m n where
  type GCD d m n

instance (Nat d) => HasGCD d Zero Zero where
  type GCD d Zero Zero = d

instance (Nat d, Nat m, Nat n) => HasGCD d (Succ m) (Succ n) where
  type GCD d (Succ m) (Succ n) = GCD (Succ d) m n

instance (Nat m) => HasGCD Zero (Succ m) Zero where
  type GCD Zero (Succ m) Zero = Succ m

instance (Nat d, Nat m) => HasGCD (Succ d) (Succ m) Zero where
  type GCD (Succ d) (Succ m) Zero = GCD (Succ Zero) d m

instance (Nat n) => HasGCD Zero Zero (Succ n) where
  type GCD Zero Zero (Succ n) = Succ n

instance (Nat d, Nat n) => HasGCD (Succ d) Zero (Succ n) where
  type GCD (Succ d) Zero (Succ n) = GCD (Succ Zero) d n
-- Stop GCD

-- Start add
-- | Adding an offset to a pointer yields a pointer whose type-level
-- alignment is the GCD of the two alignments.
add :: Pointer m -> Offset n -> Pointer (GCD Zero m n)
add (MkPointer x) (MkOffset y) = MkPointer (x + y)
-- Stop add

-- | Stub for a 32-bit fetch: the equality constraint demands that the
-- pointer's alignment combined with Four still gives Four.
fetch32 :: (GCD Zero n Four ~ Four) => Pointer n -> IO ()
fetch32 = undefined
-- General addition
-- Start plus
-- | Type-level addition by recursion on the first argument.
type family Plus m n
type instance Plus Zero n = n
type instance Plus (Succ m) n = Succ (Plus m n)

-- | Value-level stub: only the *type* of the result is of interest.
plus :: m -> n -> Plus m n
plus = undefined

-- Example: the type of 'tplus' is @Plus Two Three@ (i.e. Five).
tplus = plus (undefined::Two) (undefined::Three)
-- Stop plus

-- Example forcing unification of @Plus x One@ with the type of 'tplus'.
tplus' x = if True then plus x (undefined::One) else tplus
| egaburov/funstuff | Haskell/fun-with-types/codes/Arithm.hs | apache-2.0 | 2,055 | 0 | 9 | 468 | 899 | 482 | 417 | -1 | -1 |
-- This file is part of "Loopless Functional Algorithms".
-- Copyright (c) 2005 Jamie Snape, Oxford University Computing Laboratory.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- https://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Traversals where
-- | Multiway (rose) trees: a label and a list of subtrees.
data Rose a = Node a [Rose a]

-- | Binary trees with labelled forks and empty leaves.
data Tree a = Null | Fork a (Tree a) (Tree a)

-- | Pre-order traversal of a rose tree: the label, then the subtrees
-- left to right. ('concatMap' replaces the original 'concat . map'.)
preorder :: Rose a -> [a]
preorder (Node x xts) = x : concatMap preorder xts

-- | In-order traversal of a binary tree.
-- Fix: the original had no 'Null' equation and crashed on empty trees.
inorder :: Tree a -> [a]
inorder Null = []
inorder (Fork x lt rt) = inorder lt ++ x : inorder rt
| snape/LooplessFunctionalAlgorithms | Traversals.hs | apache-2.0 | 907 | 0 | 8 | 167 | 128 | 73 | 55 | 5 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QIconEngineV2.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:15
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QIconEngineV2 (
QqIconEngineV2(..)
,QqIconEngineV2_nf(..)
,qIconEngineV2_delete, qIconEngineV2_delete1
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Gui.QIcon
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- Machine-generated qtHaskell bindings: comments only, no code changes.
-- Dispatch a user-defined method by integer id on the wrapped C++ object;
-- the two instances differ only in the receiver type (concrete object vs
-- subclass wrapper), and the Variant forms thread a QVariant argument.
instance QuserMethod (QIconEngineV2 ()) (()) (IO ()) where
 userMethod qobj evid ()
  = withObjectPtr qobj $ \cobj_qobj ->
    qtc_QIconEngineV2_userMethod cobj_qobj (toCInt evid)

foreign import ccall "qtc_QIconEngineV2_userMethod" qtc_QIconEngineV2_userMethod :: Ptr (TQIconEngineV2 a) -> CInt -> IO ()

instance QuserMethod (QIconEngineV2Sc a) (()) (IO ()) where
 userMethod qobj evid ()
  = withObjectPtr qobj $ \cobj_qobj ->
    qtc_QIconEngineV2_userMethod cobj_qobj (toCInt evid)

instance QuserMethod (QIconEngineV2 ()) (QVariant ()) (IO (QVariant ())) where
 userMethod qobj evid qvoj
  = withObjectRefResult $
    withObjectPtr qobj $ \cobj_qobj ->
    withObjectPtr qvoj $ \cobj_qvoj ->
    qtc_QIconEngineV2_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj

foreign import ccall "qtc_QIconEngineV2_userMethodVariant" qtc_QIconEngineV2_userMethodVariant :: Ptr (TQIconEngineV2 a) -> CInt -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))

instance QuserMethod (QIconEngineV2Sc a) (QVariant ()) (IO (QVariant ())) where
 userMethod qobj evid qvoj
  = withObjectRefResult $
    withObjectPtr qobj $ \cobj_qobj ->
    withObjectPtr qvoj $ \cobj_qvoj ->
    qtc_QIconEngineV2_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
-- Machine-generated qtHaskell bindings: comments only, no code changes.
-- | Overloaded constructors for 'QIconEngineV2' (default and copy forms).
class QqIconEngineV2 x1 where
  qIconEngineV2 :: x1 -> IO (QIconEngineV2 ())

instance QqIconEngineV2 (()) where
 qIconEngineV2 ()
  = withQIconEngineV2Result $
    qtc_QIconEngineV2

foreign import ccall "qtc_QIconEngineV2" qtc_QIconEngineV2 :: IO (Ptr (TQIconEngineV2 ()))

instance QqIconEngineV2 ((QIconEngineV2 t1)) where
 qIconEngineV2 (x1)
  = withQIconEngineV2Result $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV21 cobj_x1

foreign import ccall "qtc_QIconEngineV21" qtc_QIconEngineV21 :: Ptr (TQIconEngineV2 t1) -> IO (Ptr (TQIconEngineV2 ()))

-- | Same constructors but via 'withObjectRefResult' -- presumably the
-- "no finaliser" variants (hence the _nf suffix); confirm against the
-- qtHaskell generator conventions.
class QqIconEngineV2_nf x1 where
  qIconEngineV2_nf :: x1 -> IO (QIconEngineV2 ())

instance QqIconEngineV2_nf (()) where
 qIconEngineV2_nf ()
  = withObjectRefResult $
    qtc_QIconEngineV2

instance QqIconEngineV2_nf ((QIconEngineV2 t1)) where
 qIconEngineV2_nf (x1)
  = withObjectRefResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV21 cobj_x1
-- clone: obtain a copy of the engine (C wrapper qtc_QIconEngineV2_clone_h).
instance Qclone (QIconEngineV2 ()) (()) (IO (QIconEngineV2 ())) where
 clone x0 ()
  = withObjectRefResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QIconEngineV2_clone_h cobj_x0
foreign import ccall "qtc_QIconEngineV2_clone_h" qtc_QIconEngineV2_clone_h :: Ptr (TQIconEngineV2 a) -> IO (Ptr (TQIconEngineV2 ()))
instance Qclone (QIconEngineV2Sc a) (()) (IO (QIconEngineV2 ())) where
 clone x0 ()
  = withObjectRefResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QIconEngineV2_clone_h cobj_x0
-- key: the engine's key, marshalled from a C-side QString to a Haskell
-- String via withStringResult.
instance Qkey (QIconEngineV2 ()) (()) (IO (String)) where
 key x0 ()
  = withStringResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QIconEngineV2_key_h cobj_x0
foreign import ccall "qtc_QIconEngineV2_key_h" qtc_QIconEngineV2_key_h :: Ptr (TQIconEngineV2 a) -> IO (Ptr (TQString ()))
instance Qkey (QIconEngineV2Sc a) (()) (IO (String)) where
 key x0 ()
  = withStringResult $
    withObjectPtr x0 $ \cobj_x0 ->
    qtc_QIconEngineV2_key_h cobj_x0
-- | Delete the underlying C++ QIconEngineV2 object.
qIconEngineV2_delete :: QIconEngineV2 a -> IO ()
qIconEngineV2_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QIconEngineV2_delete cobj_x0
foreign import ccall "qtc_QIconEngineV2_delete" qtc_QIconEngineV2_delete :: Ptr (TQIconEngineV2 a) -> IO ()
-- | Alternative delete entry point generated by the binding; dispatches
-- to qtc_QIconEngineV2_delete1 on the C side.
qIconEngineV2_delete1 :: QIconEngineV2 a -> IO ()
qIconEngineV2_delete1 x0
  = withObjectPtr x0 $ \cobj_x0 ->
    qtc_QIconEngineV2_delete1 cobj_x0
foreign import ccall "qtc_QIconEngineV2_delete1" qtc_QIconEngineV2_delete1 :: Ptr (TQIconEngineV2 a) -> IO ()
-- actualSize: the size the engine reports for a requested size, icon mode
-- and icon state.  The Qq variant passes/returns QSize objects; the plain
-- variant marshals Size values as CInt width/height pairs (the _qth C
-- wrapper writes the result through two out-pointers).
instance QqactualSize (QIconEngineV2 ()) ((QSize t1, QIconMode, QIconState)) where
 qactualSize x0 (x1, x2, x3)
  = withQSizeResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV2_actualSize_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
foreign import ccall "qtc_QIconEngineV2_actualSize_h" qtc_QIconEngineV2_actualSize_h :: Ptr (TQIconEngineV2 a) -> Ptr (TQSize t1) -> CLong -> CLong -> IO (Ptr (TQSize ()))
instance QqactualSize (QIconEngineV2Sc a) ((QSize t1, QIconMode, QIconState)) where
 qactualSize x0 (x1, x2, x3)
  = withQSizeResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV2_actualSize_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
instance QactualSize (QIconEngineV2 ()) ((Size, QIconMode, QIconState)) where
 actualSize x0 (x1, x2, x3)
  = withSizeResult $ \csize_ret_w csize_ret_h ->
    withObjectPtr x0 $ \cobj_x0 ->
    withCSize x1 $ \csize_x1_w csize_x1_h ->
    qtc_QIconEngineV2_actualSize_qth_h cobj_x0 csize_x1_w csize_x1_h (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3) csize_ret_w csize_ret_h
foreign import ccall "qtc_QIconEngineV2_actualSize_qth_h" qtc_QIconEngineV2_actualSize_qth_h :: Ptr (TQIconEngineV2 a) -> CInt -> CInt -> CLong -> CLong -> Ptr CInt -> Ptr CInt -> IO ()
instance QactualSize (QIconEngineV2Sc a) ((Size, QIconMode, QIconState)) where
 actualSize x0 (x1, x2, x3)
  = withSizeResult $ \csize_ret_w csize_ret_h ->
    withObjectPtr x0 $ \cobj_x0 ->
    withCSize x1 $ \csize_x1_w csize_x1_h ->
    qtc_QIconEngineV2_actualSize_qth_h cobj_x0 csize_x1_w csize_x1_h (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3) csize_ret_w csize_ret_h
-- addFile: register an image file with the engine for the given size,
-- icon mode and icon state.  The filename is marshalled as a CWString;
-- the Qq variant takes a QSize object, the plain variant a Size value.
instance QqaddFile (QIconEngineV2 ()) ((String, QSize t2, QIconMode, QIconState)) where
 qaddFile x0 (x1, x2, x3, x4)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QIconEngineV2_addFile_h cobj_x0 cstr_x1 cobj_x2 (toCLong $ qEnum_toInt x3) (toCLong $ qEnum_toInt x4)
foreign import ccall "qtc_QIconEngineV2_addFile_h" qtc_QIconEngineV2_addFile_h :: Ptr (TQIconEngineV2 a) -> CWString -> Ptr (TQSize t2) -> CLong -> CLong -> IO ()
instance QqaddFile (QIconEngineV2Sc a) ((String, QSize t2, QIconMode, QIconState)) where
 qaddFile x0 (x1, x2, x3, x4)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QIconEngineV2_addFile_h cobj_x0 cstr_x1 cobj_x2 (toCLong $ qEnum_toInt x3) (toCLong $ qEnum_toInt x4)
instance QaddFile (QIconEngineV2 ()) ((String, Size, QIconMode, QIconState)) where
 addFile x0 (x1, x2, x3, x4)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    withCSize x2 $ \csize_x2_w csize_x2_h ->
    qtc_QIconEngineV2_addFile_qth_h cobj_x0 cstr_x1 csize_x2_w csize_x2_h (toCLong $ qEnum_toInt x3) (toCLong $ qEnum_toInt x4)
foreign import ccall "qtc_QIconEngineV2_addFile_qth_h" qtc_QIconEngineV2_addFile_qth_h :: Ptr (TQIconEngineV2 a) -> CWString -> CInt -> CInt -> CLong -> CLong -> IO ()
instance QaddFile (QIconEngineV2Sc a) ((String, Size, QIconMode, QIconState)) where
 addFile x0 (x1, x2, x3, x4)
  = withObjectPtr x0 $ \cobj_x0 ->
    withCWString x1 $ \cstr_x1 ->
    withCSize x2 $ \csize_x2_w csize_x2_h ->
    qtc_QIconEngineV2_addFile_qth_h cobj_x0 cstr_x1 csize_x2_w csize_x2_h (toCLong $ qEnum_toInt x3) (toCLong $ qEnum_toInt x4)
-- addPixmap: hand a ready pixmap to the engine for the given mode/state.
instance QaddPixmap (QIconEngineV2 ()) ((QPixmap t1, QIconMode, QIconState)) (IO ()) where
 addPixmap x0 (x1, x2, x3)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV2_addPixmap_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
foreign import ccall "qtc_QIconEngineV2_addPixmap_h" qtc_QIconEngineV2_addPixmap_h :: Ptr (TQIconEngineV2 a) -> Ptr (TQPixmap t1) -> CLong -> CLong -> IO ()
instance QaddPixmap (QIconEngineV2Sc a) ((QPixmap t1, QIconMode, QIconState)) (IO ()) where
 addPixmap x0 (x1, x2, x3)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV2_addPixmap_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
-- paint (QRect variant): render the icon with the given painter into a
-- QRect object for the given mode/state.
instance Qqpaint (QIconEngineV2 ()) ((QPainter t1, QRect t2, QIconMode, QIconState)) where
 qpaint x0 (x1, x2, x3, x4)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QIconEngineV2_paint_h cobj_x0 cobj_x1 cobj_x2 (toCLong $ qEnum_toInt x3) (toCLong $ qEnum_toInt x4)
foreign import ccall "qtc_QIconEngineV2_paint_h" qtc_QIconEngineV2_paint_h :: Ptr (TQIconEngineV2 a) -> Ptr (TQPainter t1) -> Ptr (TQRect t2) -> CLong -> CLong -> IO ()
instance Qqpaint (QIconEngineV2Sc a) ((QPainter t1, QRect t2, QIconMode, QIconState)) where
 qpaint x0 (x1, x2, x3, x4)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withObjectPtr x2 $ \cobj_x2 ->
    qtc_QIconEngineV2_paint_h cobj_x0 cobj_x1 cobj_x2 (toCLong $ qEnum_toInt x3) (toCLong $ qEnum_toInt x4)
-- paint (plain Rect variant): the rectangle is marshalled as four CInt
-- components (x, y, w, h) instead of a QRect object.
instance Qpaint (QIconEngineV2 ()) ((QPainter t1, Rect, QIconMode, QIconState)) where
 paint x0 (x1, x2, x3, x4)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withCRect x2 $ \crect_x2_x crect_x2_y crect_x2_w crect_x2_h ->
    qtc_QIconEngineV2_paint_qth_h cobj_x0 cobj_x1 crect_x2_x crect_x2_y crect_x2_w crect_x2_h (toCLong $ qEnum_toInt x3) (toCLong $ qEnum_toInt x4)
foreign import ccall "qtc_QIconEngineV2_paint_qth_h" qtc_QIconEngineV2_paint_qth_h :: Ptr (TQIconEngineV2 a) -> Ptr (TQPainter t1) -> CInt -> CInt -> CInt -> CInt -> CLong -> CLong -> IO ()
instance Qpaint (QIconEngineV2Sc a) ((QPainter t1, Rect, QIconMode, QIconState)) where
 paint x0 (x1, x2, x3, x4)
  = withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    withCRect x2 $ \crect_x2_x crect_x2_y crect_x2_w crect_x2_h ->
    qtc_QIconEngineV2_paint_qth_h cobj_x0 cobj_x1 crect_x2_x crect_x2_y crect_x2_w crect_x2_h (toCLong $ qEnum_toInt x3) (toCLong $ qEnum_toInt x4)
-- pixmap: obtain the icon as a pixmap for a requested size/mode/state.
-- Qq* variants take a QSize object, plain variants take a Size value;
-- the _nf variants differ only in returning via withObjectRefResult
-- instead of withQPixmapResult.
instance Qqpixmap (QIconEngineV2 ()) ((QSize t1, QIconMode, QIconState)) where
 qpixmap x0 (x1, x2, x3)
  = withQPixmapResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV2_pixmap_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
foreign import ccall "qtc_QIconEngineV2_pixmap_h" qtc_QIconEngineV2_pixmap_h :: Ptr (TQIconEngineV2 a) -> Ptr (TQSize t1) -> CLong -> CLong -> IO (Ptr (TQPixmap ()))
instance Qqpixmap (QIconEngineV2Sc a) ((QSize t1, QIconMode, QIconState)) where
 qpixmap x0 (x1, x2, x3)
  = withQPixmapResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV2_pixmap_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
instance Qqpixmap_nf (QIconEngineV2 ()) ((QSize t1, QIconMode, QIconState)) where
 qpixmap_nf x0 (x1, x2, x3)
  = withObjectRefResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV2_pixmap_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
instance Qqpixmap_nf (QIconEngineV2Sc a) ((QSize t1, QIconMode, QIconState)) where
 qpixmap_nf x0 (x1, x2, x3)
  = withObjectRefResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QIconEngineV2_pixmap_h cobj_x0 cobj_x1 (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
instance Qpixmap (QIconEngineV2 ()) ((Size, QIconMode, QIconState)) where
 pixmap x0 (x1, x2, x3)
  = withQPixmapResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCSize x1 $ \csize_x1_w csize_x1_h ->
    qtc_QIconEngineV2_pixmap_qth_h cobj_x0 csize_x1_w csize_x1_h (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
foreign import ccall "qtc_QIconEngineV2_pixmap_qth_h" qtc_QIconEngineV2_pixmap_qth_h :: Ptr (TQIconEngineV2 a) -> CInt -> CInt -> CLong -> CLong -> IO (Ptr (TQPixmap ()))
instance Qpixmap (QIconEngineV2Sc a) ((Size, QIconMode, QIconState)) where
 pixmap x0 (x1, x2, x3)
  = withQPixmapResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCSize x1 $ \csize_x1_w csize_x1_h ->
    qtc_QIconEngineV2_pixmap_qth_h cobj_x0 csize_x1_w csize_x1_h (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
instance Qpixmap_nf (QIconEngineV2 ()) ((Size, QIconMode, QIconState)) where
 pixmap_nf x0 (x1, x2, x3)
  = withObjectRefResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCSize x1 $ \csize_x1_w csize_x1_h ->
    qtc_QIconEngineV2_pixmap_qth_h cobj_x0 csize_x1_w csize_x1_h (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
instance Qpixmap_nf (QIconEngineV2Sc a) ((Size, QIconMode, QIconState)) where
 pixmap_nf x0 (x1, x2, x3)
  = withObjectRefResult $
    withObjectPtr x0 $ \cobj_x0 ->
    withCSize x1 $ \csize_x1_w csize_x1_h ->
    qtc_QIconEngineV2_pixmap_qth_h cobj_x0 csize_x1_w csize_x1_h (toCLong $ qEnum_toInt x2) (toCLong $ qEnum_toInt x3)
| uduki/hsQt | Qtc/Gui/QIconEngineV2.hs | bsd-2-clause | 13,356 | 0 | 16 | 2,193 | 4,389 | 2,274 | 2,115 | -1 | -1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.ARB.PointSprite
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.ARB.PointSprite (
-- * Extension Support
glGetARBPointSprite,
gl_ARB_point_sprite,
-- * Enums
pattern GL_COORD_REPLACE_ARB,
pattern GL_POINT_SPRITE_ARB
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/ARB/PointSprite.hs | bsd-3-clause | 674 | 0 | 5 | 95 | 52 | 39 | 13 | 8 | 0 |
{-
******************************************************************************
* I N V A D E R S *
* *
* Module: ObjectBehavior *
* Purpose: Behavior of objects. *
* Author: Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module ObjectBehavior (
gun, -- :: Position2 -> Object
missile, -- :: Position2 -> Velocity2 -> Object
    alien       -- :: RandomGen g => g -> Position2 -> Velocity -> Object
) where
import Data.AffineSpace ((.+^))
import Data.Point2 (Point2 (..), point2X)
import Data.Vector2 (vector2, vector2Polar, vector2Rho,
vector2Theta, vector2X, vector2Y)
import qualified System.Random as Random
import FRP.Yampa
import FRP.Yampa.Integration
import PhysicalDimensions
import WorldGeometry
import Parser
import Object
------------------------------------------------------------------------------
-- Gun
------------------------------------------------------------------------------
-- | The player's gun.  Horizontally tracks the pointer position with a
-- proportional-derivative controller (acceleration clamped to
-- gunAccMax, speed clamped to gunSpeedMax) and fires a missile on each
-- left-button press, drawing from a 20-round magazine that reloads at
-- 0.5 rounds per second.  Spawned missiles start at the gun's muzzle
-- (y0 + gunHeight/2) and inherit the gun's horizontal velocity.
gun :: Position2 -> Object
gun (Point2 x0 y0) = proc (ObjInput {oiGameInput = gi}) -> do
    -- Position.
    --
    -- There is a bug in the GHC 8.* series that makes deconstructing
    -- a Point2 in the left-hand side of an arrow expression trigger a
    -- compilation error:
    --
    -- https://gitlab.haskell.org/ghc/ghc/-/issues/15175
    -- https://gitlab.haskell.org/ghc/ghc/-/issues/18950
    --
    -- Instead, we name the output with a variable and then use let to
    -- deconstruct the expression, which compiles correctly.
    desiredPos <- ptrPos -< gi -- Desired position
    let (Point2 xd _) = desiredPos
    rec
        -- Controller.
        let ad = 10 * (xd - x) - 5 * v -- Desired acceleration
        -- Physics with hard limits on acceleration and speed.
        -- The acceleration is zeroed once the speed limit is reached,
        -- unless it points back towards the allowed range.
        v <- integral -< let a = symLimit gunAccMax ad
                         in
                           if (-gunSpeedMax) <= v && v <= gunSpeedMax
                              || v < (-gunSpeedMax) && a > 0
                              || v > gunSpeedMax && a < 0
                           then a
                           else 0
        x <- (x0+) ^<< integral -< v
    -- Fire mechanism and ammunition level.
    trigger <- lbp -< gi
    (level, fire) <- magazine 20 0.5 -< trigger
    returnA -< ObjOutput {
                   ooObsObjState = oosGun (Point2 x y0) (vector2 v 0) level,
                   ooKillReq = noEvent,
                   ooSpawnReq =
                       fire `tag` [missile (Point2 x (y0 + (gunHeight/2)))
                                           (vector2 v missileInitialSpeed)]
               }
-- Ammunition magazine. Reloaded up to maximal
-- capacity at constant rate.
-- n ... Maximal and initial number of missiles.
-- f .......... Reload rate.
-- input ...... Trigger.
-- output ..... Tuple:
-- #1: Current number of missiles in magazine.
-- #2: Missile fired event.
magazine ::
     Int -> Frequency
  -> SF (Event ()) (Int, Event ())
magazine n f = proc trigger -> do
  -- A reload event every 1/f seconds.
  reload <- repeatedly (1/f) () -< ()
  -- Fold trigger/reload events into (current level, may-fire flag).
  (level,canFire)
      <- accumHold (n,True) -<
             (trigger `tag` dec)
             `lMerge` (reload `tag` inc)
  -- Only pass the trigger through when firing is currently allowed.
  returnA -< (level,
              trigger `gate` canFire)
 where
   -- inc: one round reloaded (capped at n).  Note that firing is only
   -- re-enabled if a round was already available before the reload
   -- (l > 0); reloading into an empty magazine keeps canFire False
   -- until the following reload.
   inc :: (Int,Bool) -> (Int, Bool)
   inc (l,_) | l < n = (l + 1, l > 0)
             | otherwise = (l, True)
   -- dec: one round fired; firing stays enabled while rounds remain.
   dec :: (Int,Bool) -> (Int, Bool)
   dec (l,_) | l > 0 = (l - 1, True)
             | otherwise = (l, False)
-- Ammunition magazine. Reloaded up to maximal capacity at constant rate.
-- n .......... Maximal and initial number of missiles.
-- f .......... Reload rate.
-- input ...... Trigger.
-- output ..... Tuple:
-- #1 .... Current number of missiles in magazine.
-- #2 .... Missile fired.
{-
Henrik's original version, commented out for now:
magazine :: Int -> Frequency -> SF (Event ()) (Int, Event ())
magazine n f = proc trigger -> do
reload <- repeatedly (1/f) () -< ()
-- We have a reverse application operator #, but for some reason arrowp
-- chokes on (#).
newLevelFire <- accumFilter (flip ($)) n -< (trigger `tag` dec)
`lMerge` (reload `tag` inc)
level <- hold n -< fmap fst newLevelFire
returnA -< (level, filterE snd newLevelFire `tag` ())
where
-- inc, dec :: Int -> (Int, Maybe (Int, Bool))
inc l | l < n = (l + 1, Just (l + 1, False))
| otherwise = (l, Nothing)
dec l | l > 0 = (l - 1, Just (l - 1, True))
| otherwise = (l, Nothing)
-}
------------------------------------------------------------------------------
-- Missile
------------------------------------------------------------------------------
-- Of course, this would be much better if we used the real impulse stuff:
-- No bogus iPre, for instance.
-- | A missile.  Flies ballistically from p0 with initial velocity v0
-- under gravity plus velocity impulses from the edge force field;
-- requests its own removal when hit (oiHit) or after missileLifeSpan
-- seconds.
missile :: Position2 -> Velocity2 -> Object
missile p0 v0 = proc oi -> do
  rec
    -- Basic physics
    -- iPre breaks the instantaneous feedback cycle: the force field
    -- reads the previous velocity sample, not the one being computed.
    vp <- iPre v0 -< v
    ffi <- forceField -< (p, vp)
    v <- (v0 ^+^) ^<< impulseIntegral -< (gravity, ffi)
    p <- (p0 .+^) ^<< integral -< v
  die <- after missileLifeSpan () -< ()
  returnA -< ObjOutput {
                 ooObsObjState = oosMissile p v,
                 ooKillReq = oiHit oi `lMerge` die,
                 ooSpawnReq = noEvent
             }
------------------------------------------------------------------------------
-- Alien
------------------------------------------------------------------------------
-- Shield strength; the alien dies when this reaches 0.
type ShieldLevel = Double
-- Alien behavior.
-- g .......... Random generator.
-- p0 ......... Initial position.
-- vyd ........ Desired vertical speed.
alien :: RandomGen g => g -> Position2 -> Velocity -> Object
alien g p0 vyd = proc oi -> do
  rec
    -- About 4% of time spent here.
    -- Pick a desired horizontal position: a fresh random target roughly
    -- every 5 seconds, held constant in between.
    rx <- noiseR (worldXMin, worldXMax) g -< ()
    sample <- occasionally g 5 () -< ()
    xd <- hold (point2X p0) -< sample `tag` rx
    -- Controller. Control constants not optimized. Who says aliens know
    -- anything about control theory?
    let axd = 5 * (xd - point2X p) - 3 * (vector2X v)
        ayd = 20 * (vyd - (vector2Y v))
        ad = vector2 axd ayd
        h = vector2Theta ad
    -- About 46% of time spent in Physics.
    -- Physics: clamp the magnitude of the desired acceleration to
    -- alienAccMax while keeping its direction h.
    let a = vector2Polar (min alienAccMax (vector2Rho ad)) h
    vp <- iPre v0 -< v
    ffi <- forceField -< (p, vp)
    -- 28 % of time spent in the following line.
    v <- (v0 ^+^) ^<< impulseIntegral -< (gravity ^+^ a, ffi)
    -- 25 % of time spent on the following line.
    -- (Surprising: integral should be cheaper than impulseIntegral,
    -- plus it does not add up!)
    p <- (p0 .+^) ^<< integral -< v
  -- Shields
  sl <- shield -< oiHit oi
  die <- edge -< sl <= 0
  returnA -< ObjOutput {
                 ooObsObjState = oosAlien p h v,
                 ooKillReq = die,
                 ooSpawnReq = noEvent
             }
  where
    v0 = zeroVector
    -- About 20% of the time spent here.
    -- Shield level: starts at slMax, recharges continuously at slMax/10
    -- per second while below the maximum; each hit applies an
    -- instantaneous impulse of damage (-50).
    shield :: SF (Event ()) ShieldLevel
    shield = proc hit -> do
      rec
        let rechargeRate = if sl < slMax then slMax / 10 else 0
        sl <- (slMax +) ^<< impulseIntegral -< (rechargeRate, hit `tag` damage)
      returnA -< sl
      where
        slMax = 100
        damage = -50
------------------------------------------------------------------------------
-- Force fields acting on objects
------------------------------------------------------------------------------
-- Object are subject to gravity and a strange repellent forcefield that
-- drives objects away from the edges, effectively creating a corridor.
-- The strange field is inversely proportional to the cube of the distance
-- from either edge. It is thought that the field is a remnant of a defence
-- system put in place by the mythical and technologically advanced
-- "Predecessors" eons ago.
{-
field :: Position2 -> Acceleration2
field (Point2 x _) = vector2 (leftAcc - rightAcc) 0 ^+^ gravity
where
leftAcc = min (if x > worldXMin
then k / (x - worldXMin)^3
else maxAcc)
maxAcc
rightAcc = min (if x < worldXMax
then k / (worldXMax - x)^3
else maxAcc)
maxAcc
k = 10000000
maxAcc = 10000
-}
-- New attempt. Force fields act like invisible walls.
-- The fact that this is a stateful *signal* function (Fields having state?
-- Come on ...), can be attributed to the fact that we are cheating in the
-- first place by abstracting events of short duration to instantaneous
-- events. "field" being a stateful signal function is part of the price
-- one has to pay for that to make this work in practice.
-- Not much time spent here, it seems.
-- Emits a velocity impulse of (-2 * vx, 0) when an object crosses the
-- left edge moving left, or the right edge moving right.  Added to the
-- current velocity this reverses the horizontal component, i.e. an
-- elastic bounce off the invisible side walls.
forceField :: SF (Position2, Velocity2) (Event Acceleration2)
forceField = proc (p, v) -> do
  lfi <- edge -< point2X p < worldXMin && vector2X v < 0
  rfi <- edge -< point2X p > worldXMax && vector2X v > 0
  returnA -< (mergeBy (^+^) (lfi `tag` (vector2 (-2 * vector2X v) 0))
                            (rfi `tag` (vector2 (-2 * vector2X v) 0)))
-- Constant downward gravitational acceleration.
gravity = vector2 0 (-20)
------------------------------------------------------------------------------
-- Support
------------------------------------------------------------------------------
-- | Clamp @x@ to the closed interval [@ll@, @ul@].  The lower bound is
-- checked first, so if @ll > ul@ the lower bound wins -- this matches
-- the ordering of the original if-then-else chain.
limit :: Ord a => a -> a -> a -> a
limit ll ul x
  | x < ll    = ll
  | x > ul    = ul
  | otherwise = x

-- | Clamp to the symmetric interval [-|l|, |l|].
symLimit :: (Num a, Ord a) => a -> a -> a
symLimit l = limit (-absl) absl
  where
    absl = abs l
| ivanperez-keera/SpaceInvaders | src/ObjectBehavior.hs | bsd-3-clause | 10,453 | 6 | 27 | 3,634 | 1,733 | 953 | 780 | -1 | -1 |
{-# language CPP #-}
-- | = Name
--
-- VK_EXT_conditional_rendering - device extension
--
-- == VK_EXT_conditional_rendering
--
-- [__Name String__]
-- @VK_EXT_conditional_rendering@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 82
--
-- [__Revision__]
-- 2
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- [__Contact__]
--
-- - Vikram Kushwaha
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_EXT_conditional_rendering] @vkushwaha%0A<<Here describe the issue or question you have about the VK_EXT_conditional_rendering extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2018-05-21
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Vikram Kushwaha, NVIDIA
--
-- - Daniel Rakos, AMD
--
-- - Jesse Hall, Google
--
-- - Jeff Bolz, NVIDIA
--
-- - Piers Daniell, NVIDIA
--
-- - Stuart Smith, Imagination Technologies
--
-- == Description
--
-- This extension allows the execution of one or more rendering commands to
-- be conditional on a value in buffer memory. This may help an application
-- reduce the latency by conditionally discarding rendering commands
-- without application intervention. The conditional rendering commands are
-- limited to draws, compute dispatches and clearing attachments within a
-- conditional rendering block.
--
-- == New Commands
--
-- - 'cmdBeginConditionalRenderingEXT'
--
-- - 'cmdEndConditionalRenderingEXT'
--
-- == New Structures
--
-- - 'ConditionalRenderingBeginInfoEXT'
--
-- - Extending
-- 'Vulkan.Core10.CommandBuffer.CommandBufferInheritanceInfo':
--
-- - 'CommandBufferInheritanceConditionalRenderingInfoEXT'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDeviceConditionalRenderingFeaturesEXT'
--
-- == New Enums
--
-- - 'ConditionalRenderingFlagBitsEXT'
--
-- == New Bitmasks
--
-- - 'ConditionalRenderingFlagsEXT'
--
-- == New Enum Constants
--
-- - 'EXT_CONDITIONAL_RENDERING_EXTENSION_NAME'
--
-- - 'EXT_CONDITIONAL_RENDERING_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.AccessFlagBits.AccessFlagBits':
--
-- - 'Vulkan.Core10.Enums.AccessFlagBits.ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT'
--
-- - Extending
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BufferUsageFlagBits':
--
-- - 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT'
--
-- - Extending
-- 'Vulkan.Core10.Enums.PipelineStageFlagBits.PipelineStageFlagBits':
--
-- - 'Vulkan.Core10.Enums.PipelineStageFlagBits.PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT'
--
-- == Issues
--
-- 1) Should conditional rendering affect copy and blit commands?
--
-- __RESOLVED__: Conditional rendering should not affect copies and blits.
--
-- 2) Should secondary command buffers be allowed to execute while
-- conditional rendering is active in the primary command buffer?
--
-- __RESOLVED__: The rendering commands in secondary command buffer will be
-- affected by an active conditional rendering in primary command buffer if
-- the @conditionalRenderingEnable@ is set to
-- 'Vulkan.Core10.FundamentalTypes.TRUE'. Conditional rendering /must/ not
-- be active in the primary command buffer if @conditionalRenderingEnable@
-- is 'Vulkan.Core10.FundamentalTypes.FALSE'.
--
-- == Examples
--
-- None.
--
-- == Version History
--
-- - Revision 1, 2018-04-19 (Vikram Kushwaha)
--
-- - First Version
--
-- - Revision 2, 2018-05-21 (Vikram Kushwaha)
--
-- - Add new pipeline stage, access flags and limit conditional
-- rendering to a subpass or entire render pass.
--
-- == See Also
--
-- 'CommandBufferInheritanceConditionalRenderingInfoEXT',
-- 'ConditionalRenderingBeginInfoEXT', 'ConditionalRenderingFlagBitsEXT',
-- 'ConditionalRenderingFlagsEXT',
-- 'PhysicalDeviceConditionalRenderingFeaturesEXT',
-- 'cmdBeginConditionalRenderingEXT', 'cmdEndConditionalRenderingEXT'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_EXT_conditional_rendering Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_EXT_conditional_rendering ( cmdBeginConditionalRenderingEXT
, cmdUseConditionalRenderingEXT
, cmdEndConditionalRenderingEXT
, ConditionalRenderingBeginInfoEXT(..)
, CommandBufferInheritanceConditionalRenderingInfoEXT(..)
, PhysicalDeviceConditionalRenderingFeaturesEXT(..)
, ConditionalRenderingFlagsEXT
, ConditionalRenderingFlagBitsEXT( CONDITIONAL_RENDERING_INVERTED_BIT_EXT
, ..
)
, EXT_CONDITIONAL_RENDERING_SPEC_VERSION
, pattern EXT_CONDITIONAL_RENDERING_SPEC_VERSION
, EXT_CONDITIONAL_RENDERING_EXTENSION_NAME
, pattern EXT_CONDITIONAL_RENDERING_EXTENSION_NAME
) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import GHC.Show (showString)
import Numeric (showHex)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero)
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Handles (Buffer)
import Vulkan.Core10.Handles (CommandBuffer)
import Vulkan.Core10.Handles (CommandBuffer(..))
import Vulkan.Core10.Handles (CommandBuffer(CommandBuffer))
import Vulkan.Core10.Handles (CommandBuffer_T)
import Vulkan.Dynamic (DeviceCmds(pVkCmdBeginConditionalRenderingEXT))
import Vulkan.Dynamic (DeviceCmds(pVkCmdEndConditionalRenderingEXT))
import Vulkan.Core10.FundamentalTypes (DeviceSize)
import Vulkan.Core10.FundamentalTypes (Flags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT))
-- Wrap the dynamically-loaded vkCmdBeginConditionalRenderingEXT function
-- pointer as a callable Haskell function.  The call is marked "unsafe"
-- unless SAFE_FOREIGN_CALLS is defined at build time.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkCmdBeginConditionalRenderingEXT
  :: FunPtr (Ptr CommandBuffer_T -> Ptr ConditionalRenderingBeginInfoEXT -> IO ()) -> Ptr CommandBuffer_T -> Ptr ConditionalRenderingBeginInfoEXT -> IO ()
-- | vkCmdBeginConditionalRenderingEXT - Define the beginning of a
-- conditional rendering block
--
-- == Valid Usage
--
-- - #VUID-vkCmdBeginConditionalRenderingEXT-None-01980# Conditional
-- rendering /must/ not already be
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#active-conditional-rendering active>
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-parameter#
-- @commandBuffer@ /must/ be a valid
-- 'Vulkan.Core10.Handles.CommandBuffer' handle
--
-- - #VUID-vkCmdBeginConditionalRenderingEXT-pConditionalRenderingBegin-parameter#
-- @pConditionalRenderingBegin@ /must/ be a valid pointer to a valid
-- 'ConditionalRenderingBeginInfoEXT' structure
--
-- - #VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-recording#
-- @commandBuffer@ /must/ be in the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdBeginConditionalRenderingEXT-commandBuffer-cmdpool# The
-- 'Vulkan.Core10.Handles.CommandPool' that @commandBuffer@ was
-- allocated from /must/ support graphics, or compute operations
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
-- - Host access to the 'Vulkan.Core10.Handles.CommandPool' that
-- @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
-- | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types> |
-- +============================================================================================================================+========================================================================================================================+=======================================================================================================================+
-- | Primary | Both | Graphics |
-- | Secondary | | Compute |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_conditional_rendering VK_EXT_conditional_rendering>,
-- 'Vulkan.Core10.Handles.CommandBuffer',
-- 'ConditionalRenderingBeginInfoEXT'
cmdBeginConditionalRenderingEXT :: forall io
                                 . (MonadIO io)
                                => -- | @commandBuffer@ is the command buffer into which this command will be
                                   -- recorded.
                                   CommandBuffer
                                -> -- | @pConditionalRenderingBegin@ is a pointer to a
                                   -- 'ConditionalRenderingBeginInfoEXT' structure specifying parameters of
                                   -- conditional rendering.
                                   ConditionalRenderingBeginInfoEXT
                                -> io ()
cmdBeginConditionalRenderingEXT commandBuffer conditionalRenderingBegin = liftIO . evalContT $ do
  -- Fetch the extension entry point from the device's dynamic command table.
  let vkCmdBeginConditionalRenderingEXTPtr = pVkCmdBeginConditionalRenderingEXT (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
  -- Fail with an IOError if the function pointer was never loaded
  -- (presumably the extension was not enabled on this device).
  lift $ unless (vkCmdBeginConditionalRenderingEXTPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdBeginConditionalRenderingEXT is null" Nothing Nothing
  let vkCmdBeginConditionalRenderingEXT' = mkVkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXTPtr
  -- Marshal the begin-info struct into C memory for the duration of the call.
  pConditionalRenderingBegin <- ContT $ withCStruct (conditionalRenderingBegin)
  lift $ traceAroundEvent "vkCmdBeginConditionalRenderingEXT" (vkCmdBeginConditionalRenderingEXT' (commandBufferHandle (commandBuffer)) pConditionalRenderingBegin)
  pure $ ()
-- | This function will call the supplied action between calls to
-- 'cmdBeginConditionalRenderingEXT' and 'cmdEndConditionalRenderingEXT'
--
-- Note that 'cmdEndConditionalRenderingEXT' is *not* called if an
-- exception is thrown by the inner action.
cmdUseConditionalRenderingEXT :: forall io r . MonadIO io => CommandBuffer -> ConditionalRenderingBeginInfoEXT -> io r -> io r
cmdUseConditionalRenderingEXT commandBuffer pConditionalRenderingBegin a = do
  -- Open the conditional rendering block, run the caller's action, then
  -- close the block and return the action's result.  Deliberately not
  -- bracketed: if the action throws, the end call is skipped (see the
  -- note in the documentation above).
  cmdBeginConditionalRenderingEXT commandBuffer pConditionalRenderingBegin
  r <- a
  cmdEndConditionalRenderingEXT commandBuffer
  pure r
-- Wrap the dynamically-loaded vkCmdEndConditionalRenderingEXT function
-- pointer as a callable Haskell function.  The call is marked "unsafe"
-- unless SAFE_FOREIGN_CALLS is defined at build time.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkCmdEndConditionalRenderingEXT
  :: FunPtr (Ptr CommandBuffer_T -> IO ()) -> Ptr CommandBuffer_T -> IO ()
-- | vkCmdEndConditionalRenderingEXT - Define the end of a conditional
-- rendering block
--
-- = Description
--
-- Once ended, conditional rendering becomes inactive.
--
-- == Valid Usage
--
-- - #VUID-vkCmdEndConditionalRenderingEXT-None-01985# Conditional
-- rendering /must/ be
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#active-conditional-rendering active>
--
-- - #VUID-vkCmdEndConditionalRenderingEXT-None-01986# If conditional
-- rendering was made
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#active-conditional-rendering active>
-- outside of a render pass instance, it /must/ not be ended inside a
-- render pass instance
--
-- - #VUID-vkCmdEndConditionalRenderingEXT-None-01987# If conditional
-- rendering was made
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#active-conditional-rendering active>
-- within a subpass it /must/ be ended in the same subpass
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-parameter#
-- @commandBuffer@ /must/ be a valid
-- 'Vulkan.Core10.Handles.CommandBuffer' handle
--
-- - #VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-recording#
-- @commandBuffer@ /must/ be in the
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#commandbuffers-lifecycle recording state>
--
-- - #VUID-vkCmdEndConditionalRenderingEXT-commandBuffer-cmdpool# The
-- 'Vulkan.Core10.Handles.CommandPool' that @commandBuffer@ was
-- allocated from /must/ support graphics, or compute operations
--
-- == Host Synchronization
--
-- - Host access to @commandBuffer@ /must/ be externally synchronized
--
-- - Host access to the 'Vulkan.Core10.Handles.CommandPool' that
-- @commandBuffer@ was allocated from /must/ be externally synchronized
--
-- == Command Properties
--
-- \'
--
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
-- | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkCommandBufferLevel Command Buffer Levels> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#vkCmdBeginRenderPass Render Pass Scope> | <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VkQueueFlagBits Supported Queue Types> |
-- +============================================================================================================================+========================================================================================================================+=======================================================================================================================+
-- | Primary | Both | Graphics |
-- | Secondary | | Compute |
-- +----------------------------------------------------------------------------------------------------------------------------+------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------------------------+
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_conditional_rendering VK_EXT_conditional_rendering>,
-- 'Vulkan.Core10.Handles.CommandBuffer'
-- Generated FFI shim; see 'cmdBeginConditionalRenderingEXT' for the common
-- lookup/guard/call pattern.  No struct marshalling is needed here, so the
-- ContT plumbing is absent.
cmdEndConditionalRenderingEXT :: forall io
                               . (MonadIO io)
                              => -- | @commandBuffer@ is the command buffer into which this command will be
                                 -- recorded.
                                 CommandBuffer
                              -> io ()
cmdEndConditionalRenderingEXT commandBuffer = liftIO $ do
  let vkCmdEndConditionalRenderingEXTPtr = pVkCmdEndConditionalRenderingEXT (case commandBuffer of CommandBuffer{deviceCmds} -> deviceCmds)
  -- Throw a descriptive IOError when the extension entry point was never
  -- loaded (null pointer) instead of segfaulting in C.
  unless (vkCmdEndConditionalRenderingEXTPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCmdEndConditionalRenderingEXT is null" Nothing Nothing
  let vkCmdEndConditionalRenderingEXT' = mkVkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXTPtr
  traceAroundEvent "vkCmdEndConditionalRenderingEXT" (vkCmdEndConditionalRenderingEXT' (commandBufferHandle (commandBuffer)))
  pure $ ()
-- | VkConditionalRenderingBeginInfoEXT - Structure specifying conditional
-- rendering begin information
--
-- = Description
--
-- If the 32-bit value at @offset@ in @buffer@ memory is zero, then the
-- rendering commands are discarded, otherwise they are executed as normal.
-- If the value of the predicate in buffer memory changes while conditional
-- rendering is active, the rendering commands /may/ be discarded in an
-- implementation-dependent way. Some implementations may latch the value
-- of the predicate upon beginning conditional rendering while others may
-- read it before every rendering command.
--
-- == Valid Usage
--
-- - #VUID-VkConditionalRenderingBeginInfoEXT-buffer-01981# If @buffer@
-- is non-sparse then it /must/ be bound completely and contiguously to
-- a single 'Vulkan.Core10.Handles.DeviceMemory' object
--
-- - #VUID-VkConditionalRenderingBeginInfoEXT-buffer-01982# @buffer@
-- /must/ have been created with the
-- 'Vulkan.Core10.Enums.BufferUsageFlagBits.BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT'
-- bit set
--
-- - #VUID-VkConditionalRenderingBeginInfoEXT-offset-01983# @offset@
-- /must/ be less than the size of @buffer@ by at least 32 bits
--
-- - #VUID-VkConditionalRenderingBeginInfoEXT-offset-01984# @offset@
-- /must/ be a multiple of 4
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkConditionalRenderingBeginInfoEXT-sType-sType# @sType@ /must/
-- be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT'
--
-- - #VUID-VkConditionalRenderingBeginInfoEXT-pNext-pNext# @pNext@ /must/
-- be @NULL@
--
-- - #VUID-VkConditionalRenderingBeginInfoEXT-buffer-parameter# @buffer@
-- /must/ be a valid 'Vulkan.Core10.Handles.Buffer' handle
--
-- - #VUID-VkConditionalRenderingBeginInfoEXT-flags-parameter# @flags@
-- /must/ be a valid combination of 'ConditionalRenderingFlagBitsEXT'
-- values
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_conditional_rendering VK_EXT_conditional_rendering>,
-- 'Vulkan.Core10.Handles.Buffer', 'ConditionalRenderingFlagsEXT',
-- 'Vulkan.Core10.FundamentalTypes.DeviceSize',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'cmdBeginConditionalRenderingEXT'
-- Haskell mirror of the C struct @VkConditionalRenderingBeginInfoEXT@.
-- The C-side @sType@ and @pNext@ members are implicit; the 'ToCStruct'
-- instance fills them in during marshalling.
data ConditionalRenderingBeginInfoEXT = ConditionalRenderingBeginInfoEXT
  { -- | @buffer@ is a buffer containing the predicate for conditional rendering.
    buffer :: Buffer
  , -- | @offset@ is the byte offset into @buffer@ where the predicate is
    -- located.
    offset :: DeviceSize
  , -- | @flags@ is a bitmask of 'ConditionalRenderingFlagsEXT' specifying the
    -- behavior of conditional rendering.
    flags :: ConditionalRenderingFlagsEXT
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (ConditionalRenderingBeginInfoEXT)
#endif
deriving instance Show ConditionalRenderingBeginInfoEXT
-- C layout (40 bytes, 8-byte aligned):
--   0  sType, 8 pNext, 16 buffer, 24 offset, 32 flags.
instance ToCStruct ConditionalRenderingBeginInfoEXT where
  withCStruct x f = allocaBytes 40 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p ConditionalRenderingBeginInfoEXT{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Buffer)) (buffer)
    poke ((p `plusPtr` 24 :: Ptr DeviceSize)) (offset)
    poke ((p `plusPtr` 32 :: Ptr ConditionalRenderingFlagsEXT)) (flags)
    f
  cStructSize = 40
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Buffer)) (zero)
    poke ((p `plusPtr` 24 :: Ptr DeviceSize)) (zero)
    -- NOTE(review): @flags@ (offset 32) is intentionally not poked here;
    -- presumably the generator treats optional bitmask fields as
    -- zero-by-default in pokeZeroCStruct — TODO confirm against the
    -- generator convention before relying on the byte at offset 32.
    f
-- Read the three payload fields back from their fixed offsets (16/24/32);
-- sType and pNext are not represented on the Haskell side.
instance FromCStruct ConditionalRenderingBeginInfoEXT where
  peekCStruct p =
    ConditionalRenderingBeginInfoEXT
      <$> peek @Buffer ((p `plusPtr` 16 :: Ptr Buffer))
      <*> peek @DeviceSize ((p `plusPtr` 24 :: Ptr DeviceSize))
      <*> peek @ConditionalRenderingFlagsEXT ((p `plusPtr` 32 :: Ptr ConditionalRenderingFlagsEXT))
-- Storable delegates to the CStruct machinery; size/alignment must match
-- 'cStructSize' / 'cStructAlignment' above.
instance Storable ConditionalRenderingBeginInfoEXT where
  sizeOf ~_ = 40
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
-- All-zero value, used for zero-initialised struct chains.
instance Zero ConditionalRenderingBeginInfoEXT where
  zero = ConditionalRenderingBeginInfoEXT
           zero
           zero
           zero
-- | VkCommandBufferInheritanceConditionalRenderingInfoEXT - Structure
-- specifying command buffer inheritance information
--
-- = Description
--
-- If this structure is not present, the behavior is as if
-- @conditionalRenderingEnable@ is 'Vulkan.Core10.FundamentalTypes.FALSE'.
--
-- == Valid Usage
--
-- - #VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-conditionalRenderingEnable-01977#
-- If the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-inheritedConditionalRendering inherited conditional rendering>
-- feature is not enabled, @conditionalRenderingEnable@ /must/ be
-- 'Vulkan.Core10.FundamentalTypes.FALSE'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkCommandBufferInheritanceConditionalRenderingInfoEXT-sType-sType#
-- @sType@ /must/ be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_conditional_rendering VK_EXT_conditional_rendering>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- Haskell mirror of @VkCommandBufferInheritanceConditionalRenderingInfoEXT@;
-- a single Bool payload, with sType/pNext supplied by 'ToCStruct'.
data CommandBufferInheritanceConditionalRenderingInfoEXT = CommandBufferInheritanceConditionalRenderingInfoEXT
  { -- | @conditionalRenderingEnable@ specifies whether the command buffer /can/
    -- be executed while conditional rendering is active in the primary command
    -- buffer. If this is 'Vulkan.Core10.FundamentalTypes.TRUE', then this
    -- command buffer /can/ be executed whether the primary command buffer has
    -- active conditional rendering or not. If this is
    -- 'Vulkan.Core10.FundamentalTypes.FALSE', then the primary command buffer
    -- /must/ not have conditional rendering active.
    conditionalRenderingEnable :: Bool }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (CommandBufferInheritanceConditionalRenderingInfoEXT)
#endif
deriving instance Show CommandBufferInheritanceConditionalRenderingInfoEXT
-- C layout (24 bytes, 8-byte aligned):
--   0 sType, 8 pNext, 16 conditionalRenderingEnable (VkBool32).
instance ToCStruct CommandBufferInheritanceConditionalRenderingInfoEXT where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p CommandBufferInheritanceConditionalRenderingInfoEXT{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    -- Haskell Bool is widened to the 32-bit VkBool32 encoding.
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (conditionalRenderingEnable))
    f
  cStructSize = 24
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
    f
-- Inverse of the ToCStruct instance: read the VkBool32 at offset 16 and
-- narrow it back to a Haskell Bool.
instance FromCStruct CommandBufferInheritanceConditionalRenderingInfoEXT where
  peekCStruct p =
    CommandBufferInheritanceConditionalRenderingInfoEXT . bool32ToBool
      <$> peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
-- Storable delegates to the CStruct machinery; size/alignment must match
-- 'cStructSize' / 'cStructAlignment' above.
instance Storable CommandBufferInheritanceConditionalRenderingInfoEXT where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
-- All-zero value (conditionalRenderingEnable = False).
instance Zero CommandBufferInheritanceConditionalRenderingInfoEXT where
  zero = CommandBufferInheritanceConditionalRenderingInfoEXT
           zero
-- | VkPhysicalDeviceConditionalRenderingFeaturesEXT - Structure describing
-- if a secondary command buffer can be executed if conditional rendering
-- is active in the primary command buffer
--
-- = Members
--
-- This structure describes the following features:
--
-- = Description
--
-- If the 'PhysicalDeviceConditionalRenderingFeaturesEXT' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceConditionalRenderingFeaturesEXT' /can/ also be
-- used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' to
-- selectively enable these features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_conditional_rendering VK_EXT_conditional_rendering>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- Haskell mirror of @VkPhysicalDeviceConditionalRenderingFeaturesEXT@:
-- two feature booleans, with sType/pNext supplied by 'ToCStruct'.
data PhysicalDeviceConditionalRenderingFeaturesEXT = PhysicalDeviceConditionalRenderingFeaturesEXT
  { -- | #features-conditionalRendering# @conditionalRendering@ specifies whether
    -- conditional rendering is supported.
    conditionalRendering :: Bool
  , -- | #features-inheritedConditionalRendering# @inheritedConditionalRendering@
    -- specifies whether a secondary command buffer /can/ be executed while
    -- conditional rendering is active in the primary command buffer.
    inheritedConditionalRendering :: Bool
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceConditionalRenderingFeaturesEXT)
#endif
deriving instance Show PhysicalDeviceConditionalRenderingFeaturesEXT
-- C layout (24 bytes, 8-byte aligned):
--   0 sType, 8 pNext, 16 and 20 hold the two packed VkBool32 features.
instance ToCStruct PhysicalDeviceConditionalRenderingFeaturesEXT where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p PhysicalDeviceConditionalRenderingFeaturesEXT{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (conditionalRendering))
    poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (inheritedConditionalRendering))
    f
  cStructSize = 24
  cStructAlignment = 8
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
    poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
    f
-- Inverse of the ToCStruct instance: both feature flags live in adjacent
-- 4-byte VkBool32 slots at offsets 16 and 20.
instance FromCStruct PhysicalDeviceConditionalRenderingFeaturesEXT where
  peekCStruct p = do
    let bool32At off = peek @Bool32 (p `plusPtr` off :: Ptr Bool32)
    cr <- bool32At 16
    icr <- bool32At 20
    pure (PhysicalDeviceConditionalRenderingFeaturesEXT (bool32ToBool cr)
                                                        (bool32ToBool icr))
-- Storable delegates to the CStruct machinery; size/alignment must match
-- 'cStructSize' / 'cStructAlignment' above.
instance Storable PhysicalDeviceConditionalRenderingFeaturesEXT where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
-- All-zero value (both features False).
instance Zero PhysicalDeviceConditionalRenderingFeaturesEXT where
  zero = PhysicalDeviceConditionalRenderingFeaturesEXT
           zero
           zero
-- Vulkan's usual Flags/FlagBits pairing: the plural alias is the same
-- underlying type as the bit enumeration.
type ConditionalRenderingFlagsEXT = ConditionalRenderingFlagBitsEXT
-- | VkConditionalRenderingFlagBitsEXT - Specify the behavior of conditional
-- rendering
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_conditional_rendering VK_EXT_conditional_rendering>,
-- 'ConditionalRenderingFlagsEXT'
newtype ConditionalRenderingFlagBitsEXT = ConditionalRenderingFlagBitsEXT Flags
  deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)
-- | 'CONDITIONAL_RENDERING_INVERTED_BIT_EXT' specifies the condition used to
-- determine whether to discard rendering commands or not. That is, if the
-- 32-bit predicate read from @buffer@ memory at @offset@ is zero, the
-- rendering commands are not discarded, and if non zero, then they are
-- discarded.
pattern CONDITIONAL_RENDERING_INVERTED_BIT_EXT = ConditionalRenderingFlagBitsEXT 0x00000001
-- Show/Read plumbing: constructor name, common enum prefix, and the table
-- of known bits, shared by the generic enumShowsPrec/enumReadPrec helpers.
conNameConditionalRenderingFlagBitsEXT :: String
conNameConditionalRenderingFlagBitsEXT = "ConditionalRenderingFlagBitsEXT"
enumPrefixConditionalRenderingFlagBitsEXT :: String
enumPrefixConditionalRenderingFlagBitsEXT = "CONDITIONAL_RENDERING_INVERTED_BIT_EXT"
showTableConditionalRenderingFlagBitsEXT :: [(ConditionalRenderingFlagBitsEXT, String)]
showTableConditionalRenderingFlagBitsEXT = [(CONDITIONAL_RENDERING_INVERTED_BIT_EXT, "")]
-- Unknown bits are rendered as a hexadecimal literal.
instance Show ConditionalRenderingFlagBitsEXT where
  showsPrec = enumShowsPrec enumPrefixConditionalRenderingFlagBitsEXT
                            showTableConditionalRenderingFlagBitsEXT
                            conNameConditionalRenderingFlagBitsEXT
                            (\(ConditionalRenderingFlagBitsEXT x) -> x)
                            (\x -> showString "0x" . showHex x)
instance Read ConditionalRenderingFlagBitsEXT where
  readPrec = enumReadPrec enumPrefixConditionalRenderingFlagBitsEXT
                          showTableConditionalRenderingFlagBitsEXT
                          conNameConditionalRenderingFlagBitsEXT
                          ConditionalRenderingFlagBitsEXT
type EXT_CONDITIONAL_RENDERING_SPEC_VERSION = 2
-- No documentation found for TopLevel "VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION"
pattern EXT_CONDITIONAL_RENDERING_SPEC_VERSION :: forall a . Integral a => a
pattern EXT_CONDITIONAL_RENDERING_SPEC_VERSION = 2
type EXT_CONDITIONAL_RENDERING_EXTENSION_NAME = "VK_EXT_conditional_rendering"
-- No documentation found for TopLevel "VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME"
pattern EXT_CONDITIONAL_RENDERING_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern EXT_CONDITIONAL_RENDERING_EXTENSION_NAME = "VK_EXT_conditional_rendering"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_EXT_conditional_rendering.hs | bsd-3-clause | 34,838 | 1 | 17 | 6,620 | 3,595 | 2,163 | 1,432 | -1 | -1 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
-- LiquidHaskell termination metrics for the mutual recursion: calls are
-- ordered by the lexicographic pair [argument, tag].  isEven n may call
-- isOdd (n-1) because [n-1,1] < [n,0]; isOdd k may call isEven k because
-- [k,0] < [k,1].  NOTE(review): this file is an LH test fixture — the
-- exact metrics are the point of the test, so the code must not change.
[lq| isEven :: n:Nat -> Bool / [n, 0]|]
[lq| isOdd :: m:Nat -> Bool / [m, 1] |]
isEven, isOdd :: Int -> Bool
isEven 0 = True
isEven n = isOdd $ n - 1
isOdd k = not $ isEven k
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/Even0.hs | bsd-3-clause | 239 | 0 | 6 | 64 | 69 | 39 | 30 | 8 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Servant.ContentType.PlainHtml where
import Data.Typeable (Typeable)
import Network.HTTP.Media ((//), (/:))
import Servant.API (Accept (..), MimeRender (..))
import Text.Blaze.Html (ToMarkup, toMarkup)
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
-- | Uninhabited content-type tag for servant routes that return blaze HTML
-- as @text/html;charset=utf-8@.
data PlainHtml deriving Typeable
instance Accept PlainHtml where
  contentType _ = "text" // "html" /: ("charset", "utf-8")
-- OVERLAPPABLE so a user can give a specific type its own, more direct
-- MimeRender PlainHtml instance without clashing with this generic one.
instance {-# OVERLAPPABLE #-}
  ToMarkup a => MimeRender PlainHtml a where
  mimeRender _ = renderHtml . toMarkup
| notcome/liu-ms-adult | src/Servant/ContentType/PlainHtml.hs | bsd-3-clause | 693 | 0 | 7 | 131 | 153 | 95 | 58 | -1 | -1 |
module Spec.CSRFile where
import Spec.CSRField
import Utility.Utility
import Data.Bits
import Data.List
import qualified Data.Map.Strict as M
import Data.Maybe
type CSRFile = M.Map CSRField MachineInt
-- | Encode a string of extension letters (e.g. \"IMSU\") as the MISA-style
-- extensions bitmask: bit 0 for \'A\', bit 1 for \'B\', …, bit 25 for \'Z\'.
--
-- Behaviour for letters in A..Z is unchanged; letters outside that range
-- now raise an informative error instead of an anonymous 'fromJust'
-- pattern failure.
encodeExtensions :: String -> MachineInt
encodeExtensions extensions = foldr (.|.) 0 (map encode extensions)
  where encode c = case c `elemIndex` ['A'..'Z'] of
          Just i  -> bit i
          Nothing -> error ("encodeExtensions: not an extension letter: " ++ show c)
-- | A CSR file with no fields set; absent fields read as 0 via 'getField'.
-- (Type signature added: the binding previously relied on defaulting and
-- tripped -Wmissing-signatures.)
emptyFile :: CSRFile
emptyFile = M.empty
-- | Initial CSR state after reset for a given XLEN.
--
-- MXL encodes the machine word width (1 = 32-bit, 2 = 64-bit); the
-- Extensions field advertises the supported ISA letters.  The previous
-- version was non-exhaustive — any XLEN other than 32 or 64 died with a
-- bare pattern-match failure; it now reports the bad argument.
resetCSRFile :: Integer -> CSRFile
resetCSRFile 32 = M.fromList [(MXL, 1), (Extensions, encodeExtensions "IMSU")]
resetCSRFile 64 = M.fromList [(MXL, 2), (Extensions, encodeExtensions "IAMSU")]
resetCSRFile xlen = error ("resetCSRFile: unsupported XLEN " ++ show xlen ++ " (expected 32 or 64)")
-- | Read a CSR field, defaulting to 0 when it has never been written.
-- Uses 'M.findWithDefault' directly instead of @fromMaybe 0 . M.lookup@;
-- behaviour is identical.
getField :: CSRField -> CSRFile -> MachineInt
getField field file = M.findWithDefault 0 field file
-- | Overwrite (or create) a single CSR field.  Point-free alias for
-- 'M.insert', kept for naming symmetry with 'getField'.
setField :: CSRField -> MachineInt -> CSRFile -> CSRFile
setField = M.insert
| mit-plv/riscv-semantics | src/Spec/CSRFile.hs | bsd-3-clause | 773 | 0 | 12 | 109 | 268 | 149 | 119 | 19 | 1 |
{- |
Module : Network.Silver.Proto
Description : Bittorrent peer protocol.
Copyright : (c) Eric Izoita 2017
License : BSD3
Maintainer : ericizoita@gmail.com
Stability : experimental
Portability : portable
This module handles the bittorrent peer protocol.
-}
{-# LANGUAGE OverloadedStrings #-}
module Network.Silver.Proto
( PeerStatus(..)
, Handshake(..)
, Message(..)
) where
-- Binary Data
import Data.Binary (Binary(..))
import Data.Binary.Get (Get, getByteString)
import Data.Binary.Put (putByteString)
import qualified Data.ByteString.Char8 as BS
import Data.ByteString.Char8 (ByteString)
import Data.Word (Word32, Word8)
-- Things to add
-- - network hole punching over uTP through
-- an intermediary unrestricted STUN host
-- | Liveness of a remote peer.  A live peer carries the two standard
-- per-connection flags from the BitTorrent peer wire protocol.
data PeerStatus
  = Dead
  | Alive Bool -- choked
          Bool -- interested
  deriving (Show)
-- | The opening handshake of the peer wire protocol.  Both fields are
-- required to be exactly 20 bytes (enforced by the 'Binary' instance).
data Handshake =
  Handshake ByteString -- info hash
            ByteString -- peer id
  deriving (Show)
-- | Wire format per BEP 3: a one-byte length (19), the protocol string,
-- 8 reserved bytes, the 20-byte info hash and the 20-byte peer id.
--
-- Fix: the protocol identifier is @\"BitTorrent protocol\"@ (capital T).
-- The previous spelling @\"Bittorrent protocol\"@ would never match a real
-- peer's handshake and would cause real peers to drop ours.
instance Binary Handshake where
  get = do
    len <- get :: Get Word8
    case len of
      19 -> do
        proto <- getByteString (fromIntegral len)
        if proto == "BitTorrent protocol"
          then do
            _ <- get :: Get Word32 -- reserved bytes 0-3 (extension bits, ignored)
            _ <- get :: Get Word32 -- reserved bytes 4-7
            info <- getByteString 20
            peer <- getByteString 20
            return $ Handshake info peer
          else fail "invalid protocol string"
      _ -> fail "invalid handshake length"
  put (Handshake info peer)
    | BS.length info /= 20 = fail "invalid info hash length"
    | BS.length peer /= 20 = fail "invalid peer id length"
    | otherwise = do
      put (19 :: Word8)
      putByteString "BitTorrent protocol"
      put (0 :: Word32)
      put (0 :: Word32)
      putByteString info
      putByteString peer
-- | Peer wire protocol messages after the handshake (BEP 3).  Each
-- constructor corresponds to one message id; 'MsgKeepAlive' is the
-- zero-length frame with no id byte.
data Message
  = MsgKeepAlive
  | MsgChoke
  | MsgUnChoke
  | MsgInterested
  | MsgNotInterested
  | MsgHave Word32 -- piece index
  | MsgBitfield ByteString
  | MsgRequest Word32 -- piece index
               Word32 -- begin byte offset
               Word32 -- length
  | MsgPiece Word32 -- piece index
             Word32 -- begin byte offset
             ByteString -- piece data
  | MsgCancel Word32 -- piece index
              Word32 -- begin byte offset
              Word32 -- length
  deriving (Show)
-- Frame layout: 4-byte big-endian length prefix covering the id byte plus
-- payload, then (unless keep-alive) a 1-byte message id.  The length
-- prefixes written by 'put' are therefore always payload-size + 1.
instance Binary Message where
  get = do
    len <- get :: Get Word32
    case len of
      -- A zero length means keep-alive: no id byte follows.
      0 -> return MsgKeepAlive
      _ -> do
        msgId <- get :: Get Word8
        case msgId of
          0 -> return MsgChoke
          1 -> return MsgUnChoke
          2 -> return MsgInterested
          3 -> return MsgNotInterested
          4 -> MsgHave <$> (get :: Get Word32)
          -- Bitfield payload is everything after the id byte.
          5 -> MsgBitfield <$> getByteString (fromIntegral len - 1)
          6 -> MsgRequest <$> get <*> get <*> get
          7 -> do
            -- Piece header is 9 bytes (id + index + offset); the rest of
            -- the frame is the block data.
            let block = getByteString (fromIntegral len - 9)
            MsgPiece <$> get <*> get <*> block
          8 -> MsgCancel <$> get <*> get <*> get
          _ -> fail "invalid protocol message id"
  put MsgKeepAlive = do
    put (0 :: Word32)
  put MsgChoke = do
    put (1 :: Word32) >> put (0 :: Word8)
  put MsgUnChoke = do
    put (1 :: Word32) >> put (1 :: Word8)
  put MsgInterested = do
    put (1 :: Word32) >> put (2 :: Word8)
  put MsgNotInterested = do
    put (1 :: Word32) >> put (3 :: Word8)
  put (MsgHave idx) = do
    put (5 :: Word32) >> put (4 :: Word8) >> put idx
  put (MsgBitfield field) = do
    let len = fromIntegral $ 1 + BS.length field
    put (len :: Word32) >> put (5 :: Word8)
    putByteString field
  put (MsgRequest index offset len) = do
    put (13 :: Word32) >> put (6 :: Word8)
    put index >> put offset >> put len
  put (MsgPiece index offset block) = do
    let len = fromIntegral $ 9 + BS.length block
    put (len :: Word32) >> put (7 :: Word8)
    put index >> put offset >> put block
  put (MsgCancel index offset len) = do
    put (13 :: Word32) >> put (8 :: Word8)
    put index >> put offset >> put len
| nytopop/silver | src/Network/Silver/Proto.hs | bsd-3-clause | 3,972 | 0 | 24 | 1,186 | 1,192 | 607 | 585 | 109 | 0 |
{-# Language PatternGuards #-}
module Blub
( blub
, foo
, bar
) where
import Control.Applicative hiding (t)
import Ugah.Blub
( a
, b
, c
)
-- | Adds three to its argument.  NOTE(review): this module is an hsimport
-- golden-output fixture; any textual change here may need to be mirrored
-- in the test's expected file.
f :: Int -> Int
f = (+ 3)
r :: Int -> Int
r =
| dan-t/hsimport | tests/goldenFiles/SymbolTest41.hs | bsd-3-clause | 210 | 1 | 6 | 68 | 73 | 46 | 27 | -1 | -1 |
{-# LANGUAGE TypeFamilies, FlexibleContexts #-}
module Network.XmlPush.HttpPull.Server (HttpPullSv, HttpPullSvArgs(..)) where
import Network.XmlPush.HttpPull.Server.Body
| YoshikuniJujo/xml-push | src/Network/XmlPush/HttpPull/Server.hs | bsd-3-clause | 172 | 0 | 5 | 14 | 29 | 21 | 8 | 3 | 0 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# OPTIONS_GHC -Wall #-}
module ConstMath.PrimRules.V704 (
unaryPrimRules
, binaryPrimRules
) where
-- | Table mapping the fully-qualified names of GHC's unary floating-point
-- primops (Double# and Float# variants) to the corresponding RealFloat
-- function, used to constant-fold applications at compile time.
unaryPrimRules :: [(String, (forall a. RealFloat a => a -> a))]
unaryPrimRules =
    [ ("GHC.Prim.expDouble#" , exp)
    , ("GHC.Prim.logDouble#" , log)
    , ("GHC.Prim.sqrtDouble#" , sqrt)
    , ("GHC.Prim.sinDouble#" , sin)
    , ("GHC.Prim.cosDouble#" , cos)
    , ("GHC.Prim.tanDouble#" , tan)
    , ("GHC.Prim.asinDouble#" , asin)
    , ("GHC.Prim.acosDouble#" , acos)
    , ("GHC.Prim.atanDouble#" , atan)
    , ("GHC.Prim.sinhDouble#" , sinh)
    , ("GHC.Prim.coshDouble#" , cosh)
    , ("GHC.Prim.tanhDouble#" , tanh)
    , ("GHC.Prim.expFloat#" , exp)
    , ("GHC.Prim.logFloat#" , log)
    , ("GHC.Prim.sqrtFloat#" , sqrt)
    , ("GHC.Prim.sinFloat#" , sin)
    , ("GHC.Prim.cosFloat#" , cos)
    , ("GHC.Prim.tanFloat#" , tan)
    , ("GHC.Prim.asinFloat#" , asin)
    , ("GHC.Prim.acosFloat#" , acos)
    , ("GHC.Prim.atanFloat#" , atan)
    , ("GHC.Prim.sinhFloat#" , sinh)
    , ("GHC.Prim.coshFloat#" , cosh)
    , ("GHC.Prim.tanhFloat#" , tanh)
    ]
-- | Same as 'unaryPrimRules' but for two-argument primops; currently only
-- the power operators are folded.
binaryPrimRules :: [(String, (forall a. RealFloat a => a -> a -> a))]
binaryPrimRules =
    [ ("GHC.Prim.powerFloat#" , (**)) -- FloatPowerOp
    , ("GHC.Prim.**##" , (**)) -- DoublePowerOp
    ]
| kfish/const-math-ghc-plugin | ConstMath/PrimRules/V704.hs | bsd-3-clause | 1,417 | 0 | 11 | 359 | 338 | 221 | 117 | 36 | 1 |
module AddLocalDecl1 where
-- |This is a function
-- NOTE(review): ghc-exactprint transform fixture — the exact comments and
-- layout below are part of the test's expected annotations; do not
-- reformat without updating the corresponding expected output.
foo = x -- comment1
-- |Another fun
x = a -- comment2
  where
    a = 3
| mpickering/ghc-exactprint | tests/examples/transform/AddLocalDecl1.hs | bsd-3-clause | 124 | 0 | 6 | 33 | 25 | 17 | 8 | 4 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Data.API.Tools.Lens
( lensTool
, binary
) where
import Data.API.Tools.Combinators
import Data.API.Tools.Datatypes
import Data.API.Types
import Control.Lens
-- | Tool to make lenses for fields in generated types.  Runs for every
-- generated API datatype, splicing in 'makeLenses' for its representation
-- type name.
lensTool :: APITool
lensTool = apiDataTypeTool $ simpleTool $ makeLenses . rep_type_nm
-- Lenses for the 'Binary' wrapper itself (re-exported as 'binary').
$(makeLenses ''Binary)
| adinapoli/api-tools | src/Data/API/Tools/Lens.hs | bsd-3-clause | 431 | 0 | 8 | 114 | 77 | 47 | 30 | 11 | 1 |
module Main (main) where
import qualified Test.Attoparsec
import qualified Test.Tasty as Tasty
-- | Entry point: run the whole tasty tree.
main :: IO ()
main = Tasty.defaultMain tests
-- | Root of the test tree; suite-specific groups hang off "root".
tests :: Tasty.TestTree
tests = Tasty.testGroup "root"
  [ Tasty.testGroup "Attoparsec." Test.Attoparsec.tests
  ]
| k0001/pipes-attoparsec | tests/Main.hs | bsd-3-clause | 266 | 0 | 8 | 47 | 75 | 43 | 32 | 8 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Perform a build
module Stack.Build.Execute
( printPlan
, preFetch
, executePlan
-- * Running Setup.hs
, ExecuteEnv
, withExecuteEnv
, withSingleContext
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Concurrent.Execute
import Control.Concurrent.Async (withAsync, wait)
import Control.Concurrent.MVar.Lifted
import Control.Concurrent.STM
import Control.Exception.Enclosed (catchIO, tryIO)
import Control.Exception.Lifted
import Control.Monad (liftM, when, unless, void, join, guard, filterM, (<=<))
import Control.Monad.Catch (MonadCatch, MonadMask)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (liftBaseWith)
import Control.Monad.Trans.Resource
import qualified Data.ByteString as S
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Foldable (forM_, any)
import Data.Function
import Data.IORef.RunOnce (runOnce)
import Data.List hiding (any)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Maybe.Extra (forMaybeM)
import Data.Monoid ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Streaming.Process hiding (callProcess, env)
import qualified Data.Streaming.Process as Process
import Data.Traversable (forM)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import Data.Time.Clock (getCurrentTime)
import Data.Word8 (_colon)
import Distribution.System (OS (Windows),
Platform (Platform))
import qualified Distribution.Text
import Language.Haskell.TH as TH (location)
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO
import Prelude hiding (FilePath, writeFile, any)
import Stack.Build.Cache
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Coverage
import Stack.Types.Build
import Stack.Fetch as Fetch
import Stack.GhcPkg
import Stack.Package
import Stack.PackageDump
import Stack.Constants
import Stack.Types
import Stack.Types.StackT
import Stack.Types.Internal
import qualified System.Directory as D
import System.Environment (getExecutablePath)
import System.Exit (ExitCode (ExitSuccess))
import qualified System.FilePath as FP
import System.IO
import System.PosixCompat.Files (createLink)
import System.Process.Read
import System.Process.Run
import System.Process.Log (showProcessArgDebug)
#if !MIN_VERSION_process(1,2,1)
import System.Process.Internals (createProcess_)
#endif
-- Constraint alias collecting everything the build-execution actions need
-- (environment access, logging, lifted IO, exceptions).
type M env m = (MonadIO m,MonadReader env m,HasHttpManager env,HasBuildConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,MonadMask m,HasLogLevel env,HasEnvConfig env,HasTerminal env)

-- | Fetch the packages necessary for a build, for example in combination with a dry run.
preFetch :: M env m => Plan -> m ()
preFetch plan
    | Set.null idents = $logDebug "Nothing to fetch"
    | otherwise = do
        $logDebug $ T.pack $
            "Prefetching: " ++
            intercalate ", " (map packageIdentifierString $ Set.toList idents)
        menv <- getMinimalEnvOverride
        fetchPackages menv idents
  where
    -- Only upstream (package-index) tasks need fetching; local packages
    -- are already on disk.
    idents = Set.unions $ map toIdent $ Map.toList $ planTasks plan
    toIdent (name, task) =
        case taskType task of
            TTLocal _ -> Set.empty
            TTUpstream package _ -> Set.singleton $ PackageIdentifier
                name
                (packageVersion package)
-- | Print a description of build plan for human consumption.
-- | Print a human-readable description of the build plan, in execution
-- order: unregistrations, builds, tests, benchmarks, executable installs.
printPlan :: M env m
          => Plan
          -> m ()
printPlan plan = do
    case Map.elems $ planUnregisterLocal plan of
        [] -> $logInfo "No packages would be unregistered."
        xs -> do
            $logInfo "Would unregister locally:"
            forM_ xs $ \(ident, mreason) -> $logInfo $ T.concat
                [ T.pack $ packageIdentifierString ident
                , case mreason of
                    Nothing -> ""
                    Just reason -> T.concat
                        [ " ("
                        , reason
                        , ")"
                        ]
                ]
    $logInfo ""
    case Map.elems $ planTasks plan of
        [] -> $logInfo "Nothing to build."
        xs -> do
            $logInfo "Would build:"
            mapM_ ($logInfo . displayTask) xs
    -- Final (post-build) tasks are split into test and benchmark listings
    -- depending on which components they carry.
    let hasTests = not . Set.null . testComponents . taskComponents
        hasBenches = not . Set.null . benchComponents . taskComponents
        tests = Map.elems $ Map.filter hasTests $ planFinals plan
        benches = Map.elems $ Map.filter hasBenches $ planFinals plan
    unless (null tests) $ do
        $logInfo ""
        $logInfo "Would test:"
        mapM_ ($logInfo . displayTask) tests
    unless (null benches) $ do
        $logInfo ""
        $logInfo "Would benchmark:"
        mapM_ ($logInfo . displayTask) benches
    $logInfo ""
    case Map.toList $ planInstallExes plan of
        [] -> $logInfo "No executables to be installed."
        xs -> do
            $logInfo "Would install executables:"
            forM_ xs $ \(name, loc) -> $logInfo $ T.concat
                [ name
                , " from "
                , case loc of
                    Snap -> "snapshot"
                    Local -> "local"
                , " database"
                ]
-- | Render one build task as a single summary line for dry-run output:
-- target database, source (local directory vs package index), and any
-- still-missing dependencies it must wait on.
displayTask :: Task -> Text
displayTask task =
    T.pack $ concat pieces
  where
    pieces =
        [ packageIdentifierString (taskProvides task)
        , ": database="
        , dbName
        , ", source="
        , sourceDesc
        , afterClause
        ]
    dbName =
        case taskLocation task of
            Snap -> "snapshot"
            Local -> "local"
    sourceDesc =
        case taskType task of
            TTLocal lp -> toFilePath (lpDir lp)
            TTUpstream _ _ -> "package index"
    missing = tcoMissing (taskConfigOpts task)
    afterClause
        | Set.null missing = ""
        | otherwise =
            ", after: " ++
            intercalate "," (map packageIdentifierString (Set.toList missing))
-- | Runtime environment shared by every task executed for a single build
-- plan: locks, caches of registered packages, and per-run configuration.
data ExecuteEnv = ExecuteEnv
    { eeEnvOverride :: !EnvOverride
      -- ^ Process environment used when invoking external tools
    , eeConfigureLock :: !(MVar ())
      -- ^ Held around Cabal configure steps; configures cannot run in parallel
    , eeInstallLock :: !(MVar ())
      -- ^ Serializes copy/register steps across build threads
    , eeBuildOpts :: !BuildOpts
    , eeBaseConfigOpts :: !BaseConfigOpts
    , eeGhcPkgIds :: !(TVar (Map PackageIdentifier Installed))
      -- ^ Accumulates the installed result of every task built in this run
    , eeTempDir :: !(Path Abs Dir)
      -- ^ Scratch directory (also used to unpack upstream packages)
    , eeSetupHs :: !(Path Abs File)
    -- ^ Temporary Setup.hs for simple builds
    , eeSetupExe :: !(Maybe (Path Abs File))
    -- ^ Compiled version of eeSetupHs
    , eeCabalPkgVer :: !Version
      -- ^ Version of the Cabal library that Setup.hs is built against
    , eeTotalWanted :: !Int
      -- ^ Count of locally wanted packages (drives console-output decisions)
    , eeWanted :: !(Set PackageName)
    , eeLocals :: ![LocalPackage]
    , eeSourceMap :: !SourceMap
    , eeGlobalDB :: !(Path Abs Dir)
    , eeGlobalDumpPkgs :: !(Map GhcPkgId (DumpPackage () ()))
      -- ^ Dump info for the (immutable) global package database
    , eeSnapshotDumpPkgs :: !(TVar (Map GhcPkgId (DumpPackage () ())))
      -- ^ Dump info for the snapshot database; updated as packages register
    , eeLocalDumpPkgs :: !(TVar (Map GhcPkgId (DumpPackage () ())))
      -- ^ Dump info for the local database; updated on register/unregister
    }
-- | Get a compiled Setup exe, building and caching it on first use. The
-- cache key (file name) includes the Cabal version, platform, and compiler
-- version so differently-configured projects don't collide.
getSetupExe :: M env m
            => Path Abs File -- ^ Setup.hs input file
            -> Path Abs Dir -- ^ temporary directory
            -> m (Maybe (Path Abs File))
getSetupExe setupHs tmpdir = do
    wc <- getWhichCompiler
    econfig <- asks getEnvConfig
    let config = getConfig econfig
        baseNameS = concat
            [ "setup-Simple-Cabal-"
            , versionString $ envConfigCabalVersion econfig
            , "-"
            , Distribution.Text.display $ configPlatform config
            , "-"
            , compilerVersionString $ envConfigCompilerVersion econfig
            ]
        exeNameS = baseNameS ++
            case configPlatform config of
                Platform _ Windows -> ".exe"
                _ -> ""
        -- For GHCJS the compiler output is a .jsexe directory, not an exe.
        outputNameS =
            case wc of
                Ghc -> exeNameS
                Ghcjs -> baseNameS ++ ".jsexe"
        jsExeNameS =
            baseNameS ++ ".jsexe"
        setupDir =
            configStackRoot config </>
            $(mkRelDir "setup-exe-cache")
    exePath <- fmap (setupDir </>) $ parseRelFile exeNameS
    jsExePath <- fmap (setupDir </>) $ parseRelDir jsExeNameS
    exists <- liftIO $ D.doesFileExist $ toFilePath exePath
    if exists
        then return $ Just exePath
        else do
            -- Build into "tmp-" paths, then rename into place so a partial
            -- compile never leaves a broken entry at the cached path.
            tmpExePath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ exeNameS
            tmpOutputPath <- fmap (setupDir </>) $ parseRelFile $ "tmp-" ++ outputNameS
            tmpJsExePath <- fmap (setupDir </>) $ parseRelDir $ "tmp-" ++ jsExeNameS
            liftIO $ D.createDirectoryIfMissing True $ toFilePath setupDir
            menv <- getMinimalEnvOverride
            -- Compile against only the global DB, base, and the exact Cabal
            -- version; everything else is hidden for reproducibility.
            let args =
                    [ "-clear-package-db"
                    , "-global-package-db"
                    , "-hide-all-packages"
                    , "-package"
                    , "base"
                    , "-package"
                    , "Cabal-" ++ versionString (envConfigCabalVersion econfig)
                    , toFilePath setupHs
                    , "-o"
                    , toFilePath tmpOutputPath
                    ] ++
                    ["-build-runner" | wc == Ghcjs]
            runIn tmpdir (compilerExeName wc) menv args Nothing
            when (wc == Ghcjs) $ renameDir tmpJsExePath jsExePath
            renameFile tmpExePath exePath
            return $ Just exePath
-- | Execute a callback that takes an 'ExecuteEnv'. Sets up everything the
-- build workers share: a temp dir, the generated Setup.hs (and its compiled
-- form), locks, and TVars tracking registered packages.
withExecuteEnv :: M env m
               => EnvOverride
               -> BuildOpts
               -> BaseConfigOpts
               -> [LocalPackage]
               -> [DumpPackage () ()] -- ^ global packages
               -> [DumpPackage () ()] -- ^ snapshot packages
               -> [DumpPackage () ()] -- ^ local packages
               -> SourceMap
               -> (ExecuteEnv -> m a)
               -> m a
withExecuteEnv menv bopts baseConfigOpts locals globalPackages snapshotPackages localPackages sourceMap inner = do
    withCanonicalizedSystemTempDirectory stackProgName $ \tmpdir -> do
        configLock <- newMVar ()
        installLock <- newMVar ()
        idMap <- liftIO $ newTVarIO Map.empty
        -- A minimal Setup.hs, used whenever the package's build type is Simple.
        let setupHs = tmpdir </> $(mkRelFile "Setup.hs")
        liftIO $ writeFile (toFilePath setupHs) "import Distribution.Simple\nmain = defaultMain"
        setupExe <- getSetupExe setupHs tmpdir
        cabalPkgVer <- asks (envConfigCabalVersion . getEnvConfig)
        globalDB <- getGlobalDB menv =<< getWhichCompiler
        snapshotPackagesTVar <- liftIO $ newTVarIO (toDumpPackagesByGhcPkgId snapshotPackages)
        localPackagesTVar <- liftIO $ newTVarIO (toDumpPackagesByGhcPkgId localPackages)
        inner ExecuteEnv
            { eeEnvOverride = menv
            , eeBuildOpts = bopts
             -- Uncertain as to why we cannot run configures in parallel. This appears
             -- to be a Cabal library bug. Original issue:
             -- https://github.com/fpco/stack/issues/84. Ideally we'd be able to remove
             -- this.
            , eeConfigureLock = configLock
            , eeInstallLock = installLock
            , eeBaseConfigOpts = baseConfigOpts
            , eeGhcPkgIds = idMap
            , eeTempDir = tmpdir
            , eeSetupHs = setupHs
            , eeSetupExe = setupExe
            , eeCabalPkgVer = cabalPkgVer
            , eeTotalWanted = length $ filter lpWanted locals
            , eeWanted = wantedLocalPackages locals
            , eeLocals = locals
            , eeSourceMap = sourceMap
            , eeGlobalDB = globalDB
            , eeGlobalDumpPkgs = toDumpPackagesByGhcPkgId globalPackages
            , eeSnapshotDumpPkgs = snapshotPackagesTVar
            , eeLocalDumpPkgs = localPackagesTVar
            }
  where
    -- Key each dump entry by its ghc-pkg id for fast lookup/deletion.
    toDumpPackagesByGhcPkgId = Map.fromList . map (\dp -> (dpGhcPkgId dp, dp))
-- | Perform the actual plan: run all build tasks, then copy any requested
-- executables into the user's local bin directory, and finally run any
-- post-build commands configured via @--exec@.
executePlan :: M env m
            => EnvOverride
            -> BuildOpts
            -> BaseConfigOpts
            -> [LocalPackage]
            -> [DumpPackage () ()] -- ^ global packages
            -> [DumpPackage () ()] -- ^ snapshot packages
            -> [DumpPackage () ()] -- ^ local packages
            -> SourceMap
            -> InstalledMap
            -> Plan
            -> m ()
executePlan menv bopts baseConfigOpts locals globalPackages snapshotPackages localPackages sourceMap installedMap plan = do
    withExecuteEnv menv bopts baseConfigOpts locals globalPackages snapshotPackages localPackages sourceMap (executePlan' installedMap plan)
    unless (Map.null $ planInstallExes plan) $ do
        snapBin <- (</> bindirSuffix) `liftM` installationRootDeps
        localBin <- (</> bindirSuffix) `liftM` installationRootLocal
        destDir <- asks $ configLocalBin . getConfig
        createTree destDir
        destDir' <- liftIO . D.canonicalizePath . toFilePath $ destDir
        -- Compare canonicalized paths so symlinked PATH entries still match.
        isInPATH <- liftIO . fmap (any (FP.equalFilePath destDir')) . (mapM D.canonicalizePath <=< filterM D.doesDirectoryExist) $ (envSearchPath menv)
        -- Idiom fix: was `when (not isInPATH)`.
        unless isInPATH $
            $logWarn $ T.concat
                [ "Installation path "
                , T.pack destDir'
                , " not found in PATH environment variable"
                ]
        platform <- asks getPlatform
        let ext =
                case platform of
                    Platform _ Windows -> ".exe"
                    _ -> ""
        currExe <- liftIO getExecutablePath -- needed for windows, see below
        installed <- forMaybeM (Map.toList $ planInstallExes plan) $ \(name, loc) -> do
            let bindir =
                    case loc of
                        Snap -> snapBin
                        Local -> localBin
            mfp <- resolveFileMaybe bindir $ T.unpack name ++ ext
            case mfp of
                Nothing -> do
                    $logWarn $ T.concat
                        [ "Couldn't find executable "
                        , name
                        , " in directory "
                        , T.pack $ toFilePath bindir
                        ]
                    return Nothing
                Just file -> do
                    let destFile = destDir' FP.</> T.unpack name ++ ext
                    $logInfo $ T.concat
                        [ "Copying from "
                        , T.pack $ toFilePath file
                        , " to "
                        , T.pack destFile
                        ]
                    -- Windows cannot overwrite the running executable in
                    -- place; fall back to the rename-then-copy dance.
                    liftIO $ case platform of
                        Platform _ Windows | FP.equalFilePath destFile currExe ->
                            windowsRenameCopy (toFilePath file) destFile
                        _ -> D.copyFile (toFilePath file) destFile
                    return $ Just (destDir', [T.append name (T.pack ext)])
        -- Group the copied executables by destination directory for reporting.
        let destToInstalled = Map.fromListWith (++) installed
        unless (Map.null destToInstalled) $ $logInfo ""
        forM_ (Map.toList destToInstalled) $ \(dest, executables) -> do
            $logInfo $ T.concat
                [ "Copied executables to "
                , T.pack dest
                , ":"]
            forM_ executables $ \exe -> $logInfo $ T.append "- " exe
    config <- asks getConfig
    menv' <- liftIO $ configEnvOverride config EnvSettings
                { esIncludeLocals = True
                , esIncludeGhcPackagePath = True
                , esStackExe = True
                , esLocaleUtf8 = False
                }
    -- Run any user-specified post-build commands with the full build env.
    forM_ (boptsExec bopts) $ \(cmd, args) -> do
        $logProcessRun cmd args
        callProcess Nothing menv' cmd args
-- | Windows can't write over the current executable. Instead, we rename the
-- current executable out of the way and then move a fresh copy into place.
-- Leaves @dest.old@ behind (the previous executable, still mapped).
windowsRenameCopy :: FilePath -> FilePath -> IO ()
windowsRenameCopy src dest = do
    let staging = dest ++ ".new"
        backup  = dest ++ ".old"
    -- 1. Stage the new file next to the destination.
    D.copyFile src staging
    -- 2. Move the in-use destination aside (renames are allowed on Windows).
    D.renameFile dest backup
    -- 3. Drop the staged copy into place.
    D.renameFile staging dest
-- | Perform the actual plan (internal): unregister stale local packages,
-- turn the plan into a dependency-ordered set of 'Action's, run them with
-- the configured parallelism, and post-process (coverage, haddock index).
executePlan' :: M env m
             => InstalledMap
             -> Plan
             -> ExecuteEnv
             -> m ()
executePlan' installedMap0 plan ee@ExecuteEnv {..} = do
    when (toCoverage $ boptsTestOpts eeBuildOpts) deleteHpcReports
    wc <- getWhichCompiler
    cv <- asks $ envConfigCompilerVersion . getEnvConfig
    -- Unregister every local package the plan marked for removal, logging
    -- the reason when one was recorded.
    case Map.toList $ planUnregisterLocal plan of
        [] -> return ()
        ids -> do
            localDB <- packageDatabaseLocal
            forM_ ids $ \(id', (ident, mreason)) -> do
                $logInfo $ T.concat
                    [ T.pack $ packageIdentifierString ident
                    , ": unregistering"
                    , case mreason of
                        Nothing -> ""
                        Just reason -> T.concat
                            [ " ("
                            , reason
                            , ")"
                            ]
                    ]
                unregisterGhcPkgId eeEnvOverride wc cv localDB id' ident
    -- Keep the local dump-package cache consistent with the unregisters above.
    liftIO $ atomically $ modifyTVar' eeLocalDumpPkgs $ \initMap ->
        foldl' (flip Map.delete) initMap $ Map.keys (planUnregisterLocal plan)
    -- Yes, we're explicitly discarding result values, which in general would
    -- be bad. monad-unlift does this all properly at the type system level,
    -- but I don't want to pull it in for this one use case, when we know that
    -- stack always using transformer stacks that are safe for this use case.
    runInBase <- liftBaseWith $ \run -> return (void . run)
    -- Pair each task with its optional "final" (test/bench) task so
    -- 'toActions' can wire dependencies between them.
    let actions = concatMap (toActions installedMap' runInBase ee) $ Map.elems $ Map.mergeWithKey
            (\_ b f -> Just (Just b, Just f))
            (fmap (\b -> (Just b, Nothing)))
            (fmap (\f -> (Nothing, Just f)))
            (planTasks plan)
            (planFinals plan)
    threads <- asks $ configJobs . getConfig
    concurrentTests <- asks $ configConcurrentTests . getConfig
    let keepGoing =
            case boptsKeepGoing eeBuildOpts of
                Just kg -> kg
                Nothing -> boptsTests eeBuildOpts || boptsBenchmarks eeBuildOpts
        concurrentFinal =
            -- TODO it probably makes more sense to use a lock for test suites
            -- and just have the execution blocked. Turning off all concurrency
            -- on finals based on the --test option doesn't fit in well.
            if boptsTests eeBuildOpts
                then concurrentTests
                else True
    terminal <- asks getTerminal
    -- Run all actions; the callback polls the shared done-counter to render
    -- a sticky progress line until every action completes.
    errs <- liftIO $ runActions threads keepGoing concurrentFinal actions $ \doneVar -> do
        let total = length actions
            loop prev
                | prev == total =
                    runInBase $ $logStickyDone ("Completed all " <> T.pack (show total) <> " actions.")
                | otherwise = do
                    when terminal $ runInBase $
                        $logSticky ("Progress: " <> T.pack (show prev) <> "/" <> T.pack (show total))
                    done <- atomically $ do
                        done <- readTVar doneVar
                        -- Block until the counter actually advances.
                        check $ done /= prev
                        return done
                    loop done
        if total > 1
            then loop 0
            else return ()
    when (toCoverage $ boptsTestOpts eeBuildOpts) $ do
        generateHpcUnifiedReport
        generateHpcMarkupIndex
    unless (null errs) $ throwM $ ExecutionFailure errs
    when (boptsHaddock eeBuildOpts) $ do
        snapshotDumpPkgs <- liftIO (readTVarIO eeSnapshotDumpPkgs)
        localDumpPkgs <- liftIO (readTVarIO eeLocalDumpPkgs)
        generateLocalHaddockIndex eeEnvOverride wc eeBaseConfigOpts localDumpPkgs eeLocals
        generateDepsHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeGlobalDumpPkgs snapshotDumpPkgs localDumpPkgs eeLocals
        generateSnapHaddockIndex eeEnvOverride wc eeBaseConfigOpts eeGlobalDumpPkgs snapshotDumpPkgs
  where
    -- Drop unregistered packages from the installed map so tasks don't see
    -- packages that no longer exist in the local database.
    installedMap' = Map.difference installedMap0
                  $ Map.fromList
                  $ map (\(ident, _) -> (packageIdentifierName ident, ()))
                  $ Map.elems
                  $ planUnregisterLocal plan
-- | Translate a (build task, final task) pair into runnable 'Action's with
-- the proper dependency edges: a final action waits on its own build action
-- (or build-final action) when one exists.
toActions :: M env m
          => InstalledMap
          -> (m () -> IO ())
          -> ExecuteEnv
          -> (Maybe Task, Maybe Task) -- build and final
          -> [Action]
toActions installedMap runInBase ee (mbuild, mfinal) =
    abuild ++ afinal
  where
    -- The primary build action: depends on all still-missing deps' builds.
    abuild =
        case mbuild of
            Nothing -> []
            Just task@Task {..} ->
                [ Action
                    { actionId = ActionId taskProvides ATBuild
                    , actionDeps =
                        (Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts))
                    , actionDo = \ac -> runInBase $ singleBuild runInBase ac ee task installedMap False
                    }
                ]
    -- The final (test/bench) action; when the task is not all-in-one, an
    -- extra ATBuildFinal action builds the test/bench components first.
    afinal =
        case mfinal of
            Nothing -> []
            Just task@Task {..} ->
                (if taskAllInOne then [] else
                    [Action
                        { actionId = ActionId taskProvides ATBuildFinal
                        , actionDeps = addBuild ATBuild
                            (Set.map (\ident -> ActionId ident ATBuild) (tcoMissing taskConfigOpts))
                        , actionDo = \ac -> runInBase $ singleBuild runInBase ac ee task installedMap True
                        }]) ++
                [ Action
                    { actionId = ActionId taskProvides ATFinal
                    , actionDeps = addBuild (if taskAllInOne then ATBuild else ATBuildFinal) Set.empty
                    , actionDo = \ac -> runInBase $ do
                        let comps = taskComponents task
                            tests = testComponents comps
                            benches = benchComponents comps
                        unless (Set.null tests) $ do
                            singleTest runInBase topts (Set.toList tests) ac ee task installedMap
                        unless (Set.null benches) $ do
                            -- FIXME: shouldn't this use the list of benchmarks to run?
                            singleBench runInBase beopts ac ee task installedMap
                    }
                ]
      where
        -- Only add an edge to this package's own build when one exists.
        addBuild aty =
            case mbuild of
                Nothing -> id
                Just _ -> Set.insert $ ActionId taskProvides aty
    bopts = eeBuildOpts ee
    topts = boptsTestOpts bopts
    beopts = boptsBenchmarkOpts bopts
-- | Generate the ConfigCache: resolve the task's missing dependencies to
-- concrete ghc-pkg ids and assemble the configure-options fingerprint used
-- to decide whether reconfiguration is needed.
getConfigCache :: MonadIO m
               => ExecuteEnv -> Task -> InstalledMap -> Bool -> Bool
               -> m (Map PackageIdentifier GhcPkgId, ConfigCache)
getConfigCache ExecuteEnv {..} Task {..} installedMap enableTest enableBench = do
    let extra =
            -- We enable tests if the test suite dependencies are already
            -- installed, so that we avoid unnecessary recompilation based on
            -- cabal_macros.h changes when switching between 'stack build' and
            -- 'stack test'. See:
            -- https://github.com/commercialhaskell/stack/issues/805
            case taskType of
                TTLocal lp -> concat
                    [ ["--enable-tests" | enableTest || (depsPresent installedMap $ lpTestDeps lp)]
                    , ["--enable-benchmarks" | enableBench || (depsPresent installedMap $ lpBenchDeps lp)]
                    ]
                _ -> []
    idMap <- liftIO $ readTVarIO eeGhcPkgIds
    -- NB: the bindings below are one mutually-recursive let group; 'missing'
    -- is pattern-bound from taskConfigOpts and used above its binding site.
    let getMissing ident =
            case Map.lookup ident idMap of
                Nothing -> error "singleBuild: invariant violated, missing package ID missing"
                -- Executables provide no library to depend on, so they are
                -- dropped from the dependency id map.
                Just (Library ident' x) -> assert (ident == ident') $ Just (ident, x)
                Just (Executable _) -> Nothing
        missing' = Map.fromList $ mapMaybe getMissing $ Set.toList missing
        TaskConfigOpts missing mkOpts = taskConfigOpts
        opts = mkOpts missing'
        allDeps = Set.fromList $ Map.elems missing' ++ Map.elems taskPresent
        cache = ConfigCache
            { configCacheOpts = opts
                { coNoDirs = coNoDirs opts ++ map T.unpack extra
                }
            , configCacheDeps = allDeps
            , configCacheComponents =
                case taskType of
                    TTLocal lp -> Set.map renderComponent $ lpComponents lp
                    TTUpstream _ _ -> Set.empty
            , configCacheHaddock =
                shouldHaddockPackage eeBuildOpts eeWanted (packageIdentifierName taskProvides)
            }
        allDepsMap = Map.union missing' taskPresent
    return (allDepsMap, cache)
-- | Ensure that the configuration for the package matches what is given.
-- Returns whether a (re)configure was actually performed, so callers can
-- tell if the package state changed.
ensureConfig :: M env m
             => ConfigCache -- ^ newConfigCache
             -> Path Abs Dir -- ^ package directory
             -> ExecuteEnv
             -> m () -- ^ announce
             -> (Bool -> [String] -> m ()) -- ^ cabal
             -> Path Abs File -- ^ .cabal file
             -> m Bool
ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp = do
    newCabalMod <- liftIO (fmap modTime (D.getModificationTime (toFilePath cabalfp)))
    needConfig <-
        if boptsReconfigure eeBuildOpts
            then return True
            else do
                -- Determine the old and new configuration in the local directory, to
                -- determine if we need to reconfigure.
                mOldConfigCache <- tryGetConfigCache pkgDir
                mOldCabalMod <- tryGetCabalMod pkgDir
                return $ mOldConfigCache /= Just newConfigCache
                      || mOldCabalMod /= Just newCabalMod
    let ConfigureOpts dirs nodirs = configCacheOpts newConfigCache
    -- Configures are serialized via eeConfigureLock (see ExecuteEnv).
    when needConfig $ withMVar eeConfigureLock $ \_ -> do
        deleteCaches pkgDir
        announce
        menv <- getMinimalEnvOverride
        -- Older Cabal libraries don't know about the ghcjs programs.
        let programNames =
                if eeCabalPkgVer < $(mkVersion "1.22")
                    then ["ghc", "ghc-pkg"]
                    else ["ghc", "ghc-pkg", "ghcjs", "ghcjs-pkg"]
        exes <- forM programNames $ \name -> do
            mpath <- findExecutable menv name
            -- NB: the inner 'return' is in the *list* monad — a found path
            -- yields a singleton list, a missing one yields [].
            return $ case mpath of
                Nothing -> []
                Just x -> return $ concat ["--with-", name, "=", toFilePath x]
        cabal False $ "configure" : concat
            [ concat exes
            , dirs
            , nodirs
            ]
        writeConfigCache pkgDir newConfigCache
        writeCabalMod pkgDir newCabalMod
    return needConfig
-- | Log an info-level message prefixed with the task's package identifier,
-- e.g. @foo-1.2.3: building@.
announceTask :: MonadLogger m => Task -> Text -> m ()
announceTask task msg =
    $logInfo $ prefix <> msg
  where
    prefix = T.pack (packageIdentifierString (taskProvides task)) <> ": "
-- | Set up everything needed to operate on a single task's package and hand
-- it to the continuation: resolves the package source (local dir vs freshly
-- unpacked upstream tarball), opens a log file when output isn't going to
-- the console, and constructs the @cabal@ runner that invokes Setup.hs
-- (compiling it first when the build type isn't Simple).
withSingleContext :: M env m
                  => (m () -> IO ())
                  -> ActionContext
                  -> ExecuteEnv
                  -> Task
                  -> Maybe (Map PackageIdentifier GhcPkgId)
                  -- ^ All dependencies' package ids to provide to Setup.hs. If
                  -- Nothing, just provide global and snapshot package
                  -- databases.
                  -> Maybe String
                  -> (  Package
                     -> Path Abs File
                     -> Path Abs Dir
                     -> (Bool -> [String] -> m ())
                     -> (Text -> m ())
                     -> Bool
                     -> Maybe (Path Abs File, Handle)
                     -> m a)
                  -> m a
withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffix inner0 =
    withPackage $ \package cabalfp pkgDir ->
    withLogFile package $ \mlogFile ->
    withCabal package pkgDir mlogFile $ \cabal ->
    inner0 package cabalfp pkgDir cabal announce console mlogFile
  where
    announce = announceTask task
    -- Only send build output straight to the console when this is the single
    -- wanted package and nothing else remains to run.
    wanted =
        case taskType of
            TTLocal lp -> lpWanted lp
            TTUpstream _ _ -> False
    console = wanted
        && all (\(ActionId ident _) -> ident == taskProvides) (Set.toList acRemaining)
        && eeTotalWanted == 1
    -- Local packages are used in place; upstream packages are unpacked into
    -- the shared temp dir first.
    withPackage inner =
        case taskType of
            TTLocal lp -> inner (lpPackage lp) (lpCabalFile lp) (lpDir lp)
            TTUpstream package _ -> do
                mdist <- liftM Just distRelativeDir
                m <- unpackPackageIdents eeEnvOverride eeTempDir mdist $ Set.singleton taskProvides
                case Map.toList m of
                    [(ident, dir)]
                        | ident == taskProvides -> do
                            let name = packageIdentifierName taskProvides
                            cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal"
                            let cabalfp = dir </> cabalfpRel
                            inner package cabalfp dir
                    _ -> error $ "withPackage: invariant violated: " ++ show m
    -- Console builds get no log file; otherwise open one under the build log
    -- path and guarantee it is closed via bracket.
    withLogFile package inner
        | console = inner Nothing
        | otherwise = do
            logPath <- buildLogPath package msuffix
            createTree (parent logPath)
            let fp = toFilePath logPath
            bracket
                (liftIO $ openBinaryFile fp WriteMode)
                (liftIO . hClose)
                $ \h -> inner (Just (logPath, h))
    -- Build the function that runs Setup.hs with the right package databases
    -- and arguments, compiling Setup.hs on the fly for non-Simple builds.
    withCabal package pkgDir mlogFile inner = do
        config <- asks getConfig
        let envSettings = EnvSettings
                { esIncludeLocals = taskLocation task == Local
                , esIncludeGhcPackagePath = False
                , esStackExe = False
                , esLocaleUtf8 = True
                }
        menv <- liftIO $ configEnvOverride config envSettings
        -- When looking for ghc to build Setup.hs we want to ignore local binaries, see:
        -- https://github.com/commercialhaskell/stack/issues/1052
        menvWithoutLocals <- liftIO $ configEnvOverride config envSettings { esIncludeLocals = False }
        getGhcPath <- runOnce $ liftIO $ join $ findExecutable menvWithoutLocals "ghc"
        getGhcjsPath <- runOnce $ liftIO $ join $ findExecutable menvWithoutLocals "ghcjs"
        distRelativeDir' <- distRelativeDir
        esetupexehs <-
            -- Avoid broken Setup.hs files causing problems for simple build
            -- types, see:
            -- https://github.com/commercialhaskell/stack/issues/370
            case (packageSimpleType package, eeSetupExe) of
                (True, Just setupExe) -> return $ Left setupExe
                _ -> liftIO $ fmap Right $ getSetupHs pkgDir
        inner $ \stripTHLoading args -> do
            let cabalPackageArg =
                    "-package=" ++ packageIdentifierString
                                       (PackageIdentifier cabalPackageName
                                                          eeCabalPkgVer)
                packageArgs =
                    case mdeps of
                        -- This branch is taken when
                        -- 'explicit-setup-deps' is requested in your
                        -- stack.yaml file.
                        Just deps | explicitSetupDeps (packageName package) config ->
                            -- Stack always builds with the global Cabal for various
                            -- reproducibility issues.
                            let depsMinusCabal
                                    = map ghcPkgIdString
                                    $ Set.toList
                                    $ addGlobalPackages deps (Map.elems eeGlobalDumpPkgs)
                            in
                                ( "-clear-package-db"
                                : "-global-package-db"
                                : map (("-package-db=" ++) . toFilePath) (bcoExtraDBs eeBaseConfigOpts)
                                ) ++
                                ( ("-package-db=" ++ toFilePath (bcoSnapDB eeBaseConfigOpts))
                                : ("-package-db=" ++ toFilePath (bcoLocalDB eeBaseConfigOpts))
                                : "-hide-all-packages"
                                : cabalPackageArg
                                : map ("-package-id=" ++) depsMinusCabal
                                )
                        -- This branch is usually taken for builds, and
                        -- is always taken for `stack sdist`.
                        --
                        -- This approach is debatable. It adds access to the
                        -- snapshot package database for Cabal. There are two
                        -- possible objections:
                        --
                        -- 1. This doesn't isolate the build enough; arbitrary
                        -- other packages available could cause the build to
                        -- succeed or fail.
                        --
                        -- 2. This doesn't provide enough packages: we should also
                        -- include the local database when building local packages.
                        --
                        -- Currently, this branch is only taken via `stack
                        -- sdist` or when explicitly requested in the
                        -- stack.yaml file.
                        _ ->
                            cabalPackageArg
                          : "-clear-package-db"
                          : "-global-package-db"
                          : map (("-package-db=" ++) . toFilePath) (bcoExtraDBs eeBaseConfigOpts)
                         ++ ["-package-db=" ++ toFilePath (bcoSnapDB eeBaseConfigOpts)]
                setupArgs = ("--builddir=" ++ toFilePath distRelativeDir') : args
                -- Spawn an external process, tee its stdout/stderr through
                -- the log file or console, and throw on nonzero exit.
                runExe exeName fullArgs = do
                    $logProcessRun (toFilePath exeName) fullArgs
                    -- Use createProcess_ to avoid the log file being closed afterwards
                    (Nothing, moutH, merrH, ph) <- liftIO $ createProcess_ "singleBuild" cp
                    let makeAbsolute = stripTHLoading -- If users want control, we should add a config option for this
                    ec <-
                        liftIO $
                        withAsync (runInBase $ maybePrintBuildOutput stripTHLoading makeAbsolute pkgDir LevelInfo mlogFile moutH) $ \outThreadID ->
                        withAsync (runInBase $ maybePrintBuildOutput False makeAbsolute pkgDir LevelWarn mlogFile merrH) $ \errThreadID -> do
                            -- Wait for the process first, then drain both
                            -- output threads before reporting the exit code.
                            ec <- waitForProcess ph
                            wait errThreadID
                            wait outThreadID
                            return ec
                    case ec of
                        ExitSuccess -> return ()
                        _ -> do
                            -- On failure, include the captured log contents
                            -- (if any) in the thrown exception.
                            bs <- liftIO $
                                case mlogFile of
                                    Nothing -> return ""
                                    Just (logFile, h) -> do
                                        hClose h
                                        S.readFile $ toFilePath logFile
                            throwM $ CabalExitedUnsuccessfully
                                ec
                                taskProvides
                                exeName
                                fullArgs
                                (fmap fst mlogFile)
                                bs
                  where
                    cp0 = proc (toFilePath exeName) fullArgs
                    cp = cp0
                        { cwd = Just $ toFilePath pkgDir
                        , Process.env = envHelper menv
                        -- Ideally we'd create a new pipe here and then close it
                        -- below to avoid the child process from taking from our
                        -- stdin. However, if we do this, the child process won't
                        -- be able to get the codepage on Windows that we want.
                        -- See:
                        -- https://github.com/commercialhaskell/stack/issues/738
                        -- , std_in = CreatePipe
                        , std_out =
                            case mlogFile of
                                Nothing -> CreatePipe
                                Just (_, h) -> UseHandle h
                        , std_err =
                            case mlogFile of
                                Nothing -> CreatePipe
                                Just (_, h) -> UseHandle h
                        }
            wc <- getWhichCompiler
            -- Either use the cached Setup exe (Simple build type) or compile
            -- the package's own Setup.hs into dist/setup/setup.
            (exeName, fullArgs) <- case (esetupexehs, wc) of
                (Left setupExe, _) -> return (setupExe, setupArgs)
                (Right setuphs, compiler) -> do
                    distDir <- distDirFromDir pkgDir
                    let setupDir = distDir </> $(mkRelDir "setup")
                        outputFile = setupDir </> $(mkRelFile "setup")
                    createTree setupDir
                    compilerPath <-
                        case compiler of
                            Ghc -> getGhcPath
                            Ghcjs -> getGhcjsPath
                    runExe compilerPath $
                        [ "--make"
                        , "-odir", toFilePath setupDir
                        , "-hidir", toFilePath setupDir
                        , "-i", "-i."
                        ] ++ packageArgs ++
                        [ toFilePath setuphs
                        , "-o", toFilePath outputFile
                        ] ++
                        (case compiler of
                            Ghc -> []
                            Ghcjs -> ["-build-runner"])
                    return (outputFile, setupArgs)
            runExe exeName $ (if boptsCabalVerbose eeBuildOpts then ("--verbose":) else id) fullArgs
    -- Stream a process handle's output to the console, but only when no log
    -- file is in use (log-file output goes directly via UseHandle above).
    maybePrintBuildOutput stripTHLoading makeAbsolute pkgDir level mlogFile mh =
        case mh of
            Just h ->
                case mlogFile of
                    Just{} -> return ()
                    Nothing -> printBuildOutput stripTHLoading makeAbsolute pkgDir level h
            Nothing -> return ()
-- | Configure and build a single task's package, reusing a precompiled
-- snapshot package when a matching cache entry exists. On success, records
-- the installed result in 'eeGhcPkgIds' so later tasks can depend on it.
singleBuild :: M env m
            => (m () -> IO ())
            -> ActionContext
            -> ExecuteEnv
            -> Task
            -> InstalledMap
            -> Bool -- ^ Is this a final build?
            -> m ()
singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap isFinalBuild = do
    (allDepsMap, cache) <- getConfigCache ee task installedMap enableTests enableBenchmarks
    mprecompiled <- getPrecompiled cache
    minstalled <-
        case mprecompiled of
            Just precompiled -> copyPreCompiled precompiled
            Nothing -> realConfigAndBuild cache allDepsMap
    case minstalled of
        Nothing -> return ()
        Just installed -> do
            writeFlagCache installed cache
            liftIO $ atomically $ modifyTVar eeGhcPkgIds $ Map.insert taskProvides installed
  where
    pname = packageIdentifierName taskProvides
    shouldHaddockPackage' = shouldHaddockPackage eeBuildOpts eeWanted pname
    doHaddock package = shouldHaddockPackage' &&
                        not isFinalBuild &&
                        -- Works around haddock failing on bytestring-builder since it has no modules
                        -- when bytestring is new enough.
                        packageHasExposedModules package
    buildingFinals = isFinalBuild || taskAllInOne
    enableTests = buildingFinals && any isCTest (taskComponents task)
    enableBenchmarks = buildingFinals && any isCBench (taskComponents task)
    -- Human-readable suffix like " (lib + exe + test)" for announcements.
    annSuffix = if result == "" then "" else " (" <> result <> ")"
      where
        result = T.intercalate " + " $ concat $
            [ ["lib" | taskAllInOne && hasLib]
            , ["exe" | taskAllInOne && hasExe]
            , ["test" | enableTests]
            , ["bench" | enableBenchmarks]
            ]
        (hasLib, hasExe) = case taskType of
            TTLocal lp -> (packageHasLibrary (lpPackage lp), not (Set.null (exesToBuild lp)))
            -- This isn't true, but we don't want to have this info for
            -- upstream deps.
            TTUpstream{} -> (False, False)
    -- Look up a usable precompiled-package cache entry. Only snapshot
    -- packages are eligible; the entry must not point back into the very
    -- install root we're building into, and every referenced file must exist.
    getPrecompiled cache =
        case taskLocation task of
            Snap -> do
                mpc <- readPrecompiledCache taskProvides
                    (configCacheOpts cache)
                    (configCacheDeps cache)
                case mpc of
                    Nothing -> return Nothing
                    Just pc | maybe False
                                (bcoSnapInstallRoot eeBaseConfigOpts `isParentOf`)
                                (parseAbsFile =<< (pcLibrary pc)) ->
                        -- If old precompiled cache files are left around but snapshots are deleted,
                        -- it is possible for the precompiled file to refer to the very library
                        -- we're building, and if flags are changed it may try to copy the library
                        -- to itself. This check prevents that from happening.
                        return Nothing
                    Just pc | otherwise -> do
                        -- Short-circuiting monadic "all": stop at the first
                        -- missing file.
                        let allM _ [] = return True
                            allM f (x:xs) = do
                                b <- f x
                                if b then allM f xs else return False
                        b <- liftIO $ allM D.doesFileExist $ maybe id (:) (pcLibrary pc) $ pcExes pc
                        return $ if b then Just pc else Nothing
            _ -> return Nothing
    -- Install a cached precompiled package: register its library (if any)
    -- into the snapshot DB and link/copy its executables into the bindir.
    copyPreCompiled (PrecompiledCache mlib exes) = do
        announceTask task "copying precompiled package"
        forM_ mlib $ \libpath -> do
            menv <- getMinimalEnvOverride
            withMVar eeInstallLock $ \() -> do
                -- We want to ignore the global and user databases.
                -- Unfortunately, ghc-pkg doesn't take such arguments on the
                -- command line. Instead, we'll set GHC_PACKAGE_PATH. See:
                -- https://github.com/commercialhaskell/stack/issues/1146
                menv' <- modifyEnvOverride menv
                       $ Map.insert
                            "GHC_PACKAGE_PATH"
                            (T.pack $ toFilePath $ bcoSnapDB eeBaseConfigOpts)
                -- In case a build of the library with different flags already exists, unregister it
                -- before copying.
                catch
                    (readProcessNull Nothing menv' "ghc-pkg"
                        [ "unregister"
                        , "--force"
                        , packageIdentifierString taskProvides
                        ])
                    (\(ReadProcessException _ _ _ _) -> return ())
                readProcessNull Nothing menv' "ghc-pkg"
                    [ "register"
                    , "--force"
                    , libpath
                    ]
        liftIO $ forM_ exes $ \exe -> do
            D.createDirectoryIfMissing True bindir
            let dst = bindir FP.</> FP.takeFileName exe
            -- Hard-link when possible; fall back to a copy (e.g. across
            -- filesystems or on platforms where createLink fails).
            createLink exe dst `catchIO` \_ -> D.copyFile exe dst
        case (mlib, exes) of
            (Nothing, _:_) -> markExeInstalled (taskLocation task) taskProvides
            _ -> return ()
        -- Find the package in the database
        wc <- getWhichCompiler
        let pkgDbs = [bcoSnapDB eeBaseConfigOpts]
        case mlib of
            Nothing -> return $ Just $ Executable taskProvides
            Just _ -> do
                mpkgid <- loadInstalledPkg eeEnvOverride wc pkgDbs eeSnapshotDumpPkgs pname
                return $ Just $
                    case mpkgid of
                        Nothing -> assert False $ Executable taskProvides
                        Just pkgid -> Library taskProvides pkgid
      where
        bindir = toFilePath $ bcoSnapInstallRoot eeBaseConfigOpts </> bindirSuffix
    -- The non-precompiled path: configure (if needed) then build.
    realConfigAndBuild cache allDepsMap = withSingleContext runInBase ac ee task (Just allDepsMap) Nothing
        $ \package cabalfp pkgDir cabal announce console _mlogFile -> do
            _neededConfig <- ensureConfig cache pkgDir ee (announce ("configure" <> annSuffix)) cabal cabalfp
            if boptsOnlyConfigure eeBuildOpts
                then return Nothing
                else liftM Just $ realBuild cache package pkgDir cabal announce console
    -- Run the actual Cabal build/haddock/copy/register sequence and return
    -- the resulting 'Installed' value.
    realBuild cache package pkgDir cabal announce console = do
        wc <- getWhichCompiler
        markExeNotInstalled (taskLocation task) taskProvides
        case taskType of
            TTLocal lp -> do
                when enableTests $ unsetTestSuccess pkgDir
                writeBuildCache pkgDir $ lpNewBuildCache lp
            TTUpstream _ _ -> return ()
        () <- announce ("build" <> annSuffix)
        config <- asks getConfig
        extraOpts <- extraBuildOptions eeBuildOpts
        -- Timestamp taken before the build so unlisted files created during
        -- the build can be detected afterwards.
        preBuildTime <- modTime <$> liftIO getCurrentTime
        cabal (console && configHideTHLoading config) $ ("build" :) $ (++ extraOpts) $
            case (taskType, taskAllInOne, isFinalBuild) of
                (_, True, True) -> fail "Invariant violated: cannot have an all-in-one build that also has a final build step."
                (TTLocal lp, False, False) -> primaryComponentOptions lp
                (TTLocal lp, False, True) -> finalComponentOptions lp
                (TTLocal lp, True, False) -> primaryComponentOptions lp ++ finalComponentOptions lp
                (TTUpstream{}, _, _) -> []
        checkForUnlistedFiles taskType preBuildTime pkgDir
        when (doHaddock package) $ do
            announce "haddock"
            -- Prefer haddock's own hyperlinked source when supported, else
            -- fall back to HsColour when available.
            sourceFlag <- do
                hyped <- tryProcessStdout Nothing eeEnvOverride "haddock" ["--hyperlinked-source"]
                case hyped of
                    -- Fancy crosslinked source
                    Right _ -> do
                        return ["--haddock-option=--hyperlinked-source"]
                    -- Older hscolour colouring
                    Left _ -> do
                        hscolourExists <- doesExecutableExist eeEnvOverride "HsColour"
                        unless hscolourExists $ $logWarn
                            ("Warning: haddock not generating hyperlinked sources because 'HsColour' not\n" <>
                             "found on PATH (use 'stack install hscolour' to install).")
                        return ["--hyperlink-source" | hscolourExists]
            cabal False (concat [["haddock", "--html", "--hoogle", "--html-location=../$pkg-$version/"]
                                ,sourceFlag])
        unless isFinalBuild $ withMVar eeInstallLock $ \() -> do
            announce "copy/register"
            cabal False ["copy"]
            when (packageHasLibrary package) $ cabal False ["register"]
        -- Pick the database (and dump-package TVar) matching the task's
        -- install location.
        let (installedPkgDb, installedDumpPkgsTVar) =
                case taskLocation task of
                    Snap ->
                        ( bcoSnapDB eeBaseConfigOpts
                        , eeSnapshotDumpPkgs )
                    Local ->
                        ( bcoLocalDB eeBaseConfigOpts
                        , eeLocalDumpPkgs )
        let ident = PackageIdentifier (packageName package) (packageVersion package)
        mpkgid <- if packageHasLibrary package
            then do
                mpkgid <- loadInstalledPkg eeEnvOverride wc [installedPkgDb] installedDumpPkgsTVar (packageName package)
                case mpkgid of
                    Nothing -> throwM $ Couldn'tFindPkgId $ packageName package
                    Just pkgid -> return $ Library ident pkgid
            else do
                markExeInstalled (taskLocation task) taskProvides -- TODO unify somehow with writeFlagCache?
                return $ Executable ident
        -- Record the precompiled-cache entry for snapshot packages so future
        -- builds with the same opts/deps can skip compilation.
        case taskLocation task of
            Snap -> writePrecompiledCache eeBaseConfigOpts taskProvides
                (configCacheOpts cache)
                (configCacheDeps cache)
                mpkgid (packageExes package)
            Local -> return ()
        return mpkgid
    -- Describe a just-registered package in the given databases, cache its
    -- dump info in the supplied TVar, and return its ghc-pkg id.
    loadInstalledPkg menv wc pkgDbs tvar name = do
        dps <- ghcPkgDescribe name menv wc pkgDbs $ conduitDumpPackage =$ CL.consume
        case dps of
            [] -> return Nothing
            [dp] -> do
                liftIO $ atomically $ modifyTVar' tvar (Map.insert (dpGhcPkgId dp) dp)
                return $ Just (dpGhcPkgId dp)
            _ -> error "singleBuild: invariant violated: multiple results when describing installed package"
-- | Check if any unlisted files have been found, and add them to the build
-- cache. Only applies to local packages; upstream packages are a no-op.
checkForUnlistedFiles :: M env m => TaskType -> ModTime -> Path Abs Dir -> m ()
checkForUnlistedFiles (TTLocal lp) preBuildTime pkgDir = do
    (newCaches, warnings) <-
        addUnlistedToBuildCache
            preBuildTime
            (lpPackage lp)
            (lpCabalFile lp)
            (lpNewBuildCache lp)
    -- Surface every warning from the cache scan.
    forM_ warnings $ \w ->
        $logWarn $ "Warning: " <> T.pack (show w)
    -- Persist the merged cache only when something new was discovered.
    unless (null newCaches) $
        writeBuildCache pkgDir $ Map.unions $ lpNewBuildCache lp : newCaches
checkForUnlistedFiles (TTUpstream _ _) _ _ = return ()
-- | Determine if all of the dependencies given are installed, each within
-- its required version range.
depsPresent :: InstalledMap -> Map PackageName VersionRange -> Bool
depsPresent installedMap deps =
    all satisfied (Map.toList deps)
  where
    -- A dependency is satisfied iff it is installed at a version inside the
    -- requested range; absence means not satisfied.
    satisfied (name, range) =
        case Map.lookup name installedMap of
            Just (_, installed) -> installedVersion installed `withinRange` range
            Nothing -> False
-- | Run the test suites of a single task's package, honouring the
-- run\/rerun\/coverage flags in 'TestOpts'. Throws 'TestSuiteFailure' when
-- any suite exits non-zero or its executable is missing.
singleTest :: M env m
           => (m () -> IO ())
           -> TestOpts
           -> [Text]
           -> ActionContext
           -> ExecuteEnv
           -> Task
           -> InstalledMap
           -> m ()
singleTest runInBase topts testsToRun ac ee task installedMap = do
    -- FIXME: Since this doesn't use cabal, we should be able to avoid using a
    -- fullblown 'withSingleContext'.
    (allDepsMap, _cache) <- getConfigCache ee task installedMap True False
    withSingleContext runInBase ac ee task (Just allDepsMap) (Just "test") $ \package _cabalfp pkgDir _cabal announce _console mlogFile -> do
        config <- asks getConfig
        let needHpc = toCoverage topts
        -- Decide whether to execute at all: --no-run-tests wins, then
        -- --rerun-tests, then a previously recorded success skips the run.
        toRun <-
            if toDisableRun topts
                then do
                    announce "Test running disabled by --no-run-tests flag."
                    return False
                else if toRerunTests topts
                    then return True
                    else do
                        success <- checkTestSuccess pkgDir
                        if success
                            then do
                                unless (null testsToRun) $ announce "skipping already passed test"
                                return False
                            else return True
        when toRun $ do
            bconfig <- asks getBuildConfig
            buildDir <- distDirFromDir pkgDir
            hpcDir <- hpcDirFromDir pkgDir
            when needHpc (createTree hpcDir)
            let exeExtension =
                    case configPlatform $ getConfig bconfig of
                        Platform _ Windows -> ".exe"
                        _ -> ""
            -- Run every requested suite, collecting per-suite failures.
            errs <- liftM Map.unions $ forM testsToRun $ \testName -> do
                nameDir <- parseRelDir $ T.unpack testName
                nameExe <- parseRelFile $ T.unpack testName ++ exeExtension
                nameTix <- liftM (pkgDir </>) $ parseRelFile $ T.unpack testName ++ ".tix"
                let exeName = buildDir </> $(mkRelDir "build") </> nameDir </> nameExe
                exists <- fileExists exeName
                menv <- liftIO $ configEnvOverride config EnvSettings
                    { esIncludeLocals = taskLocation task == Local
                    , esIncludeGhcPackagePath = True
                    , esStackExe = True
                    , esLocaleUtf8 = False
                    }
                if exists
                    then do
                        -- We clear out the .tix files before doing a run.
                        when needHpc $ do
                            tixexists <- fileExists nameTix
                            when tixexists $
                                $logWarn ("Removing HPC file " <> T.pack (toFilePath nameTix))
                            removeFileIfExists nameTix
                        let args = toAdditionalArgs topts
                            argsDisplay = case args of
                                [] -> ""
                                _ -> ", args: " <> T.intercalate " " (map showProcessArgDebug args)
                        announce $ "test (suite: " <> testName <> argsDisplay <> ")"
                        -- Suite output goes to the log file when one is
                        -- provided, otherwise it is inherited.
                        let cp = (proc (toFilePath exeName) args)
                                { cwd = Just $ toFilePath pkgDir
                                , Process.env = envHelper menv
                                , std_in = CreatePipe
                                , std_out =
                                    case mlogFile of
                                        Nothing -> Inherit
                                        Just (_, h) -> UseHandle h
                                , std_err =
                                    case mlogFile of
                                        Nothing -> Inherit
                                        Just (_, h) -> UseHandle h
                                }
                        -- Use createProcess_ to avoid the log file being closed afterwards
                        (Just inH, Nothing, Nothing, ph) <- liftIO $ createProcess_ "singleBuild.runTests" cp
                        liftIO $ hClose inH
                        ec <- liftIO $ waitForProcess ph
                        -- Move the .tix file out of the package
                        -- directory into the hpc work dir, for
                        -- tidiness.
                        when needHpc $
                            updateTixFile (packageName package) nameTix
                        return $ case ec of
                            ExitSuccess -> Map.empty
                            _ -> Map.singleton testName $ Just ec
                    else do
                        $logError $ T.concat
                            [ "Test suite "
                            , testName
                            , " executable not found for "
                            , packageNameText $ packageName package
                            ]
                        return $ Map.singleton testName Nothing
            when needHpc $ generateHpcReport pkgDir package testsToRun
            -- Read back the captured log (if any) so it can be attached to
            -- the failure exception.
            bs <- liftIO $
                case mlogFile of
                    Nothing -> return ""
                    Just (logFile, h) -> do
                        hClose h
                        S.readFile $ toFilePath logFile
            unless (Map.null errs) $ throwM $ TestSuiteFailure
                (taskProvides task)
                errs
                (fmap fst mlogFile)
                bs
-- | Run the benchmarks of a single task's package, unless disabled by
-- @--no-run-benchmarks@.
singleBench :: M env m
            => (m () -> IO ())
            -> BenchmarkOpts
            -> ActionContext
            -> ExecuteEnv
            -> Task
            -> InstalledMap
            -> m ()
singleBench runInBase beopts ac ee task installedMap = do
    -- FIXME: Since this doesn't use cabal, we should be able to avoid using a
    -- fullblown 'withSingleContext'.
    (allDepsMap, _cache) <- getConfigCache ee task installedMap False True
    withSingleContext runInBase ac ee task (Just allDepsMap) (Just "bench") $
        \_package _cabalfp _pkgDir cabal announce _console _mlogFile ->
            if beoDisableRun beopts
                then announce "Benchmark running disabled by --no-run-benchmarks flag."
                else do
                    announce "benchmarks"
                    cabal False ("bench" : benchArgs)
  where
    -- Pass any user-supplied options through to the benchmark executable.
    benchArgs =
        case beoAdditionalArgs beopts of
            Nothing -> []
            Just extra -> ["--benchmark-options=" <> extra]
-- | Grab all output from the given @Handle@ and print it to stdout, stripping
-- Template Haskell "Loading package" lines. Does work in a separate thread.
printBuildOutput :: (MonadIO m, MonadBaseControl IO m, MonadLogger m)
                 => Bool -- ^ exclude TH loading?
                 -> Bool -- ^ convert paths to absolute?
                 -> Path Abs Dir -- ^ package's root directory
                 -> LogLevel
                 -> Handle -> m ()
printBuildOutput excludeTHLoading makeAbsolute pkgDir level outH = void $
    CB.sourceHandle outH
    $$ CB.lines
    =$ CL.map stripCarriageReturn
    =$ CL.filter (not . isTHLoading)
    =$ CL.mapM toAbsolutePath
    =$ CL.mapM_ (monadLoggerLog $(TH.location >>= liftLoc) "" level)
  where
    -- | Is this line a Template Haskell "Loading package" line
    -- ByteString
    isTHLoading :: S8.ByteString -> Bool
    isTHLoading _ | not excludeTHLoading = False
    isTHLoading bs =
        "Loading package " `S8.isPrefixOf` bs &&
        ("done." `S8.isSuffixOf` bs || "done.\r" `S8.isSuffixOf` bs)
    -- | Convert GHC error lines with file paths to have absolute file paths
    toAbsolutePath bs | not makeAbsolute = return bs
    toAbsolutePath bs = do
        -- Split at the first colon: candidate file path, then line:col suffix.
        let (x, y) = S.break (== _colon) bs
        mabs <-
            if isValidSuffix y
                then do
                    -- Resolution can fail for prefixes that are not actually
                    -- paths; fall back to the original line in that case.
                    efp <- liftIO $ tryIO $ resolveFile pkgDir (S8.unpack x)
                    case efp of
                        Left _ -> return Nothing
                        Right fp -> return $ Just $ S8.pack (toFilePath fp)
                else return Nothing
        case mabs of
            Nothing -> return bs
            Just fp -> return $ fp `S.append` y
    -- | Match the line:column format at the end of lines
    isValidSuffix bs0 = maybe False (const True) $ do
        guard $ not $ S.null bs0
        guard $ S.head bs0 == _colon
        (_, bs1) <- S8.readInt $ S.drop 1 bs0
        guard $ not $ S.null bs1
        guard $ S.head bs1 == _colon
        (_, bs2) <- S8.readInt $ S.drop 1 bs1
        guard $ (bs2 == ":" || bs2 == ": Warning:")
    -- | Strip @\r@ characters from the byte vector. Used because Windows.
    stripCarriageReturn :: ByteString -> ByteString
    stripCarriageReturn = S8.filter (not . (=='\r'))
-- | Find the Setup.hs or Setup.lhs in the given directory. If none exists,
-- throw a 'NoSetupHsFound' exception.
getSetupHs :: Path Abs Dir -- ^ project directory
           -> IO (Path Abs File)
getSetupHs dir = firstExisting candidates
  where
    -- Candidate setup files, in order of preference.
    candidates =
        [ dir </> $(mkRelFile "Setup.hs")
        , dir </> $(mkRelFile "Setup.lhs")
        ]
    firstExisting [] = throwM $ NoSetupHsFound dir
    firstExisting (fp:fps) = do
        present <- fileExists fp
        if present then return fp else firstExisting fps
-- Do not pass `-hpcdir` as GHC option if the coverage is not enabled.
-- This helps running stack-compiled programs with dynamic interpreters like `hint`.
-- Cfr: https://github.com/commercialhaskell/stack/issues/997
--
-- Always passes -ddump-hi/-ddump-to-file so interface dumps are available.
extraBuildOptions :: M env m => BuildOpts -> m [String]
extraBuildOptions bopts = do
    let ddumpOpts = " -ddump-hi -ddump-to-file"
    -- Idiomatic if/then/else instead of case-matching on a Bool.
    if toCoverage (boptsTestOpts bopts)
        then do
            hpcIndexDir <- toFilePath <$> hpcRelativeDir
            return ["--ghc-options", "-hpcdir " ++ hpcIndexDir ++ ddumpOpts]
        else return ["--ghc-options", ddumpOpts]
-- Library and executable build components for a local package.
primaryComponentOptions :: LocalPackage -> [String]
primaryComponentOptions lp = libComponents ++ exeComponents
  where
    pkg = lpPackage lp
    -- TODO: get this information from target parsing instead,
    -- which will allow users to turn off library building if
    -- desired
    libComponents
        | packageHasLibrary pkg = ["lib:" ++ packageNameString (packageName pkg)]
        | otherwise = []
    exeComponents =
        map (T.unpack . T.append "exe:") (Set.toList (exesToBuild lp))
-- | All executables of the package; see the note below for why this is not
-- restricted to the wanted components.
exesToBuild :: LocalPackage -> Set Text
exesToBuild lp = packageExes (lpPackage lp)
-- NOTE: Ideally we'd do something like the following code, allowing
-- the user to control which executables get built. However, due to
-- https://github.com/haskell/cabal/issues/2780 we must build all
-- exes...
--
-- if lpWanted lp
-- then exeComponents (lpComponents lp)
-- -- Build all executables in the event that no
-- -- specific list is provided (as happens with
-- -- extra-deps).
-- else packageExes (lpPackage lp)
-- Test-suite and benchmark build components for a local package.
finalComponentOptions :: LocalPackage -> [String]
finalComponentOptions lp =
    [ T.unpack (decodeUtf8 (renderComponent c))
    | c <- Set.toList (lpComponents lp)
    , isCTest c || isCBench c
    ]
-- | The components a task will build; upstream packages build no
-- individual components.
taskComponents :: Task -> Set NamedComponent
taskComponents task =
    case taskType task of
        TTUpstream{} -> Set.empty
        TTLocal localPkg -> lpComponents localPkg
-- | Take the given list of package dependencies and the contents of the global
-- package database, and construct a set of installed package IDs that:
--
-- * Excludes the Cabal library (it's added later)
--
-- * Includes all packages depended on by this package
--
-- * Includes all global packages, unless: (1) it's hidden, (2) it's shadowed
--   by a depended-on package, or (3) one of its dependencies is not met.
--
-- See:
--
-- * https://github.com/commercialhaskell/stack/issues/941
--
-- * https://github.com/commercialhaskell/stack/issues/944
--
-- * https://github.com/commercialhaskell/stack/issues/949
addGlobalPackages :: Map PackageIdentifier GhcPkgId -- ^ dependencies of the package
                  -> [DumpPackage () ()] -- ^ global packages
                  -> Set GhcPkgId
addGlobalPackages deps globals0 =
    res
  where
    -- Initial set of packages: the installed IDs of all dependencies
    res0 = Map.elems $ Map.filterWithKey (\ident _ -> not $ isCabal ident) deps
    -- First check on globals: it's not shadowed by a dep, it's not Cabal, and
    -- it's exposed
    goodGlobal1 dp = not (isDep dp)
                  && not (isCabal $ dpPackageIdent dp)
                  && dpIsExposed dp
    globals1 = filter goodGlobal1 globals0
    -- Create a Map of unique package names in the global database
    globals2 = Map.fromListWith chooseBest
             $ map (packageIdentifierName . dpPackageIdent &&& id) globals1
    -- Final result: add in globals that have their dependencies met
    res = loop id (Map.elems globals2) $ Set.fromList res0
    ----------------------------------
    -- Some auxiliary helper functions
    ----------------------------------
    -- Is the given package identifier for any version of Cabal
    isCabal (PackageIdentifier name _) = name == $(mkPackageName "Cabal")
    -- Is the given package name provided by the package dependencies?
    isDep dp = packageIdentifierName (dpPackageIdent dp) `Set.member` depNames
    depNames = Set.map packageIdentifierName $ Map.keysSet deps
    -- Choose the best of two competing global packages (the newest version)
    chooseBest dp1 dp2
        | getVer dp1 < getVer dp2 = dp2
        | otherwise = dp1
      where
        getVer = packageIdentifierVersion . dpPackageIdent
    -- Are all dependencies of the given package met by the given Set of
    -- installed packages
    depsMet dp gids = all (`Set.member` gids) (dpDepends dp)
    -- Find all globals that have all of their dependencies met.
    -- 'front' is a difference list accumulating the globals whose
    -- dependencies have not (yet) been met on the current pass.
    loop front (dp:dps) gids
        -- This package has its deps met. Add it to the list of dependencies
        -- and then traverse the list from the beginning (this package may have
        -- been a dependency of an earlier one).
        | depsMet dp gids = loop id (front dps) (Set.insert (dpGhcPkgId dp) gids)
        -- Deps are not met, keep going
        | otherwise = loop (front . (dp:)) dps gids
    -- None of the packages we checked can be added, therefore drop them all
    -- and return our results
    loop _ [] gids = gids
| vigoo/stack | src/Stack/Build/Execute.hs | bsd-3-clause | 65,581 | 0 | 32 | 25,458 | 13,909 | 6,959 | 6,950 | -1 | -1 |
{-# LANGUAGE NoMonoLocalBinds #-}
{-# LANGUAGE FlexibleContexts #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Taffybar.Information.MPRIS2
-- Copyright : (c) Ivan A. Malison
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Ivan A. Malison
-- Stability : unstable
-- Portability : unportable
--
-----------------------------------------------------------------------------
module System.Taffybar.Information.MPRIS2 where
import Control.Applicative
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Except
import Control.Monad.Trans.Maybe
import qualified DBus
import qualified DBus.Client as DBus
import qualified DBus.Internal.Types as DBus
import qualified DBus.TH as DBus
import Data.Coerce
import Data.List
import qualified Data.Map as M
import Data.Maybe
import System.Log.Logger
import System.Taffybar.DBus.Client.MPRIS2
import Text.Printf
-- | A snapshot of one MPRIS2 media player's current track and playback state.
data NowPlaying = NowPlaying
  { npTitle :: String        -- ^ Track title (from "xesam:title").
  , npArtists :: [String]    -- ^ Artist list (from "xesam:artist"); may be empty.
  , npStatus :: String       -- ^ Playback status string as reported by the player.
  , npBusName :: DBus.BusName -- ^ Bus name of the player that reported this track.
  } deriving (Show, Eq)
-- | Collapse an 'Either' into a 'Maybe', logging the 'Left' case as a
-- warning instead of propagating it.
eitherToMaybeWithLog :: (MonadIO m, Show a1) => Either a1 a2 -> m (Maybe a2)
eitherToMaybeWithLog result =
  case result of
    Right value -> return (Just value)
    Left err -> liftIO $ do
      logM "System.Taffybar.Information.MPRIS2" WARNING $
        printf "Got error: %s" (show err)
      return Nothing
-- | Query the bus for all MPRIS2 media players (bus names prefixed
-- "org.mpris.MediaPlayer2.") and collect each one's now-playing data;
-- players that fail to answer are logged and skipped.
getNowPlayingInfo :: MonadIO m => DBus.Client -> m [NowPlaying]
getNowPlayingInfo client =
  fmap (fromMaybe []) $ eitherToMaybeWithLog =<< liftIO (runExceptT $ do
    allBusNames <- ExceptT $ DBus.listNames client
    let mediaPlayerBusNames =
          filter (isPrefixOf "org.mpris.MediaPlayer2.") allBusNames
        -- One player: any missing piece (metadata, title, status) yields
        -- Nothing for that player rather than failing the whole query.
        getSongData _busName = runMaybeT $
          do
            let busName = coerce _busName
            metadataMap <-
              MaybeT $ getMetadata client busName >>= eitherToMaybeWithLog
            (title, artists) <- MaybeT $ return $ getSongInfo metadataMap
            status <- MaybeT $ getPlaybackStatus client busName >>=
                      eitherToMaybeWithLog
            return NowPlaying { npTitle = title
                              , npArtists = artists
                              , npStatus = status
                              , npBusName = busName
                              }
    lift $ catMaybes <$> mapM getSongData mediaPlayerBusNames)
-- | Extract the track title and artist list from an MPRIS2 metadata map.
-- A missing or wrongly-typed artist entry falls back to an empty list;
-- a missing title makes the whole result 'Nothing'.
getSongInfo :: M.Map String DBus.Variant -> Maybe (String, [String])
getSongInfo songData = do
  title <- fetch "xesam:title"
  let artists = fromMaybe [] (fetch "xesam:artist")
  return (title, artists)
  where
    fetch key = DBus.fromVariant =<< M.lookup key songData
| teleshoes/taffybar | src/System/Taffybar/Information/MPRIS2.hs | bsd-3-clause | 2,799 | 0 | 20 | 720 | 616 | 333 | 283 | 56 | 1 |
module Day15 where
import Prelude hiding (length, map, reverse, take)
import Data.Bool
import Data.Foldable (toList)
import Data.Sequence
maxLength = 35651584
-- | Render a bit sequence as a string of '0'/'1' characters
-- (True -> '1', False -> '0').
showSeq :: Seq Bool -> String
showSeq bits = toList (fmap render bits)
  where render b = if b then '1' else '0'
-- | Parse a string of '0'/'1' characters into a bit sequence
-- ('1' -> True, anything else -> False).
readSeq :: String -> Seq Bool
readSeq str = fromList [c == '1' | c <- str]
-- | Repeatedly expand the bits as  a -> a ++ [False] ++ reverse (map not a)
-- until at least 'maxLength' bits exist, then truncate to exactly
-- 'maxLength'.
generateData :: Seq Bool -> Seq Bool
generateData bits =
  case maybeTake maxLength bits of
    Just full -> full
    Nothing -> generateData (bits >< singleton False >< fmap not (reverse bits))
-- | Repeatedly collapse adjacent pairs of bits with 'booleanXor' until the
-- sequence length is odd; that odd-length sequence is the checksum.
calculateChecksum :: Seq Bool -> Seq Bool
calculateChecksum bits =
  case maybeOdd bits of
    Just checksum -> checksum
    Nothing -> calculateChecksum (fmap booleanXor (chunksOf 2 bits))
-- | True when the first two bits are equal (i.e. XNOR of a length-2 chunk).
-- Note: 'empty' in the nested view pattern is a fresh binding, not
-- 'Data.Sequence.empty', so the match succeeds for any sequence of length
-- >= 2 and is partial for shorter input; 'calculateChecksum' only passes
-- chunks of length 2 because it splits an even-length sequence.
booleanXor :: Seq Bool -> Bool
booleanXor (viewl -> a :< (viewl -> b :< empty)) = a == b
-- | Like 'take', but succeeds only when the sequence actually contains at
-- least n elements; otherwise yields 'Nothing'.
maybeTake :: Int -> Seq a -> Maybe (Seq a)
maybeTake n xs
  | length prefix == n = Just prefix
  | otherwise = Nothing
  where prefix = take n xs
-- | Return the sequence unchanged if its length is odd, else 'Nothing'.
maybeOdd :: Seq a -> Maybe (Seq a)
maybeOdd xs
  | odd (length xs) = Just xs
  | otherwise = Nothing
run = print $ showSeq . calculateChecksum . generateData $ readSeq "10011111011011001"
| ulyssesp/AoC | src/day15.hs | bsd-3-clause | 1,108 | 0 | 11 | 245 | 427 | 219 | 208 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module AWS.RDS.Types.DBSecurityGroup
( DBSecurityGroup(..)
, EC2SecurityGroup(..)
, EC2SecurityGroupStatus(..)
, IPRange(..)
, IPRangeStatus(..)
) where
import AWS.Lib.FromText (deriveFromText, AddrRange, IPv4, Text)
-- | An RDS DB security group: its identity plus the EC2 security groups
-- and IP ranges authorised by it.
data DBSecurityGroup = DBSecurityGroup
    { dbSecurityGroupEC2SecurityGroups :: [EC2SecurityGroup]
    , dbSecurityGroupDescription :: Text
    , dbSecurityGroupIPRanges :: [IPRange]
    , dbSecurityGroupVpcId :: Maybe Text
    , dbSecurityGroupOwnerId :: Text
    , dbSecurityGroupName :: Text
    }
  deriving (Show, Eq)
-- | An EC2 security group referenced from a DB security group, together
-- with the authorisation status of that reference.
data EC2SecurityGroup = EC2SecurityGroup
    { ec2SecurityGroupStatus :: EC2SecurityGroupStatus
    , ec2SecurityGroupOwnerId :: Maybe Text
    , ec2SecurityGroupName :: Text
    , ec2SecurityGroupId :: Maybe Text
    }
  deriving (Show, Eq)
-- | Authorisation state of an EC2 security group reference; parsed from
-- the wire strings "authorizing"/"authorized"/"revoking"/"revoked" by the
-- 'deriveFromText' splice below.
data EC2SecurityGroupStatus
    = EC2SecurityGroupStatusAuthorizing
    | EC2SecurityGroupStatusAuthorized
    | EC2SecurityGroupStatusRevoking
    | EC2SecurityGroupStatusRevoked
  deriving (Show, Read, Eq)
-- | A CIDR IP range authorised by a DB security group.
data IPRange = IPRange
    { ipRangeCidrIp :: AddrRange IPv4
    , ipRangeStatus :: IPRangeStatus
    }
  deriving (Show, Eq)
-- | Authorisation state of an IP range; parsed from the same wire strings
-- as 'EC2SecurityGroupStatus'.
data IPRangeStatus
    = IPRangeStatusAuthorizing
    | IPRangeStatusAuthorized
    | IPRangeStatusRevoking
    | IPRangeStatusRevoked
  deriving (Show, Read, Eq)
-- TH: derive 'FromText' instances mapping these wire strings, in order,
-- onto the constructors of the named status types.
deriveFromText "EC2SecurityGroupStatus"
    ["authorizing", "authorized", "revoking", "revoked"]
deriveFromText "IPRangeStatus"
    ["authorizing", "authorized", "revoking", "revoked"]
| IanConnolly/aws-sdk-fork | AWS/RDS/Types/DBSecurityGroup.hs | bsd-3-clause | 1,530 | 0 | 9 | 282 | 318 | 193 | 125 | 42 | 0 |
{-# LANGUAGE TemplateHaskell #-}
-- | <http://strava.github.io/api/v3/segments/>
module Strive.Types.Segments
( SegmentDetailed (..)
, SegmentSummary (..)
, SegmentLeaderboardResponse (..)
, SegmentLeaderboardEntry (..)
, SegmentExplorerResponse (..)
, SegmentExplorerEntry (..)
) where
import Data.Aeson.TH (deriveFromJSON)
import Data.Text (Text)
import Data.Time.Clock (UTCTime)
import Strive.Enums (ActivityType, Gender, ResourceState)
import Strive.Internal.TH (options)
import Strive.Types.Polylines (Polyline, PolylineDetailed)
-- | <http://strava.github.io/api/v3/segments/#detailed>
--
-- Full segment representation returned by the detail endpoint; a superset
-- of 'SegmentSummary'. JSON decoding is derived by the splice below.
data SegmentDetailed = SegmentDetailed
  { segmentDetailed_activityType :: ActivityType
  , segmentDetailed_athleteCount :: Integer
  , segmentDetailed_averageGrade :: Double
  , segmentDetailed_city :: Text
  , segmentDetailed_climbCategory :: Integer
  , segmentDetailed_country :: Text
  , segmentDetailed_createdAt :: UTCTime
  , segmentDetailed_distance :: Double
  , segmentDetailed_effortCount :: Integer
  , segmentDetailed_elevationHigh :: Double
  , segmentDetailed_elevationLow :: Double
  , segmentDetailed_endLatitude :: Double
  , segmentDetailed_endLatlng :: (Double, Double)
  , segmentDetailed_endLongitude :: Double
  , segmentDetailed_hazardous :: Bool
  , segmentDetailed_id :: Integer
  , segmentDetailed_map :: PolylineDetailed
  , segmentDetailed_maximumGrade :: Double
  , segmentDetailed_name :: Text
  , segmentDetailed_private :: Bool
  , segmentDetailed_resourceState :: ResourceState
  , segmentDetailed_starCount :: Integer
  , segmentDetailed_starred :: Bool
  , segmentDetailed_startLatitude :: Double
  , segmentDetailed_startLatlng :: (Double, Double)
  , segmentDetailed_startLongitude :: Double
  , segmentDetailed_state :: Text
  , segmentDetailed_totalElevationGain :: Double
  , segmentDetailed_updatedAt :: UTCTime
  } deriving Show

$(deriveFromJSON options ''SegmentDetailed)
-- | <http://strava.github.io/api/v3/segments/#summary>
--
-- Abbreviated segment representation embedded in other API responses.
data SegmentSummary = SegmentSummary
  { segmentSummary_activityType :: ActivityType
  , segmentSummary_averageGrade :: Double
  , segmentSummary_city :: Text
  , segmentSummary_climbCategory :: Integer
  , segmentSummary_country :: Text
  , segmentSummary_distance :: Double
  , segmentSummary_elevationHigh :: Double
  , segmentSummary_elevationLow :: Double
  , segmentSummary_endLatitude :: Double
  , segmentSummary_endLatlng :: (Double, Double)
  , segmentSummary_endLongitude :: Double
  , segmentSummary_id :: Integer
  , segmentSummary_maximumGrade :: Double
  , segmentSummary_name :: Text
  , segmentSummary_private :: Bool
  , segmentSummary_resourceState :: ResourceState
  , segmentSummary_starred :: Bool
  , segmentSummary_startLatitude :: Double
  , segmentSummary_startLatlng :: (Double, Double)
  , segmentSummary_startLongitude :: Double
  , segmentSummary_state :: Text
  } deriving Show

$(deriveFromJSON options ''SegmentSummary)
-- | <http://strava.github.io/api/v3/segments/#leaderboard>
--
-- One row of a segment leaderboard: the effort, its athlete, and its rank.
data SegmentLeaderboardEntry = SegmentLeaderboardEntry
  { segmentLeaderboardEntry_activityId :: Integer
  , segmentLeaderboardEntry_athleteGender :: Maybe Gender
  , segmentLeaderboardEntry_athleteId :: Integer
  , segmentLeaderboardEntry_athleteName :: Text
  , segmentLeaderboardEntry_athleteProfile :: Text
  , segmentLeaderboardEntry_averageHr :: Double
  , segmentLeaderboardEntry_averageWatts :: Double
  , segmentLeaderboardEntry_distance :: Double
  , segmentLeaderboardEntry_effortId :: Integer
  , segmentLeaderboardEntry_elapsedTime :: Integer
  , segmentLeaderboardEntry_movingTime :: Integer
  , segmentLeaderboardEntry_rank :: Integer
  , segmentLeaderboardEntry_startDate :: UTCTime
  , segmentLeaderboardEntry_startDateLocal :: UTCTime
  } deriving Show

$(deriveFromJSON options ''SegmentLeaderboardEntry)
-- | <http://strava.github.io/api/v3/segments/#leaderboard>
--
-- Envelope of a leaderboard query: totals plus the requested page of entries.
data SegmentLeaderboardResponse = SegmentLeaderboardResponse
  { segmentLeaderboardResponse_effortCount :: Integer
  , segmentLeaderboardResponse_entryCount :: Integer
  , segmentLeaderboardResponse_entries :: [SegmentLeaderboardEntry]
  } deriving Show

$(deriveFromJSON options ''SegmentLeaderboardResponse)
-- | <http://strava.github.io/api/v3/segments/#explore>
--
-- One segment returned by the segment-explorer endpoint.
data SegmentExplorerEntry = SegmentExplorerEntry
  { segmentExplorerEntry_avgGrade :: Double
  , segmentExplorerEntry_climbCategory :: Integer
  , segmentExplorerEntry_climbCategoryDesc :: String
  , segmentExplorerEntry_distance :: Double
  , segmentExplorerEntry_elevDifference :: Double
  , segmentExplorerEntry_endLatlng :: (Double, Double)
  , segmentExplorerEntry_id :: Integer
  , segmentExplorerEntry_name :: Text
  , segmentExplorerEntry_points :: Polyline
  , segmentExplorerEntry_resourceState :: ResourceState
  , segmentExplorerEntry_starred :: Bool
  , segmentExplorerEntry_startLatlng :: (Double, Double)
  } deriving Show

$(deriveFromJSON options ''SegmentExplorerEntry)
-- | <http://strava.github.io/api/v3/segments/#explore>
--
-- Envelope of an explorer query: the list of matching segments.
data SegmentExplorerResponse = SegmentExplorerResponse
  { segmentExplorerResponse_segments :: [SegmentExplorerEntry]
  } deriving Show

$(deriveFromJSON options ''SegmentExplorerResponse)
| liskin/strive | library/Strive/Types/Segments.hs | mit | 5,669 | 0 | 9 | 1,140 | 813 | 517 | 296 | 112 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.ECS.StartTask
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Starts a new task from the specified task definition on the specified
-- container instance or instances. If you want to use the default Amazon ECS
-- scheduler to place your task, use 'RunTask' instead.
--
-- The list of container instances to start tasks on is limited to 10.
--
--
--
-- <http://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_StartTask.html>
module Network.AWS.ECS.StartTask
(
-- * Request
StartTask
-- ** Request constructor
, startTask
-- ** Request lenses
, st1Cluster
, st1ContainerInstances
, st1Overrides
, st1StartedBy
, st1TaskDefinition
-- * Response
, StartTaskResponse
-- ** Response constructor
, startTaskResponse
-- ** Response lenses
, strFailures
, strTasks
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.ECS.Types
import qualified GHC.Exts
-- | Request payload for the ECS StartTask operation; see the lens
-- documentation below for the meaning of each field.
data StartTask = StartTask
    { _st1Cluster            :: Maybe Text
    , _st1ContainerInstances :: List "containerInstances" Text
    , _st1Overrides          :: Maybe TaskOverride
    , _st1StartedBy          :: Maybe Text
    , _st1TaskDefinition     :: Text
    } deriving (Eq, Read, Show)
-- | 'StartTask' constructor.
--
-- Only the task definition is required; all other fields start out empty.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'st1Cluster' @::@ 'Maybe' 'Text'
--
-- * 'st1ContainerInstances' @::@ ['Text']
--
-- * 'st1Overrides' @::@ 'Maybe' 'TaskOverride'
--
-- * 'st1StartedBy' @::@ 'Maybe' 'Text'
--
-- * 'st1TaskDefinition' @::@ 'Text'
--
startTask :: Text -- ^ 'st1TaskDefinition'
          -> StartTask
startTask p1 = StartTask
    { _st1TaskDefinition     = p1
    , _st1Cluster            = Nothing
    , _st1Overrides          = Nothing
    , _st1ContainerInstances = mempty
    , _st1StartedBy          = Nothing
    }
-- | The short name or full Amazon Resource Name (ARN) of the cluster on
-- which to start the task; the default cluster is assumed when unset.
st1Cluster :: Lens' StartTask (Maybe Text)
st1Cluster = lens _st1Cluster update
  where update rq v = rq { _st1Cluster = v }
-- | The container instance UUIDs or full Amazon Resource Name (ARN) entries
-- for the container instances on which you would like to place your task.
--
-- The list of container instances to start tasks on is limited to 10.
st1ContainerInstances :: Lens' StartTask [Text]
st1ContainerInstances = wrapped . _List
  where
    wrapped = lens _st1ContainerInstances
                   (\rq v -> rq { _st1ContainerInstances = v })
-- | A list of container overrides in JSON format that specify the name of a
-- container in the specified task definition and the command it should run
-- instead of its default. A total of 8192 characters are allowed for
-- overrides, including the JSON formatting characters of the structure.
st1Overrides :: Lens' StartTask (Maybe TaskOverride)
st1Overrides = lens _st1Overrides update
  where update rq v = rq { _st1Overrides = v }
-- | An optional tag recorded when the task is started — for example a
-- unique identifier of the batch job that triggered it. Tasks can later be
-- found by filtering a 'ListTasks' call on this 'startedBy' value.
--
-- If a task is started by an Amazon ECS service, then the 'startedBy'
-- parameter contains the deployment ID of that service.
st1StartedBy :: Lens' StartTask (Maybe Text)
st1StartedBy = lens _st1StartedBy update
  where update rq v = rq { _st1StartedBy = v }
-- | The 'family' and 'revision' ('family:revision') or full Amazon Resource
-- Name (ARN) of the task definition that you want to start.
st1TaskDefinition :: Lens' StartTask Text
st1TaskDefinition = lens _st1TaskDefinition update
  where update rq v = rq { _st1TaskDefinition = v }
-- | Response payload of StartTask: started tasks plus any per-task failures.
data StartTaskResponse = StartTaskResponse
    { _strFailures :: List "failures" Failure
    , _strTasks    :: List "tasks" Task
    } deriving (Eq, Read, Show)
-- | 'StartTaskResponse' constructor. Both lists start out empty.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'strFailures' @::@ ['Failure']
--
-- * 'strTasks' @::@ ['Task']
--
startTaskResponse :: StartTaskResponse
startTaskResponse = StartTaskResponse
    { _strTasks    = mempty
    , _strFailures = mempty
    }
-- | Any failed tasks from the 'StartTask' action are listed here.
strFailures :: Lens' StartTaskResponse [Failure]
strFailures = wrapped . _List
  where wrapped = lens _strFailures (\rs v -> rs { _strFailures = v })
-- | A full description of the tasks that were started. Each task that was
-- successfully placed on a container instance is described here.
strTasks :: Lens' StartTaskResponse [Task]
strTasks = wrapped . _List
  where wrapped = lens _strTasks (\rs v -> rs { _strTasks = v })
-- All request data travels in the JSON body, so the path is fixed and the
-- query string is empty.
instance ToPath StartTask where
    toPath = const "/"
instance ToQuery StartTask where
    toQuery = const mempty
instance ToHeaders StartTask
-- | Serialise the request body; the keys match the ECS wire format.
instance ToJSON StartTask where
    toJSON StartTask{..} = object
        [ "cluster"            .= _st1Cluster
        , "taskDefinition"     .= _st1TaskDefinition
        , "overrides"          .= _st1Overrides
        , "containerInstances" .= _st1ContainerInstances
        , "startedBy"          .= _st1StartedBy
        ]
-- | Dispatch as a POST named "StartTask"; the response is decoded as JSON.
instance AWSRequest StartTask where
    type Sv StartTask = ECS
    type Rs StartTask = StartTaskResponse
    request  = post "StartTask"
    response = jsonResponse
-- | Missing "failures"/"tasks" keys decode as empty lists.
instance FromJSON StartTaskResponse where
    parseJSON = withObject "StartTaskResponse" $ \o -> StartTaskResponse
        <$> o .:? "failures" .!= mempty
        <*> o .:? "tasks" .!= mempty
| kim/amazonka | amazonka-ecs/gen/Network/AWS/ECS/StartTask.hs | mpl-2.0 | 6,639 | 0 | 13 | 1,495 | 844 | 512 | 332 | 89 | 1 |
module Main where
import Control.Exception
import Control.Monad
import System.Mem
import Control.Monad.ST
import Data.Array
import Data.Array.ST
import qualified Data.Array.Unboxed as U
import Control.DeepSeq
import Data.Compact
import Data.Compact.Internal
-- | Abort the test by throwing an 'AssertionFailed' with the given message.
assertFail :: String -> IO ()
assertFail = throwIO . AssertionFailed
-- | Fail (via 'assertFail') unless the two values are equal, reporting both.
assertEquals :: (Eq a, Show a) => a -> a -> IO ()
assertEquals expected actual
  | expected == actual = return ()
  | otherwise =
      assertFail ("expected " ++ show expected ++ ", got " ++ show actual)
-- | Build a mutable array indexed 1..10 with element j set to 2*j + 1.
arrTest :: (Monad m, MArray a e m, Num e) => m (a Int e)
arrTest = do
  arr <- newArray (1, 10) 0
  forM_ [1..10] $ \j -> do
    writeArray arr j (fromIntegral $ 2*j + 1)
  return arr
-- | WHNF suffices: an unboxed array holds no thunks, so 'seq' fully
-- evaluates it.
instance NFData (U.UArray i e) where
  rnf x = seq x ()
-- test :: (Word -> a -> IO (Maybe (Compact a))) -> IO ()
-- | Freeze the same array contents four different ways, move the bundle
-- into a compact region via 'func', and check the values survive a major GC
-- both inside and outside the region.
test func = do
  let fromList :: Array Int Int
      fromList = listArray (1, 10) [1..]
      frozen :: Array Int Int
      frozen = runST $ do
        arr <- arrTest :: ST s (STArray s Int Int)
        freeze arr
      stFrozen :: Array Int Int
      stFrozen = runSTArray arrTest
      unboxedFrozen :: U.UArray Int Int
      unboxedFrozen = runSTUArray arrTest
  let val = (fromList, frozen, stFrozen, unboxedFrozen)
  str <- func val
  -- check that val is still good
  assertEquals (fromList, frozen, stFrozen, unboxedFrozen) val
  -- check the value in the compact
  assertEquals val (getCompact str)
  performMajorGC
  -- check again the value in the compact
  assertEquals val (getCompact str)
-- | Exercise compaction both with and without sharing preservation.
main = do
  test (compactSized 4096 True)
  test (compactSized 4096 False)
| olsner/ghc | libraries/compact/tests/compact_simple_array.hs | bsd-3-clause | 1,628 | 0 | 16 | 375 | 574 | 295 | 279 | 46 | 2 |
module Tandoori.Typing.InstanceDecl where
import Tandoori.Typing
import Tandoori.Typing.Monad
import Tandoori.GHC.Internals
import Tandoori.Typing.Repr
import Tandoori.Typing.Error
-- | Interpret a class instance declaration: yields the (class, type
-- constructor) pair identifying the instance together with the polymorphic
-- type of the instance head. Raises 'InvalidInstance' for heads that are
-- not a single-parameter class applied to a type-constructor application.
instDecl :: InstDecl Name -> Typing ((Cls, TyCon), PolyTy)
instDecl (InstDecl lty binds lsigs _) = withLSrc lty $ do
  -- Accept the head both with and without an explicit forall/context.
  (cls, σ) <- case unLoc lty of
               HsForAllTy e bndr ctx (L _ (HsPredTy (HsClassP cls [lty]))) -> do σ <- fromHsType (HsForAllTy e bndr ctx lty)
                                                                                 return (cls, σ)
               HsPredTy (HsClassP cls [lty]) -> do σ <- fromHsType $ unLoc lty
                                                   return (cls, σ)
               _ -> raiseError InvalidInstance
  let PolyTy _ τ = σ
  -- The instance head type must have a type constructor at its root.
  case tyCon τ of
    Nothing -> raiseError InvalidInstance
    Just κ -> return ((cls, κ), σ)
| bitemyapp/tandoori | src/Tandoori/Typing/InstanceDecl.hs | bsd-3-clause | 996 | 0 | 19 | 418 | 297 | 151 | 146 | 18 | 4 |
module Scion.Types.Note
( -- * Locations
Location, LocSource(..), mkLocation, mkNoLoc,
locSource, isValidLoc, noLocText, viewLoc,
locStartCol, locEndCol, locStartLine, locEndLine,
-- ** Absolute FilePaths
AbsFilePath(toFilePath), mkAbsFilePath,
-- * Notes
Note(..), NoteKind(..), Notes, hasErrors
-- ** Converting from GHC Notes
)
where
import Control.Applicative
import Data.Binary
import qualified Data.MultiSet as MS
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import System.FilePath
-- | A note from the compiler or some other tool.
data Note = Note
  { noteKind :: NoteKind    -- ^ Severity classification of the note.
  , noteLoc :: Location     -- ^ Source region the note refers to.
  , noteMessage :: T.Text   -- ^ Message text.
  } deriving (Eq, Ord, Show)
-- | The message is serialised via its UTF-8 encoding ('T.encodeUtf8'),
-- matching the 'T.decodeUtf8' on the way back in.
instance Binary Note where
  put (Note knd loc msg) = put knd >> put loc >> put (T.encodeUtf8 msg)
  get = Note <$> get <*> get <*> (T.decodeUtf8 <$> get)
-- | Classifies the kind (or severity) of a note. The derived 'Ord' orders
-- the constructors ErrorNote < WarningNote < InfoNote < OtherNote.
data NoteKind
  = ErrorNote
  | WarningNote
  | InfoNote
  | OtherNote
  deriving (Eq, Ord, Show, Enum)
-- | Encoded as a single byte holding the constructor's 'Enum' index.
instance Binary NoteKind where
  put nk = putWord8 (fromIntegral (fromEnum nk))
  get = toEnum . fromIntegral <$> getWord8
-- | A collection of 'Note's; duplicates are allowed and counted.
type Notes = MS.MultiSet Note
-- | Does the collection contain at least one note of kind 'ErrorNote'?
hasErrors :: Notes -> Bool
hasErrors = any ((ErrorNote ==) . noteKind) . MS.toList
-- | Represents a 'FilePath' which we know is absolute.
--
-- Since relative 'FilePath's depend on the a current working directory we
-- normalise all paths to absolute paths. Use 'mkAbsFilePath' to create
-- absolute file paths.
newtype AbsFilePath = AFP { toFilePath :: FilePath } deriving (Eq, Ord)
-- | Serialised as the underlying 'FilePath' string.
instance Binary AbsFilePath where
  put (AFP fp) = put fp
  get = AFP <$> get
-- | Shows the wrapped path (quoted, like 'String').
instance Show AbsFilePath where show (AFP s) = show s
-- | Create an absolute file path given a base directory.
--
-- Throws an error if the first argument is not an absolute path.
mkAbsFilePath :: FilePath -- ^ base directory (must be absolute)
              -> FilePath -- ^ absolute or relative
              -> AbsFilePath
mkAbsFilePath base rel
  | not (isAbsolute base) =
      error "mkAbsFilePath: first argument must be an absolute path"
  | otherwise = AFP (normalise (base </> rel))
-- | Scion's type for source code locations (regions).
--
-- We use a custom location type for two reasons:
--
--  1. We enforce the invariant that the file path of the location is an
--     absolute path.
--
--  2. Independent evolution from the GHC API.
--
-- To save space, the 'Location' type is kept abstract and uses special
-- cases for notes that span only one line or are only one character wide.
-- Use 'mkLocation' and 'viewLoc' as well as the respective accessor
-- functions to construct and destruct nodes.
--
-- If no reasonable location info can be given, use the 'mkNoLoc'
-- function, but be careful not to call 'viewLoc' or any other
-- accessor function on such a 'Location'.
--
data Location
  = LocOneLine { -- ^ region confined to a single line
      locSource :: LocSource,
      locLine :: {-# UNPACK #-} !Int,
      locSCol :: {-# UNPACK #-} !Int,
      locECol :: {-# UNPACK #-} !Int
    }
  | LocMultiLine { -- ^ region spanning several lines
      locSource  :: LocSource,
      locSLine :: {-# UNPACK #-} !Int,
      locELine :: {-# UNPACK #-} !Int,
      locSCol :: {-# UNPACK #-} !Int,
      locECol :: {-# UNPACK #-} !Int
    }
  | LocPoint { -- ^ single-character region
      locSource :: LocSource,
      locLine :: {-# UNPACK #-} !Int,
      locCol :: {-# UNPACK #-} !Int
    }
  | LocNone { noLocText :: String } -- ^ no location; text hints at why
  deriving (Eq, Show)
-- | Tag byte 1 = 'LocNone', tag byte 2 = any valid location (stored as
-- the five components of 'viewLoc' and rebuilt with 'mkLocation').
instance Binary Location where
  put (LocNone msg) = putWord8 1 >> put msg
  put loc | (src, l1, c1, l2, c2) <- viewLoc loc =
    putWord8 2 >> put src >> put l1 >> put c1 >> put l2 >> put c2
  get = do
    tag <- getWord8
    case tag of
      1 -> LocNone <$> get
      2 -> mkLocation <$> get <*> get <*> get <*> get <*> get
      _ -> fail "Binary Location get: tag error"
-- | The \"source\" of a location.
data LocSource
  = FileSrc AbsFilePath
    -- ^ The location refers to a position in a file.
  | OtherSrc String
    -- ^ The location refers to something else, e.g., the command line, or
    -- stdin.
  deriving (Eq, Ord, Show)
-- | Tag byte 1 = 'FileSrc', tag byte 2 = 'OtherSrc'.
instance Binary LocSource where
  put (FileSrc fp) = putWord8 1 >> put fp
  put (OtherSrc s) = putWord8 2 >> put s
  get = do tag <- getWord8
           case tag of
             1 -> FileSrc <$> get
             2 -> OtherSrc <$> get
             _ -> fail "Binary LocSource get: tag error"
-- | Ordering delegated to 'cmpLoc': invalid locations sort before valid ones.
instance Ord Location where compare = cmpLoc
-- | Construct a source code location from start and end point.
--
-- If the start point is after the end point, they are swapped
-- automatically.
mkLocation :: LocSource
           -> Int -- ^ start line
           -> Int -- ^ start column
           -> Int -- ^ end line
           -> Int -- ^ end column
           -> Location
mkLocation src startLn startCl endLn endCl
  -- normalise: recurse with swapped coordinates until start <= end
  | startLn > endLn = mkLocation src endLn startCl startLn endCl
  | startLn == endLn && startCl > endCl = mkLocation src startLn endCl endLn startCl
  | startLn == endLn && startCl == endCl = LocPoint src startLn startCl
  | startLn == endLn = LocOneLine src startLn startCl endCl
  | otherwise = LocMultiLine src startLn endLn startCl endCl
-- | Construct a source location that does not specify a region. The
-- argument can be used to give some hint as to why there is no location
-- available. (E.g., \"File not found\").
mkNoLoc :: String -> Location
mkNoLoc msg = LocNone msg
-- | Test whether a location is valid, i.e., not constructed with 'mkNoLoc'.
isValidLoc :: Location -> Bool
isValidLoc (LocNone _) = False
isValidLoc _ = True
-- | Abort with an error naming the accessor that was (wrongly) applied
-- to an invalid ('LocNone') location.
noLocError :: String -> a
noLocError f = error $ f ++ ": argument must not be a noLoc"
-- | Return the start column. Only defined on valid locations.
locStartCol :: Location -> Int
locStartCol l@LocPoint{} = locCol l
locStartCol LocNone{} = noLocError "locStartCol"
locStartCol l = locSCol l
-- | Return the end column. Only defined on valid locations.
locEndCol :: Location -> Int
locEndCol l@LocPoint{} = locCol l
locEndCol LocNone{} = noLocError "locEndCol"
locEndCol l = locECol l
-- | Return the start line. Only defined on valid locations.
-- (For one-line and point locations this is the single stored line.)
locStartLine :: Location -> Int
locStartLine l@LocMultiLine{} = locSLine l
locStartLine LocNone{} = noLocError "locStartLine"
locStartLine l = locLine l
-- | Return the end line. Only defined on valid locations.
locEndLine :: Location -> Int
locEndLine l@LocMultiLine{} = locELine l
locEndLine LocNone{} = noLocError "locEndLine"
locEndLine l = locLine l
{-# INLINE viewLoc #-}
-- | View on a (valid) location.
--
-- It holds the property:
--
-- > prop_viewLoc_mkLoc s l0 c0 l1 c1 =
-- >     viewLoc (mkLocation s l0 c0 l1 c1) == (s, l0, c0, l1, c1)
--
-- Calling this on a 'LocNone' errors out (via the accessor functions).
viewLoc :: Location
        -> (LocSource, Int, Int, Int, Int)
           -- ^ source, start line, start column, end line, end column.
viewLoc l = (locSource l, locStartLine l, locStartCol l,
             locEndLine l, locEndCol l)
-- | Comparison function for two 'Location's.
--
-- Invalid locations ('LocNone') sort before all valid ones. Two invalid
-- locations are ordered by their message text: the previous definition
-- returned 'LT' for /both/ orientations of a 'LocNone'\/'LocNone' pair,
-- which violates the antisymmetry required by the 'Ord' laws.
cmpLoc :: Location -> Location -> Ordering
cmpLoc (LocNone m1) (LocNone m2) = compare m1 m2
cmpLoc LocNone{} _ = LT
cmpLoc _ LocNone{} = GT
cmpLoc l1 l2 =
    -- lexicographic: source, then start point, then end point
    (f1 `compare` f2) `thenCmp`
    (sl1 `compare` sl2) `thenCmp`
    (sc1 `compare` sc2) `thenCmp`
    (el1 `compare` el2) `thenCmp`
    (ec1 `compare` ec2)
  where
    (f1, sl1, sc1, el1, ec1) = viewLoc l1
    (f2, sl2, sc2, el2, ec2) = viewLoc l2
-- | Lexicographic composition of two orderings: use the first ordering,
-- and fall back to the second only to break ties.
thenCmp :: Ordering -> Ordering -> Ordering
thenCmp first second =
  case first of
    EQ -> second
    _ -> first
{-# INLINE thenCmp #-}
| CristhianMotoche/scion | src/Scion/Types/Note.hs | bsd-3-clause | 7,480 | 0 | 15 | 1,816 | 1,742 | 960 | 782 | 145 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
-- http://tools.ietf.org/html/rfc2109
module Happstack.Server.Internal.Cookie
( Cookie(..)
, CookieLife(..)
, calcLife
, mkCookie
, mkCookieHeader
, getCookies
, getCookie
, getCookies'
, getCookie'
, parseCookies
, cookiesParser
)
where
import Control.Monad
import qualified Data.ByteString.Char8 as C
import Data.Char (chr, toLower)
import Data.Data (Data, Typeable)
import Data.List ((\\), intersperse)
import Data.Time.Clock (UTCTime, addUTCTime, diffUTCTime)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Happstack.Server.Internal.Clock (getApproximateUTCTime)
import Text.ParserCombinators.Parsec hiding (token)
#if MIN_VERSION_time(1,5,0)
import Data.Time.Format (formatTime, defaultTimeLocale)
#else
import Data.Time.Format (formatTime)
import System.Locale (defaultTimeLocale)
#endif
-- | a type for HTTP cookies. Usually created using 'mkCookie'.
data Cookie = Cookie
    { cookieVersion :: String -- ^ RFC 2109 version (usually \"1\")
    , cookiePath :: String -- ^ URL path the cookie applies to
    , cookieDomain :: String -- ^ domain the cookie applies to (may be empty)
    , cookieName :: String -- ^ cookie name (matched case-insensitively)
    , cookieValue :: String -- ^ cookie value
    , secure :: Bool -- ^ only send over HTTPS
    , httpOnly :: Bool -- ^ hide from client-side scripts
    } deriving(Show,Eq,Read,Typeable,Data)
-- | Specify the lifetime of a cookie.
--
-- Note that we always set the max-age and expires headers because
-- internet explorer does not honor max-age. You can specific 'MaxAge'
-- or 'Expires' and the other will be calculated for you. Choose which
-- ever one makes your life easiest.
--
data CookieLife
    = Session -- ^ session cookie - expires when browser is closed
    | MaxAge Int -- ^ life time of cookie in seconds
    | Expires UTCTime -- ^ cookie expiration date
    | Expired -- ^ cookie already expired
      deriving (Eq, Ord, Read, Show, Typeable)
-- | convert 'CookieLife' to the argument needed for calling 'mkCookieHeader':
-- @Nothing@ for a session cookie, otherwise @(max-age seconds, expiry date)@.
-- The missing half of each pair is derived from the current (approximate) time.
calcLife :: CookieLife -> IO (Maybe (Int, UTCTime))
calcLife Session = return Nothing
calcLife (MaxAge s) =
    do now <- getApproximateUTCTime
       return (Just (s, addUTCTime (fromIntegral s) now))
calcLife (Expires expirationDate) =
    do now <- getApproximateUTCTime
       return $ Just (round $ expirationDate `diffUTCTime` now, expirationDate)
calcLife Expired =
    -- epoch start: far in the past, so browsers delete the cookie
    return $ Just (0, posixSecondsToUTCTime 0)
-- | Creates a cookie with a default version of 1, empty domain, a
-- path of "/", secure == False and httpOnly == False
--
-- see also: 'addCookie'
mkCookie :: String -- ^ cookie name
         -> String -- ^ cookie value
         -> Cookie
mkCookie key val =
    Cookie { cookieVersion = "1"
           , cookiePath = "/"
           , cookieDomain = ""
           , cookieName = key
           , cookieValue = val
           , secure = False
           , httpOnly = False
           }
-- | Set a Cookie in the Result.
-- The values are escaped as per RFC 2109, but some browsers may
-- have buggy support for cookies containing e.g. @\'\"\'@ or @\' \'@.
--
-- Also, it seems that chrome, safari, and other webkit browsers do
-- not like cookies which have double quotes around the domain and
-- reject/ignore the cookie. So, we no longer quote the domain.
--
-- internet explorer does not honor the max-age directive so we set
-- both max-age and expires.
--
-- See 'CookieLife' and 'calcLife' for a convenient way of calculating
-- the first argument to this function.
mkCookieHeader :: Maybe (Int, UTCTime) -> Cookie -> String
mkCookieHeader mLife cookie =
    -- l: attribute/value pairs; empty values are filtered out below
    let l = [("Domain=", cookieDomain cookie)
            ,("Max-Age=", maybe "" (show . max 0 . fst) mLife)
            ,("expires=", maybe "" (formatTime defaultTimeLocale "%a, %d-%b-%Y %X GMT" . snd) mLife)
            ,("Path=", cookiePath cookie)
            ,("Version=", s cookieVersion)]
        -- s: read a field, quoting and escaping it; empty fields stay empty
        s f | f cookie == "" = ""
        s f = '\"' : concatMap e (f cookie) ++ "\""
        -- e: backslash-escape control characters and double quotes
        e c | fctl c || c == '"' = ['\\',c]
            | otherwise = [c]
    in concat $ intersperse ";" ((cookieName cookie++"="++s cookieValue):[ (k++v) | (k,v) <- l, "" /= v ] ++
                                 (if secure cookie then ["Secure"] else []) ++
                                 (if httpOnly cookie then ["HttpOnly"] else []))
-- | True for the US-ASCII control characters (0x00-0x1F) and DEL (0x7F),
-- i.e. the characters that must be backslash-escaped in cookie values.
fctl :: Char -> Bool
fctl c = c <= '\US' || c == '\DEL'
-- | Not a supported api. Takes a cookie header and returns
-- either a String error message or an array of parsed cookies
parseCookies :: String -> Either String [Cookie]
parseCookies str = either (Left . show) Right $ parse cookiesParser str str
-- | not a supported api. A parser for RFC 2109 cookies
cookiesParser :: GenParser Char st [Cookie]
cookiesParser = cookies
    where -- Parsers based on RFC 2109
          -- top level: optional $Version attribute, then 1+ cookies
          cookies = do
              ws
              ver<-option "" $ try (cookie_version >>= (\x -> cookieSep >> return x))
              cookieList<-(cookie_value ver) `sepBy1` try cookieSep
              ws
              eof
              return cookieList
          -- one name=value pair plus optional $Path / $Domain attributes;
          -- note the name is lower-cased on the way in
          cookie_value ver = do
              name<-name_parser
              cookieEq
              val<-value
              path<-option "" $ try (cookieSep >> cookie_path)
              domain<-option "" $ try (cookieSep >> cookie_domain)
              return $ Cookie ver path domain (low name) val False False
          cookie_version = cookie_special "$Version"
          cookie_path = cookie_special "$Path"
          cookie_domain = cookie_special "$Domain"
          -- a reserved "$Name=value" attribute
          cookie_special s = do
              void $ string s
              cookieEq
              value
          cookieSep = ws >> oneOf ",;" >> ws
          cookieEq = ws >> char '=' >> ws
          ws = spaces
          value = word
          word = try quoted_string <|> try incomp_token <|> return ""
          -- Parsers based on RFC 2068
          quoted_string = do
              void $ char '"'
              r <-many ((try quotedPair) <|> (oneOf qdtext))
              void $ char '"'
              return r
          -- Custom parsers, incompatible with RFC 2068, but more forgiving ;)
          incomp_token = many1 $ oneOf ((chars \\ ctl) \\ " \t\";")
          name_parser = many1 $ oneOf ((chars \\ ctl) \\ "= ;,")
          -- Primitives from RFC 2068
          ctl = map chr (127:[0..31])
          chars = map chr [0..127]
          octet = map chr [0..255]
          text = octet \\ ctl
          qdtext = text \\ "\""
          quotedPair = char '\\' >> anyChar
-- | Get all cookies from the HTTP request. The cookies are ordered per RFC from
-- the most specific to the least specific. Multiple cookies with the same
-- name are allowed to exist.
getCookies :: Monad m => C.ByteString -> m [Cookie]
getCookies h = getCookies' h >>= either (fail. ("Cookie parsing failed!"++)) return
-- | Get the most specific cookie with the given name. Fails if there is no such
-- cookie or if the browser did not escape cookies in a proper fashion.
-- Browser support for escaping cookies properly is very diverse.
getCookie :: Monad m => String -> C.ByteString -> m Cookie
getCookie s h = getCookie' s h >>= either (const $ fail ("getCookie: " ++ show s)) return
-- | Like 'getCookies' but returns parse failures as a 'Left' value.
-- An empty header yields an empty cookie list.
getCookies' :: Monad m => C.ByteString -> m (Either String [Cookie])
getCookies' header | C.null header = return $ Right []
                   | otherwise = return $ parseCookies (C.unpack header)
-- | Like 'getCookie' but returns failure as a 'Left' value.
-- The name comparison is case-insensitive (both sides lower-cased).
getCookie' :: Monad m => String -> C.ByteString -> m (Either String Cookie)
getCookie' s h = do
    cs <- getCookies' h
    return $ do -- Either
       cooks <- cs
       case filter (\x->(==) (low s) (cookieName x) ) cooks of
         [] -> fail "No cookie found"
         f -> return $ head f
-- | Lower-case every character of the string.
low :: String -> String
low = fmap toLower
| erantapaa/happstack-server | src/Happstack/Server/Internal/Cookie.hs | bsd-3-clause | 7,573 | 0 | 17 | 2,142 | 1,822 | 970 | 852 | 129 | 4 |
{-# LANGUAGE NamedFieldPuns, RecordWildCards #-}
module Distribution.Server (
-- * Server control
Server(..),
ServerEnv(..),
initialise,
run,
shutdown,
checkpoint,
reloadDatafiles,
-- * Server configuration
ListenOn(..),
ServerConfig(..),
defaultServerConfig,
hasSavedState,
-- * Server state
serverState,
initState,
-- * Temporary server while loading data
setUpTemp,
tearDownTemp
) where
import Happstack.Server.SimpleHTTP
import Distribution.Server.Framework
import qualified Distribution.Server.Framework.BackupRestore as Import
import qualified Distribution.Server.Framework.BlobStorage as BlobStorage
import qualified Distribution.Server.Framework.Auth as Auth
import Distribution.Server.Framework.Templating (TemplatesMode(..))
import Distribution.Server.Framework.AuthTypes (PasswdPlain(..))
import Distribution.Server.Framework.HtmlFormWrapper (htmlFormWrapperHack)
import Distribution.Server.Framework.Feature as Feature
import qualified Distribution.Server.Features as Features
import Distribution.Server.Features.Users
import qualified Distribution.Server.Users.Types as Users
import qualified Distribution.Server.Users.Users as Users
import qualified Distribution.Server.Users.Group as Group
import Distribution.Text
import Distribution.Verbosity as Verbosity
import System.Directory (createDirectoryIfMissing, doesDirectoryExist)
import Control.Concurrent
import Network.URI (URI(..), URIAuth(URIAuth), nullURI)
import Network.BSD (getHostName)
import Data.List (foldl', nubBy)
import Data.Int (Int64)
import Control.Arrow (second)
import Data.Function (on)
import qualified System.Log.Logger as HsLogger
import Control.Exception.Lifted as Lifted
import qualified Hackage.Security.Util.Path as Sec
import Paths_hackage_server (getDataDir)
-- | Network address (port and IP) the HTTP server binds to.
data ListenOn = ListenOn {
  loPortNum :: Int,
  loIP :: String
} deriving (Show)
-- | Static configuration from which a 'ServerEnv' is built.
data ServerConfig = ServerConfig {
  confVerbosity :: Verbosity,
  confHostUri :: URI,
  confListenOn :: ListenOn,
  confStateDir :: FilePath,
  confStaticDir :: FilePath,
  confTmpDir :: FilePath,
  confCacheDelay:: Int,
  confLiveTemplates :: Bool
} deriving (Show)
-- Derived locations under the state directory.
confDbStateDir, confBlobStoreDir :: ServerConfig -> FilePath
confDbStateDir config = confStateDir config </> "db"
confBlobStoreDir config = confStateDir config </> "blobs"
-- Derived locations under the static-files directory.
confStaticFilesDir, confTemplatesDir, confTUFDir :: ServerConfig -> FilePath
confStaticFilesDir config = confStaticDir config </> "static"
confTemplatesDir config = confStaticDir config </> "templates"
confTUFDir config = confStaticDir config </> "TUF"
-- | Default configuration: listen on 127.0.0.1:8080 and advertise
-- @http://<hostname>:8080@ as the base URI.
--
-- The port number is defined once ('portnum') and used for both the
-- advertised URI and the listen address; previously the listen port was
-- an independently hard-coded 8080, so editing one could silently leave
-- the other out of sync.
defaultServerConfig :: IO ServerConfig
defaultServerConfig = do
  hostName <- getHostName
  dataDir <- getDataDir
  let portnum = 8080 :: Int
  return ServerConfig {
    confVerbosity = Verbosity.normal,
    confHostUri = nullURI {
                    uriScheme = "http:",
                    uriAuthority = Just (URIAuth "" hostName (':' : show portnum))
                  },
    confListenOn = ListenOn {
                     loPortNum = portnum,
                     loIP = "127.0.0.1"
                   },
    confStateDir = "state",
    confStaticDir = dataDir,
    confTmpDir = "state" </> "tmp",
    confCacheDelay= 0,
    confLiveTemplates = False
  }
-- | A fully initialised server: its features plus the environment they run in.
data Server = Server {
  serverFeatures :: [HackageFeature],
  serverUserFeature :: UserFeature,
  serverListenOn :: ListenOn,
  serverEnv :: ServerEnv
}
-- | If we made a server instance from this 'ServerConfig', would we find some
-- existing saved state or would it be a totally clean instance with no
-- existing state.
--
hasSavedState :: ServerConfig -> IO Bool
hasSavedState = doesDirectoryExist . confDbStateDir
-- | Build the runtime environment from the static configuration:
-- creates the state directory if needed, opens the blob store, and
-- starts the cron scheduler.
mkServerEnv :: ServerConfig -> IO ServerEnv
mkServerEnv config@(ServerConfig verbosity hostURI _
                         stateDir _ tmpDir
                         cacheDelay liveTemplates) = do
    createDirectoryIfMissing False stateDir
    let blobStoreDir = confBlobStoreDir config
        staticDir = confStaticFilesDir config
        templatesDir = confTemplatesDir config
        tufDir' = confTUFDir config
    store <- BlobStorage.open blobStoreDir
    cron <- newCron verbosity
    tufDir <- Sec.makeAbsolute $ Sec.fromFilePath tufDir'
    let env = ServerEnv {
            serverStaticDir = staticDir,
            serverTemplatesDir = templatesDir,
            serverTUFDir = tufDir,
            serverTemplatesMode = if liveTemplates then DesignMode
                                                   else NormalMode,
            serverStateDir = stateDir,
            serverBlobStore = store,
            serverCron = cron,
            serverTmpDir = tmpDir,
            serverCacheDelay = cacheDelay * 1000000, --microseconds
            serverBaseURI = hostURI,
            serverVerbosity = verbosity
          }
    return env
-- | Make a server instance from the server configuration.
--
-- This does not yet run the server (see 'run') but it does setup the server
-- state system, making it possible to import data, and initializes the
-- features.
--
-- Note: the server instance must eventually be 'shutdown' or you'll end up
-- with stale lock files.
--
initialise :: ServerConfig -> IO Server
initialise config = do
    env <- mkServerEnv config
    -- do feature initialization
    (features, userFeature) <- Features.initHackageFeatures env
    return Server {
        serverFeatures = features,
        serverUserFeature = userFeature,
        serverListenOn = confListenOn config,
        serverEnv = env
    }
-- | Actually run the server, i.e. start accepting client http connections.
--
-- Also registers a weekly cron job that checkpoints all server state.
run :: Server -> IO ()
run server@Server{ serverEnv = env } = do
    -- We already check this in Main, so we expect this check to always
    -- succeed, but just in case...
    let staticDir = serverStaticDir (serverEnv server)
    exists <- doesDirectoryExist staticDir
    when (not exists) $ fail $ "The static files directory " ++ staticDir ++ " does not exist."
    addCronJob (serverCron env) CronJob {
      cronJobName = "Checkpoint all the server state",
      cronJobFrequency = WeeklyJobFrequency,
      cronJobOneShot = False,
      cronJobAction = checkpoint server
    }
    runServer listenOn $ do
      handlePutPostQuotas
      setLogging
      fakeBrowserHttpMethods (impl server)
  where
    listenOn = serverListenOn server
    -- HS6 - Quotas should be configurable as well. Also there are places in
    -- the code that want to work with the request body directly but maybe
    -- fail if the request body has already been consumed. The body will only
    -- be consumed if it is a POST/PUT request *and* the content-type is
    -- multipart/form-data. If this does happen, you should get a clear error
    -- message saying what happened.
    handlePutPostQuotas = decodeBody bodyPolicy
      where
        tmpdir = serverTmpDir (serverEnv server)
        quota = 50 * (1024 ^ (2 :: Int64))
        -- setting quota at 50mb, though perhaps should be configurable?
        bodyPolicy = defaultBodyPolicy tmpdir quota quota quota
    -- map our 'Verbosity' onto hslogger's log levels
    setLogging =
        liftIO $ HsLogger.updateGlobalLogger
                   "Happstack.Server"
                   (adjustLogLevel (serverVerbosity (serverEnv server)))
      where
        adjustLogLevel v
          | v == Verbosity.normal = HsLogger.setLevel HsLogger.WARNING
          | v == Verbosity.verbose = HsLogger.setLevel HsLogger.INFO
          | v == Verbosity.deafening = HsLogger.setLevel HsLogger.DEBUG
          | otherwise = id
    -- This is a cunning hack to solve the problem that HTML forms do not
    -- support PUT, DELETE, etc, they only support GET and POST. We don't want
    -- to compromise the design of the whole server just because HTML does not
    -- support HTTP properly, so we allow browsers using HTML forms to do
    -- PUT/DELETE etc by POSTing with special body parameters.
    fakeBrowserHttpMethods part =
      msum [ do method POST
                htmlFormWrapperHack part
           -- or just do things the normal way
           , part
           ]
-- | Perform a clean shutdown of the server.
--
shutdown :: Server -> IO ()
shutdown server =
    Features.shutdownAllFeatures (serverFeatures server)
--TODO: stop accepting incomming connections,
-- wait for connections to be processed.
-- | Write out a checkpoint of the server state. This makes recovery quicker
-- because fewer logged transactions have to be replayed.
--
checkpoint :: Server -> IO ()
checkpoint server =
    Features.checkpointAllFeatures (serverFeatures server)
-- | Ask every feature to re-read its data files from disk.
reloadDatafiles :: Server -> IO ()
reloadDatafiles server =
    mapM_ Feature.featureReloadFiles (serverFeatures server)
-- | Return /one/ abstract state component per feature
serverState :: Server -> [(String, AbstractStateComponent)]
serverState server = [ (featureName feature, mconcat (featureState feature))
                     | feature <- serverFeatures server
                     ]
-- | An alternative to an import: starts the server off to a sane initial state.
-- To accomplish this, we import a 'null' tarball, finalizing immediately after
-- initializing import. Also creates the given admin user and adds it to the
-- admin group.
initState :: Server -> (String, String) -> IO ()
initState server (admin, pass) = do
    -- We take the opportunity to checkpoint all the acid-state components
    -- upon first initialisation as this helps with migration problems later.
    -- https://github.com/acid-state/acid-state/issues/20
    checkpoint server
    let store = serverBlobStore (serverEnv server)
        stores = BlobStorage.BlobStores store []
    void . Import.importBlank stores $ map (second abstractStateRestore) (serverState server)
    -- create default admin user
    let UserFeature{updateAddUser, adminGroup} = serverUserFeature server
    muid <- case simpleParse admin of
        Just uname -> do
            let userAuth = Auth.newPasswdHash Auth.hackageRealm uname (PasswdPlain pass)
            updateAddUser uname (Users.UserAuth userAuth)
        Nothing -> fail "Couldn't parse admin name (should be alphanumeric)"
    case muid of
        Right uid -> Group.addUserToGroup adminGroup uid
        Left Users.ErrUserNameClash -> fail $ "Inconceivable!! failed to create admin user"
-- | The top-level server part.
-- It collects resources from Distribution.Server.Features, collects
-- them into a path hierarchy, and serves them.
impl :: Server -> ServerPart Response
impl server = logExceptions $
              runServerPartE $
              handleErrorResponse (serveErrorResponse errHandlers Nothing) $
              renderServerTree [] serverTree
                `mplus`
              fallbackNotFound
  where
    serverTree :: ServerTree (DynamicPath -> ServerPartE Response)
    serverTree =
        fmap (serveResource errHandlers)
        -- ServerTree Resource
      . foldl' (\acc res -> addServerNode (resourceLocation res) res acc) serverTreeEmpty
        -- [Resource]
      $ concatMap Feature.featureResources (serverFeatures server)
    -- later features override earlier ones; one handler per format extension
    errHandlers = nubBy ((==) `on` fst)
                . reverse
                . (("txt", textErrorPage):)
                . concatMap Feature.featureErrHandlers
                $ serverFeatures server
    -- This basic one can be overridden in another feature but means even in a
    -- minimal server we can provide content-negotiated text/plain errors
    textErrorPage :: ErrorResponse -> ServerPartE Response
    textErrorPage = return . toResponse
    fallbackNotFound =
        errNotFound "Page not found"
          [MText "Sorry, it's just not here."]
    -- log and rethrow, so failures are visible in the server log
    logExceptions :: ServerPart Response -> ServerPart Response
    logExceptions act = Lifted.catch act $ \e -> do
        lognotice verbosity $ "WARNING: Received exception: " ++ show e
        Lifted.throwIO (e :: SomeException)
    verbosity = serverVerbosity (serverEnv server)
-- | Handle on a placeholder server thread (see 'setUpTemp' / 'tearDownTemp').
data TempServer = TempServer ThreadId
-- | Start a placeholder server that answers everything with 503, after an
-- initial delay of @secs@ seconds. Used while the real server is loading.
setUpTemp :: ServerConfig -> Int -> IO TempServer
setUpTemp sconf secs = do
    tid <- forkIO $ do
        -- wait a certain amount of time before setting it up, because sometimes
        -- happstack-state is very fast, and switching the servers has a time
        -- cost to it
        threadDelay $ secs*1000000
        -- could likewise specify a mirror to redirect to for tarballs, and 503 for everything else
        runServer listenOn $ (resp 503 $ setHeader "Content-Type" "text/html" $ toResponse html503)
    return (TempServer tid)
  where listenOn = confListenOn sconf
-- | Bind the configured address and serve the given handler on it.
runServer :: (ToMessage a) => ListenOn -> ServerPartT IO a -> IO ()
runServer listenOn f
    = do socket <- bindIPv4 (loIP listenOn) (loPortNum listenOn)
         simpleHTTPWithSocket socket nullConf f
-- | Static 503 page, based on Happstack's 404 page.
html503 :: String
html503 = concat
    [ "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">"
    , "<html><head><title>503 Service Unavailable</title></head><body><h1>"
    , "503 Service Unavailable</h1><p>The server is undergoing maintenance"
    , "<br>It'll be back soon</p></body></html>"
    ]
-- | Stop a placeholder server started with 'setUpTemp'.
tearDownTemp :: TempServer -> IO ()
tearDownTemp (TempServer tid) = do
    killThread tid
    -- give the server enough time to release the bind
    threadDelay $ 1000000
| edsko/hackage-server | Distribution/Server.hs | bsd-3-clause | 13,424 | 0 | 18 | 3,256 | 2,462 | 1,350 | 1,112 | 234 | 3 |
{-# language ViewPatterns, ScopedTypeVariables #-}
module Base.Score (
Scores,
HighScoreFile(..),
Record(..),
saveScore,
getScores,
setScores,
getHighScores,
mkScoreString,
showScore,
timeFormat,
batteryFormat,
sumOfEpisodeBatteries,
) where
import Data.Map as Map (Map, empty, lookup, insert)
import Data.Binary
import Data.Binary.Strict
import Data.Initial
import Data.Accessor
import Text.Printf
import Text.Logging
import System.FilePath
import System.Directory
import Utils
import Base.Paths
import Base.Types
import StoryMode.Types
-- | Per-level high scores, keyed by level UID.
type Scores = Map LevelUID Score
-- | type representing the Map of HighScores,
-- which will be written to file.
-- HighScoreFile has versioned constructors.
-- Serialisation uses the Binary class.
data HighScoreFile
    = HighScoreFile_0 {
        highScores :: Scores
      }
    | HighScoreFile_1 {
        highScores :: Scores,
        episodeScores :: Map EpisodeUID EpisodeScore
      }
  deriving Show
instance Initial HighScoreFile where
    initial = HighScoreFile_1 empty empty
-- | Upgrade any older file format to the newest constructor.
convertToNewest :: HighScoreFile -> HighScoreFile
convertToNewest (HighScoreFile_0 lhs) = HighScoreFile_1 lhs empty
convertToNewest x = x
-- Binary instance for serialisation
-- (to provide minimal cheating protection).
-- This instance has to work with the versioned
-- constructors!!!
-- Old-format values are converted before writing, so tag 1 is always
-- written; tag 0 is still accepted when reading.
-- NOTE(review): 'get' has no case for tags other than 0/1, so a corrupt
-- tag byte causes a pattern-match failure rather than a clean parse error.
instance Binary HighScoreFile where
    put s@(HighScoreFile_0 x) = put $ convertToNewest s
    put (HighScoreFile_1 lhs ehs) = do
        putWord8 1
        put lhs
        put ehs
    get = do
        i <- getWord8
        case i of
            0 -> convertToNewest <$> HighScoreFile_0 <$> get
            1 -> HighScoreFile_1 <$> get <*> get
-- | Outcome of comparing a new score against the saved high score.
data Record
    = NoNewRecord
    | NewRecord
    | RecordTied
-- | Checks, if the given score is a new record (time- or battery-wise)
-- If yes, saves the new record.
-- Returns (Maybe oldHighScore, newTimeRecord, newBatteryRecord)
saveScore :: LevelFile -> Score -> IO (Maybe Score, Record, Record)
saveScore (levelUID -> uid) currentScore = do
    highScores <- getHighScores
    let mHighScore = Map.lookup uid highScores
    case (currentScore, mHighScore) of
        -- first result for this level: save it, but report no records
        (_, Nothing) -> do
            setHighScore uid currentScore
            return (Nothing, NoNewRecord, NoNewRecord)
        (Score_1_Tried, Just highScore) ->
            return (Just highScore, NoNewRecord, NoNewRecord)
        -- first *passed* result: always a time record
        (Score_1_Passed scoreTime scoreBatteryPower, oldHighScore)
            | oldHighScore `elem` [Nothing, Just Score_1_Tried] -> do
                setHighScore uid currentScore
                let batteryRecord = if scoreBatteryPower == 0 then NoNewRecord else NewRecord
                return (Nothing, NewRecord, batteryRecord)
        -- both passed: keep the best time and best battery count separately
        (Score_1_Passed scoreTime scoreBatteryPower, Just highScore@Score_1_Passed{}) -> do
            let newHighScore =
                    updateRecord timeCompare scoreTimeA currentScore $
                    updateRecord compare scoreBatteryPowerA currentScore $
                    highScore
            when (newHighScore /= highScore) $
                setHighScore uid newHighScore
            return (Just highScore,
                record timeCompare scoreTimeA currentScore highScore,
                record batteryCompare scoreBatteryPowerA currentScore highScore)
  where
    -- smaller time is better, hence the swapped ordering
    timeCompare a b = swapOrdering $ compare a b
    -- a battery count of zero never counts as a (tied) battery record
    batteryCompare 0 x = LT
    batteryCompare a b = compare a b
    -- overwrite the accessed field in the high score iff current beats it
    updateRecord :: Compare a -> Accessor Score a -> Score -> Score -> Score
    updateRecord compare acc current high =
        case compare (current ^. acc) (high ^. acc) of
            GT -> acc ^= (current ^. acc) $ high
            _ -> high
    -- classify current vs. high for one field
    record :: Compare a -> Accessor Score a -> Score -> Score -> Record
    record compare acc current high =
        case compare (current ^. acc) (high ^. acc) of
            GT -> NewRecord
            EQ -> RecordTied
            LT -> NoNewRecord
type Compare a = a -> a -> Ordering
-- | Read the highscore file, falling back to the initial (empty) state
-- if it cannot be decoded.
getScores :: IO HighScoreFile
getScores = do
    filePath <- getHighScoreFilePath
    content :: Maybe HighScoreFile <- decodeFileStrict filePath
    case content of
        Nothing -> do
            logg Warning "highscore file not readable."
            return initial
        Just c -> return c
-- | Write the given high score data to the highscore file.
-- (Type signature added: every other top-level binding in this module
-- carries one, and -Wall warns on its absence.)
setScores :: HighScoreFile -> IO ()
setScores scores = do
    filePath <- getHighScoreFilePath
    encodeFileStrict filePath scores
-- | The per-level score map from the highscore file.
getHighScores :: IO Scores
getHighScores = highScores <$> getScores
-- | Replace the per-level score map, preserving the episode scores on disk.
setHighScores :: Scores -> IO ()
setHighScores m = do
    eps <- episodeScores <$> getScores
    let content :: HighScoreFile = HighScoreFile_1 m eps
    setScores content
-- | Store (or overwrite) the score for a single level.
setHighScore :: LevelUID -> Score -> IO ()
setHighScore uid score = do
    setHighScores . insert uid score =<< getHighScores
-- * pretty printing
-- | Render a score for display, given the level's total battery count
-- (if known) and the saved score (if any).
mkScoreString :: Maybe Integer -> Maybe Score -> String
mkScoreString _ Nothing =
    -- unplayed levels
    showScore Nothing Nothing Nothing
mkScoreString mBatteries (Just score) =
    inner score
  where
    inner :: Score -> String
    inner Score_1_Tried =
        showScore Nothing Nothing mBatteries
    inner (Score_1_Passed time batteries) =
        showScore (Just time) (Just batteries) mBatteries
-- | Format as @[ time | collected/total ]@, with dashes for unknowns.
showScore :: Maybe Seconds -> Maybe Integer -> Maybe Integer -> String
showScore mTime mCollected mTotal =
    printf "[ %s | %s/%s ]"
        (maybe "--:--:--" timeFormat mTime)
        (maybe "---" batteryFormat mCollected)
        (maybe "---" batteryFormat mTotal)
-- | formats the time as minutes:seconds:centiseconds (MM:SS:CC),
-- each component clamped to its printable range (99/59/99).
timeFormat :: Seconds -> String
timeFormat time =
    printf "%02i:%02i:%02i" minutes seconds centiSeconds
  where
    (intSeconds, fractionSeconds) = properFraction time
    intMinutes = floor (time / 60)
    minutes :: Int = min 99 intMinutes
    seconds :: Int = min 59 (intSeconds - (intMinutes * 60))
    -- ceiling so any fraction shows as at least one centisecond
    centiSeconds :: Int = min 99 $ ceiling (fractionSeconds * 100)
-- | Render a battery count as a three-digit, zero-padded number.
batteryFormat :: Integer -> String
batteryFormat n = printf "%03i" n
-- * file paths
-- | Returns the filepath to the highscore file
-- Initializes the file, if it doesn't exist.
getHighScoreFilePath :: IO FilePath
getHighScoreFilePath = do
    confDir <- getConfigurationDirectory
    let highScoreFilePath = confDir </> "highscores"
    exists <- doesFileExist highScoreFilePath
    when (not exists) $ do
        -- initialize the file with the empty initial state
        let content :: HighScoreFile = initial
        encodeFileStrict highScoreFilePath content
    return highScoreFilePath
-- * episodes
-- | Adds up all collected batteries for one episode.
-- Does not account for batteries in a currently played level.
sumOfEpisodeBatteries :: Scores -> Episode LevelFile -> Integer
sumOfEpisodeBatteries highscore episode =
    sum $ ftoList $ fmap getBatteryPower episode
  where
    -- batteries recorded for one level; zero if unplayed or only tried
    getBatteryPower :: LevelFile -> Integer
    getBatteryPower lf = case Map.lookup (levelUID lf) highscore of
        Nothing -> 0
        Just Score_1_Tried -> 0
        Just (Score_1_Passed _ bp) -> bp
| geocurnoff/nikki | src/Base/Score.hs | lgpl-3.0 | 6,939 | 0 | 17 | 1,731 | 1,719 | 874 | 845 | 160 | 9 |
{-
Copyright 2017 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
-- |
-- Module : Data.MultiMap
-- Copyright : (c) CodeWorld Authors 2017
-- License : Apache-2.0
--
-- Maintainer : Joachim Breitner <mail@joachim-breitner.de>
--
-- A simple MultiMap.
--
-- This differs from the one in the @multimap@ package by using
-- 'Data.Sequence.Seq' for efficient insert-at-end and other improved speeds.
--
-- Also only supports the operations required by CodeWorld for now.
{-# LANGUAGE TupleSections #-}
module Data.MultiMap (MultiMap, empty, null, insertL, insertR, toList, spanAntitone, union, keys) where
import Prelude hiding (null)
import qualified Data.Sequence as S
import qualified Data.Map as M
import qualified Data.Foldable (toList)
import Data.Bifunctor
import Data.Coerce
-- | A map from keys to sequences of values. Within one key, values
-- keep their insertion order: 'insertL' adds at the front and
-- 'insertR' at the back.
newtype MultiMap k v = MM (M.Map k (S.Seq v)) deriving (Show, Eq)

-- | The multimap containing no entries.
empty :: MultiMap k v
empty = MM M.empty

-- | Whether the multimap contains no entries at all.
null :: MultiMap k v -> Bool
null (MM inner) = M.null inner

-- | Insert a value at the front of the sequence stored under a key.
insertL :: Ord k => k -> v -> MultiMap k v -> MultiMap k v
insertL k v (MM inner) = MM (M.insertWith (S.><) k (S.singleton v) inner)

-- | Insert a value at the back of the sequence stored under a key.
insertR :: Ord k => k -> v -> MultiMap k v -> MultiMap k v
insertR k v (MM inner) = MM (M.insertWith (flip (S.><)) k (S.singleton v) inner)

-- | All key\/value pairs: keys in ascending order, values in insertion
-- order within each key.
toList :: MultiMap k v -> [(k,v)]
toList (MM inner) =
    concatMap expand (M.toList inner)
  where
    expand (k, vs) = [ (k, v) | v <- Data.Foldable.toList vs ]

-- TODO: replace with M.spanAntitone once containers is updated
mapSpanAntitone :: (k -> Bool) -> M.Map k a -> (M.Map k a, M.Map k a)
mapSpanAntitone p m =
    (M.fromDistinctAscList yes, M.fromDistinctAscList no)
  where
    (yes, no) = span (p . fst) (M.toList m)

-- | Split the multimap at the point where an antitone predicate on the
-- keys flips from True to False.
spanAntitone :: (k -> Bool) -> MultiMap k v -> (MultiMap k v, MultiMap k v)
spanAntitone p (MM inner) =
    (MM before, MM after)
  where
    (before, after) = mapSpanAntitone p inner

-- | Merge two multimaps; for keys present in both, the values of the
-- left argument come first.
union :: Ord k => MultiMap k v -> MultiMap k v -> MultiMap k v
union (MM a) (MM b) = MM (M.unionWith (S.><) a b)

-- | The keys, in ascending order.
keys :: MultiMap k v -> [k]
keys (MM inner) = M.keys inner
| three/codeworld | codeworld-prediction/src/Data/MultiMap.hs | apache-2.0 | 2,426 | 0 | 13 | 471 | 716 | 383 | 333 | 27 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Text.LaTeX
import Text.LaTeX.Packages.Inputenc
import Text.LaTeX.Packages.TikZ
-- | Render the example document and write it to @tikz.tex@.
main :: IO ()
main = do
  latex <- execLaTeXT tikztest
  renderFile "tikz.tex" latex
tikztest :: LaTeXT IO ()
tikztest = do
thePreamble
document theBody
thePreamble :: LaTeXT IO ()
thePreamble = do
documentclass [] article
usepackage [utf8] inputenc
usepackage [] tikz
author "Daniel Díaz"
title "Example using TikZ"
theBody :: LaTeXT IO ()
theBody = do
maketitle
"Below a picture generated using the TikZ DSL of "
hatex
"."
center $ tikzpicture $ draw $
Cycle $ Start (pointAtXY 0 0) ->- pointAtXY 1 0 ->- pointAtXY 0 1
"And some pictures more."
center $ tikzpicture $
draw (Rectangle (Start $ pointAtXY 0 0 ) (pointAtXY 1 1))
->> fill (Circle (Start $ pointAtXY 1.5 0.5) 0.5)
->> shade (Ellipse (Start $ pointAtXY 3 0.5 ) 1 0.5)
center $ tikzpicture $ draw $
(Cycle $ Start (pointAtXY 0 0) ->- pointAtXY 1 0 ->- pointAtXY 0 1) ->- pointAtXY 1 1
"We also show the graph of the "
emph "sine"
" function."
center $ tikzpicture $
draw (Start (pointAtXY 0 1) ->- pointAtXY 0 (-1))
->> draw (Start (pointAtXY (-0.2) 0) ->- pointAtXY (3*pi) 0 )
->> scope [TColor $ BasicColor Blue, TWidth (Pt 1)] (draw $ bpath (pointAtXY 0 0) $
mapM_ line [ pointAtXY x (sin x) | x <- [0,0.05 .. 3*pi] ]
)
| dmcclean/HaTeX | Examples/tikz.hs | bsd-3-clause | 1,384 | 0 | 16 | 322 | 559 | 267 | 292 | 40 | 1 |
module Compiler.OptimiseHs(optimiseHs) where
import Compiler.Hs
import Compiler.Util
optimiseHs :: Program -> Program
-- NOTE(review): this aborts with 'error', dumping the pretty-printed
-- program produced by 'simpleLet' -- apparently a debugging aid left in
-- place, so callers never receive a Program back. Confirm before
-- relying on it in a pipeline.
optimiseHs = error . ("\n"++) . show . simpleLet

-- | Rewrite trivial let expressions everywhere in the program: an
-- empty let disappears, and a single binding is inlined when the bound
-- pattern prints identically to the let body.
simpleLet :: Program -> Program
simpleLet = transformBi f
    where
        f (Let [] x) = x
        f (Let [(x,y)] z) | show x == show z = y
        f x = x
| ndmitchell/tagsoup | dead/parser/Compiler/OptimiseHs.hs | bsd-3-clause | 331 | 0 | 11 | 87 | 137 | 73 | 64 | 10 | 3 |
module Helper where
import qualified Data.ByteString.Lazy as BS
import Data.Aeson
import System.IO.Unsafe
-- | The decoded contents of @input.json@.
--
-- NOTE(review): the file is read via 'unsafePerformIO'; the NOINLINE
-- pragma keeps the read from being duplicated by inlining. A missing or
-- malformed file only surfaces (as 'error') when the value is forced.
defaultInput :: FromJSON a => a
{-# NOINLINE defaultInput #-}
defaultInput = case unsafePerformIO $ fmap eitherDecode' $ BS.readFile "input.json" of
  Left e -> error (show e)
  Right a -> a
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="el-GR">
<title>Front-End Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/frontendscanner/src/main/javahelp/org/zaproxy/zap/extension/frontendscanner/resources/help_el_GR/helpset_el_GR.hs | apache-2.0 | 978 | 78 | 67 | 159 | 417 | 211 | 206 | -1 | -1 |
{-# OPTIONS -XNoMonoLocalBinds #-}
module Grin.DeadCode(deadCode) where
import Control.Monad
import Control.Monad.Trans (MonadIO)
import Data.Monoid
import qualified Data.Set as Set
import Fixer.Fixer
import Fixer.Supply
import Grin.Grin
import Grin.Noodle
import Grin.Whiz
import Stats hiding(print, singleton)
import StringTable.Atom
import Support.CanType
import Support.FreeVars
import Util.Gen
import Util.SetLike hiding(Value)
-- | @implies x y@: whenever @x@ holds, @y@ must hold as well (encoded
-- as a superset constraint on the second value).
implies :: Value Bool -> Value Bool -> Rule
implies = flip isSuperSetOf
-- | Remove dead code from Grin.
deadCode ::
Stats.Stats -- ^ stats to update with what was done
-> [Atom] -- ^ roots
-> Grin -- ^ input
-> IO Grin -- ^ output
deadCode stats roots grin = do
fixer <- newFixer
usedFuncs <- newSupply fixer
usedArgs <- newSupply fixer
usedCafs <- newSupply fixer
pappFuncs <- newValue fixer bottom
suspFuncs <- newValue fixer bottom
-- set all roots as used
flip mapM_ roots $ \r -> do
addRule $ value True `implies` sValue usedFuncs r
let postInline = phaseEvalInlined (grinPhase grin)
forM_ (grinCafs grin) $ \ (v,NodeC t []) -> do
(0,fn) <- tagUnfunction t
v' <- supplyValue usedCafs v
addRule $ conditionalRule id v' $ (suspFuncs `isSuperSetOf` value (singleton fn))
addRule $ v' `implies` (sValue usedFuncs fn)
mapM_ (go fixer pappFuncs suspFuncs usedFuncs usedArgs usedCafs postInline) (grinFuncs grin)
findFixpoint Nothing {-"Dead Code"-} fixer
ua <- supplyReadValues usedArgs
uc <- supplyReadValues usedCafs
uf <- supplyReadValues usedFuncs
pappFuncs <- readValue pappFuncs
suspFuncs <- readValue suspFuncs
when False $ do
putStrLn "usedArgs"
mapM_ print ua
putStrLn "usedCafs"
mapM_ print uc
putStrLn "usedFuncs"
mapM_ print uf
putStrLn "pappFuncs"
print pappFuncs
putStrLn "suspFuncs"
print suspFuncs
let cafSet = fg uc
funSet = fg uf
argSet = fg ua
`union`
fromList [ (n,i) | FuncDef n (args :-> _) _ _ <- grinFunctions grin,
n `member` grinEntryPoints grin,
i <- [0 .. length args] ]
directFuncs = funSet \\ suspFuncs \\ pappFuncs
fg xs = fromList [ x | (x,True) <- xs ]
newCafs <- flip mconcatMapM (grinCafs grin) $ \ (x,y) -> if x `member` cafSet then return [(x,y)] else tick stats "Optimize.dead-code.caf" >> return []
let f ((x,y):xs) rs ws = do
if not $ x `member` funSet then tick stats "Optimize.dead-code.func" >> f xs rs ws else do
(ws',r) <- runStatIO stats $ removeDeadArgs postInline funSet directFuncs cafSet argSet (x,y) ws
f xs (r:rs) ws'
f [] rs _ = return rs
newFuncs <- f (grinFuncs grin) [] whizState
--newFuncs <- flip mconcatMapM (grinFuncs grin) $ \ (x,y) -> do
let (TyEnv mp) = grinTypeEnv grin
mp' <- flip mconcatMapM (toList mp) $ \ (x,tyty@TyTy { tySlots = ts }) -> case Just x of
Just _ | tagIsFunction x, not $ x `member` funSet -> return []
Just fn | fn `member` directFuncs -> do
let da (t,i)
| member (fn,i) argSet = return [t]
| otherwise = tick stats ("Optimize.dead-code.arg-func.{" ++ show x ++ "-" ++ show i) >> return []
ts' <- mconcatMapM da (zip ts naturals)
return [(x,tyty { tySlots = ts' })]
_ -> return [(x,tyty)]
return $ setGrinFunctions newFuncs grin {
grinCafs = newCafs,
grinPartFunctions = pappFuncs,
grinTypeEnv = TyEnv $ fromList mp',
--grinArgTags = Map.fromList newArgTags,
grinSuspFunctions = suspFuncs
}
-- | Pair every argument with a key built from the function tag and the
-- argument's zero-based position.
combineArgs :: a -> [b] -> [((a, Int), b)]
combineArgs fn = zipWith (\n arg -> ((fn, n), arg)) [0 :: Int ..]
go :: (MonadIO m, Collection b, Collection a, Fixable b, Fixable a,
Elem b ~ Atom, Elem a ~ Atom) =>
Fixer
-> Value a
-> Value b
-> Supply Tag Bool
-> Supply (Tag, Int) Bool
-> Supply Var Bool
-> Bool
-> (Tag, Lam)
-> m Lam
go fixer pappFuncs suspFuncs usedFuncs usedArgs usedCafs postInline (fn,as :-> body) = ans where
goAgain = go fixer pappFuncs suspFuncs usedFuncs usedArgs usedCafs postInline
ans = do
usedVars <- newSupply fixer
flip mapM_ (combineArgs fn as) $ \ (ap,Var v _) -> do
x <- supplyValue usedArgs ap
v <- supplyValue usedVars v
addRule $ v `implies` x
-- a lot of things are predicated on this so that CAFS are not held on to unnecesarily
fn' <- supplyValue usedFuncs fn
let varValue v | v < v0 = sValue usedCafs v
| otherwise = sValue usedVars v
f e = g e >> return e
g (BaseOp Eval [e]) = addRule (doNode e)
g (BaseOp Apply {} vs) = addRule (mconcatMap doNode vs)
g (Case e _) = addRule (doNode e)
g Prim { expArgs = as } = addRule (mconcatMap doNode as)
g (App a vs _) = do
addRule $ conditionalRule id fn' $ mconcat [ mconcatMap (implies (sValue usedArgs fn) . varValue) (freeVars a) | (fn,a) <- combineArgs a vs]
addRule $ fn' `implies` sValue usedFuncs a
addRule (mconcatMap doNode vs)
g (BaseOp Overwrite [Var v _,n]) | v < v0 = do
v' <- supplyValue usedCafs v
addRule $ conditionalRule id v' $ doNode n
g (BaseOp Overwrite [vv,n]) = addRule $ (doNode vv) `mappend` (doNode n)
g (BaseOp PokeVal [vv,n]) = addRule $ (doNode vv) `mappend` (doNode n)
g (BaseOp PeekVal [vv]) = addRule $ (doNode vv)
g (BaseOp Promote [vv]) = addRule $ (doNode vv)
g (BaseOp _ xs) = addRule $ mconcatMap doNode xs
g Alloc { expValue = v, expCount = c, expRegion = r } = addRule $ doNode v `mappend` doNode c `mappend` doNode r
g Let { expDefs = defs, expBody = body } = do
mapM_ goAgain [ (name,bod) | FuncDef { funcDefBody = bod, funcDefName = name } <- defs]
flip mapM_ (map funcDefName defs) $ \n -> do
--n' <- supplyValue usedFuncs n
--addRule $ fn' `implies` n'
return ()
g Error {} = return ()
-- TODO - handle function and case return values smartier.
g (Return ns) = mapM_ (addRule . doNode) ns
g x = error $ "deadcode.g: " ++ show x
h' (p,e) = h (p,e) >> return (Just (p,e))
h (p,BaseOp (StoreNode _) [v]) = addRule $ mconcat $ [ conditionalRule id (varValue pv) (doNode v) | pv <- freeVars p]
h (p,BaseOp Demote [v]) = addRule $ mconcat $ [ conditionalRule id (varValue pv) (doNode v) | pv <- freeVars p]
h (p,Alloc { expValue = v, expCount = c, expRegion = r }) = addRule $ mconcat $ [ conditionalRule id (varValue pv) (doNode v `mappend` doNode c `mappend` doNode r) | pv <- freeVars p]
h (p,Return vs) = mapM_ (h . \v -> (p,BaseOp Promote [v])) vs -- addRule $ mconcat $ [ conditionalRule id (varValue pv) (doNode v) | pv <- freeVars p]
h (p,BaseOp Promote [v]) = addRule $ mconcat $ [ conditionalRule id (varValue pv) (doNode v) | pv <- freeVars p]
h (p,e) = g e
doNode (NodeC n as) | not postInline, Just (x,fn) <- tagUnfunction n = let
consts = (mconcatMap doNode as)
usedfn = implies fn' (sValue usedFuncs fn)
suspfn | x > 0 = conditionalRule id fn' (pappFuncs `isSuperSetOf` value (singleton fn))
| otherwise = conditionalRule id fn' (suspFuncs `isSuperSetOf` value (singleton fn))
in mappend consts $ mconcat (usedfn:suspfn:[ mconcatMap (implies (sValue usedArgs fn) . varValue) (freeVars a) | (fn,a) <- combineArgs fn as])
doNode x = doConst x `mappend` mconcatMap (implies fn' . varValue) (freeVars x)
doConst _ | postInline = mempty
doConst (Const n) = doNode n
doConst (NodeC n as) = mconcatMap doConst as
doConst _ = mempty
(nl,_) <- whiz (\_ -> id) h' f whizState (as :-> body)
return nl
removeDeadArgs :: MonadStats m => Bool -> Set.Set Atom -> Set.Set Atom -> (Set.Set Var) -> (Set.Set (Atom,Int)) -> (Atom,Lam) -> WhizState -> m (WhizState,(Atom,Lam))
removeDeadArgs postInline funSet directFuncs usedCafs usedArgs (a,l) whizState = whizExps f (margs a l) >>= \(l,ws) -> return (ws,(a,l)) where
whizExps f l = whiz (\_ x -> x) (\(p,e) -> f e >>= \e' -> return (Just (p,e'))) f whizState l
margs fn (as :-> e) | a `Set.member` directFuncs = ((removeArgs fn as) :-> e)
margs _ x = x
f (App fn as ty) = do
as <- dff fn as
as <- mapM clearCaf as
return $ App fn as ty
f (Return [NodeC fn as]) | Just fn' <- tagToFunction fn = do
as <- dff' fn' as
as <- mapM clearCaf as
return $ Return [NodeC fn as]
f (BaseOp (StoreNode False) [NodeC fn as]) | Just fn' <- tagToFunction fn = do
as <- dff' fn' as
as <- mapM clearCaf as
return $ BaseOp (StoreNode False) [NodeC fn as]
f (BaseOp Overwrite [(Var v TyINode),_]) | deadCaf v = do
mtick $ toAtom "Optimize.dead-code.caf-update"
return $ Return []
f (BaseOp Overwrite [p,NodeC fn as]) | Just fn' <- tagToFunction fn = do
as <- dff' fn' as
as <- mapM clearCaf as
return $ BaseOp Overwrite [p,NodeC fn as]
-- f (Update (Var v TyINode) _) | deadCaf v = do
-- mtick $ toAtom "Optimize.dead-code.caf-update"
-- return $ Return []
-- f (Update p (NodeC fn as)) | Just fn' <- tagToFunction fn = do
-- as <- dff' fn' as
-- as <- mapM clearCaf as
-- return $ Update p (NodeC fn as)
f lt@Let { expDefs = defs } = return $ updateLetProps lt { expDefs = defs' } where
defs' = [ updateFuncDefProps df { funcDefBody = margs name body } | df@FuncDef { funcDefName = name, funcDefBody = body } <- defs, name `Set.member` funSet ]
f x = return x
dff' fn as | fn `member` directFuncs = return as
dff' fn as = dff'' fn as
dff fn as | fn `member` directFuncs = return (removeArgs fn as)
dff fn as = dff'' fn as
dff'' fn as | not (fn `member` funSet) = return as -- if function was dropped, we don't have argument use information.
dff'' fn as = mapM df (zip as naturals) where
df (a,i) | not (deadVal a) && not (member (fn,i) usedArgs) = do
mtick $ toAtom "Optimize.dead-code.func-arg"
return $ properHole (getType a)
df (a,_) = return a
clearCaf (Var v TyINode) | deadCaf v = do
mtick $ toAtom "Optimize.dead-code.caf-arg"
return (properHole TyINode)
clearCaf (NodeC a xs) = do
xs <- mapM clearCaf xs
return $ NodeC a xs
clearCaf (Index a b) = return Index `ap` clearCaf a `ap` clearCaf b
clearCaf (Const a) = Const `liftM` clearCaf a
clearCaf x = return x
deadCaf v = v < v0 && not (v `member` usedCafs)
deadVal (Lit 0 _) = True
deadVal x = isHole x
removeArgs fn as = concat [ perhapsM ((fn,i) `member` usedArgs) a | a <- as | i <- naturals ]
| m-alvarez/jhc | src/Grin/DeadCode.hs | mit | 11,386 | 1 | 26 | 3,518 | 4,463 | 2,234 | 2,229 | -1 | -1 |
{-# LANGUAGE DeriveFunctor, StandaloneDeriving, FlexibleInstances, TypeFamilies, GeneralizedNewtypeDeriving #-}
module T4185 where
import Data.Kind (Type)
data family Foo k :: Type -> Type
------------- Generalised newtype deriving of user class -----------
class Bar f where
bar :: f a -> Int
woo :: f a -> f a
instance Bar Maybe where
bar Nothing = 0
bar Just{} = 1
woo x = x
-- Deriving clause
newtype instance Foo Int a = FooInt (Maybe a) deriving (Bar)
-- Standalone deriving
newtype instance Foo Char a = FooChar (Maybe a)
deriving instance Bar (Foo Char)
{-
dBarMaybe :: Bar Maybe
newtype FooInt a = FooInt (Maybe a)
axiom ax7 a : Foo Int a ~ FooInt a -- Family axiom
axiom ax7 : FooInt ~ Maybe -- Newtype axiom
dBarFooInt :: Bar (Foo Int)
dBarFooInt = dBarMaybe |> Bar ax7
-}
------------- Deriving on data types for Functor -----------
-- Deriving clause
data instance Foo Bool a = FB1 a | FB2 a deriving( Functor )
-- Standalone deriving
data instance Foo Float a = FB3 a
deriving instance Functor (Foo Float)
--instance Functor (Foo Bool) where
-- fmap f (FB1 x) = FB1 (f x)
-- fmap f (FB2 y) = FB2 (f y)
| sdiehl/ghc | testsuite/tests/indexed-types/should_compile/T4185.hs | bsd-3-clause | 1,184 | 0 | 8 | 272 | 230 | 126 | 104 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Lamdu.GUI.ExpressionEdit.LiteralEdit
( makeInt
) where
import Control.Lens.Operators
import Control.MonadA (MonadA)
import Data.Monoid (Monoid(..))
import Data.Store.Guid (Guid)
import Lamdu.GUI.ExpressionEdit.HoleEdit.State (HoleState(..), setHoleStateAndJump)
import Lamdu.GUI.ExpressionGui (ExpressionGui)
import Lamdu.GUI.ExpressionGui.Monad (ExprGuiM)
import qualified Control.Lens as Lens
import qualified Data.Store.Transaction as Transaction
import qualified Graphics.UI.Bottle.EventMap as E
import qualified Graphics.UI.Bottle.Widget as Widget
import qualified Lamdu.Config as Config
import qualified Lamdu.GUI.BottleWidgets as BWidgets
import qualified Lamdu.GUI.ExpressionGui as ExpressionGui
import qualified Lamdu.GUI.ExpressionGui.Monad as ExprGuiM
import qualified Lamdu.GUI.WidgetEnvT as WE
import qualified Lamdu.Sugar.Types as Sugar
type T = Transaction.Transaction
-- | Run a GUI action with the foreground colour configured for integer
-- literals.
setColor :: MonadA m => ExprGuiM m a -> ExprGuiM m a
setColor action =
  ExprGuiM.widgetEnv WE.readConfig >>= \config ->
  ExprGuiM.withFgColor (Config.literalIntColor config) action
mkEditEventMap ::
MonadA m => Integer -> T m Guid -> Widget.EventHandlers (T m)
mkEditEventMap integer setToHole =
Widget.keysEventMapMovesCursor [E.ModKey E.noMods E.Key'Enter]
(E.Doc ["Edit", "Integer"]) $
setHoleStateAndJump (HoleState (show integer)) =<< setToHole
makeInt ::
MonadA m =>
Integer -> Sugar.Payload Sugar.Name m ExprGuiM.Payload ->
Widget.Id ->
ExprGuiM m (ExpressionGui m)
makeInt integer pl myId =
BWidgets.makeFocusableTextView (show integer) myId
& setColor . ExprGuiM.widgetEnv
<&> Widget.weakerEvents editEventMap
<&> ExpressionGui.fromValueWidget
& ExpressionGui.stdWrap pl
where
editEventMap =
maybe mempty (mkEditEventMap integer) $
pl ^? Sugar.plActions . Lens._Just . Sugar.mSetToHole . Lens._Just
| schell/lamdu | Lamdu/GUI/ExpressionEdit/LiteralEdit.hs | gpl-3.0 | 1,887 | 0 | 14 | 254 | 522 | 297 | 225 | -1 | -1 |
main = do
  -- A pattern in a do-binding: '[1,2]' matches the returned list, so
  -- execution continues; a mismatch would instead raise a
  -- pattern-match failure in IO (the behaviour this test exercises).
  [1,2] <- return [1,2]
  putStrLn "OK."
| philderbeast/ghcjs | test/fay/doAssignPatterMatch.hs | mit | 54 | 0 | 9 | 16 | 34 | 17 | 17 | 3 | 1 |
{-# LANGUAGE PatternSynonyms #-}
module Foo (A(P)) where
data A = A
data B = B
pattern P :: () => (f ~ B) => f
pattern P = B
| ezyang/ghc | testsuite/tests/patsyn/should_fail/poly-export-fail2.hs | bsd-3-clause | 128 | 0 | 9 | 33 | 58 | 35 | 23 | 9 | 0 |
{-# OPTIONS -XMultiParamTypeClasses -XFunctionalDependencies -XFlexibleInstances #-}
module HaskellBug where
data Relation c -- The basic Relation
= Rel { relnm :: String -- The name of the relation
, relsrc :: c -- Source concept
, reltrg :: c -- ^Target concept
}
deriving Eq
-- This declaration is ok; should not get an error here
class (Eq concept)=> Association rel concept | rel -> concept where
source, target :: rel -> concept
-- e.g. Declaration Concept -> Concept
sign :: rel -> (concept,concept)
sign x = (source x,target x)
homogeneous :: rel -> Bool
homogeneous s = source s == target s
instance (Eq c)=>Association (Relation c) c where
source = relsrc
target = reltrg
-- This declaration has a kind error
-- The error should be reported here
class (Eq c, Association r c) => Morphic r c where
multiplicities :: r c -> [c]
multiplicities _ = []
| urbanslug/ghc | testsuite/tests/typecheck/should_fail/T4875.hs | bsd-3-clause | 998 | 0 | 8 | 290 | 231 | 131 | 100 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Language.Futhark.TypeChecker.TypesTests (tests) where
import Data.Bifunctor (first)
import qualified Data.Map as M
import Futhark.FreshNames
import Futhark.Util.Pretty (prettyOneLine)
import Language.Futhark
import Language.Futhark.Semantic
import Language.Futhark.SyntaxTests ()
import Language.Futhark.TypeChecker (initialEnv)
import Language.Futhark.TypeChecker.Monad
import Language.Futhark.TypeChecker.Types
import Test.Tasty
import Test.Tasty.HUnit
evalTest :: TypeExp Name -> ([VName], StructRetType) -> TestTree
evalTest te expected =
testCase (pretty te) $
case fmap (extract . fst) (run (checkTypeExp te)) of
Left e -> assertFailure $ "Failed: " <> pretty e
Right actual ->
actual @?= expected
where
extract (_, svars, t, _) = (svars, t)
run = snd . runTypeM env mempty (mkInitialImport "") blankNameSource
-- We hack up an environment with some predefined type
-- abbreviations for testing. This is all pretty sensitive to the
-- specific unique names, so we have to be careful!
env =
initialEnv
{ envTypeTable =
M.fromList
[ ( "square_1000",
TypeAbbr
Unlifted
[TypeParamDim "n_1001" mempty]
"[n_1001][n_1001]i32"
),
( "fun_1100",
TypeAbbr
Lifted
[ TypeParamType Lifted "a_1101" mempty,
TypeParamType Lifted "b_1102" mempty
]
"a_1101 -> b_1102"
)
]
<> envTypeTable initialEnv,
envNameMap =
M.fromList
[ ((Type, "square"), "square_1000"),
((Type, "fun"), "fun_1100")
]
<> envNameMap initialEnv
}
evalTests :: TestTree
evalTests =
testGroup
"Type expression elaboration"
[ evalTest
"[]i32"
([], "?[d_0].[d_0]i32"),
evalTest
"[][]i32"
([], "?[d_0][d_1].[d_0][d_1]i32"),
evalTest
"bool -> []i32"
([], "bool -> ?[d_0].[d_0]i32"),
evalTest
"bool -> []f32 -> []i32"
(["d_0"], "bool -> [d_0]f32 -> ?[d_1].[d_1]i32"),
evalTest
"([]i32,[]i32)"
([], "?[d_0][d_1].([d_0]i32, [d_1]i32)"),
evalTest
"{a:[]i32,b:[]i32}"
([], "?[d_0][d_1].{a:[d_0]i32, b:[d_1]i32}"),
evalTest
"?[n].[n][n]bool"
([], "?[n_0].[n_0][n_0]bool"),
evalTest
"([]i32 -> []i32) -> bool -> []i32"
(["d_0"], "([d_0]i32 -> ?[d_1].[d_1]i32) -> bool -> ?[d_2].[d_2]i32"),
evalTest
"((k: i64) -> [k]i32 -> [k]i32) -> []i32 -> bool"
(["d_1"], "((k_0: i64) -> [k_0]i32 -> [k_0]i32) -> [d_1]i32 -> bool"),
evalTest
"square [10]"
([], "[10][10]i32"),
evalTest
"square []"
([], "?[d_0].[d_0][d_0]i32"),
evalTest
"bool -> square []"
([], "bool -> ?[d_0].[d_0][d_0]i32"),
evalTest
"(k: i64) -> square [k]"
([], "(k_0: i64) -> [k_0][k_0]i32"),
evalTest
"fun i32 bool"
([], "i32 -> bool"),
evalTest
"fun ([]i32) bool"
([], "?[d_0].[d_0]i32 -> bool"),
evalTest
"fun bool ([]i32)"
([], "?[d_0].bool -> [d_0]i32"),
evalTest
"bool -> fun ([]i32) bool"
([], "bool -> ?[d_0].[d_0]i32 -> bool"),
evalTest
"bool -> fun bool ([]i32)"
([], "bool -> ?[d_0].bool -> [d_0]i32")
]
-- | One test case: applying the substitutions in the map to the given
-- type must yield the expected result. The case name shows both the
-- map and the input type.
substTest :: M.Map VName (Subst StructRetType) -> StructRetType -> StructRetType -> TestTree
substTest m t expected =
  testCase (renderedMap <> ": " <> prettyOneLine t) $
    applySubst (`M.lookup` m) t @?= expected
  where
    renderedMap = prettyOneLine (map (first prettyName) (M.toList m))
-- Some of these tests may be a bit fragile, in that they depend on
-- internal renumbering, which can be arbitrary.
substTests :: TestTree
substTests =
testGroup
"Type substitution"
[ substTest m0 "t_0" "i64",
substTest m0 "[1]t_0" "[1]i64",
substTest m0 "?[n_10].[n_10]t_0" "?[n_10].[n_10]i64",
--
substTest m1 "t_0" "?[n_1].[n_1]bool",
substTest m1 "f32 -> t_0" "f32 -> ?[n_1].[n_1]bool",
substTest m1 "f32 -> f64 -> t_0" "f32 -> f64 -> ?[n_1].[n_1]bool",
substTest m1 "f32 -> t_0 -> bool" "?[n_1].f32 -> [n_1]bool -> bool",
substTest m1 "f32 -> t_0 -> t_0" "?[n_1].f32 -> [n_1]bool -> ?[n_2].[n_2]bool"
]
where
m0 =
M.fromList [("t_0", Subst [] "i64")]
m1 =
M.fromList [("t_0", Subst [] "?[n_1].[n_1]bool")]
tests :: TestTree
tests = testGroup "Basic type operations" [evalTests, substTests]
| HIPERFIT/futhark | unittests/Language/Futhark/TypeChecker/TypesTests.hs | isc | 4,838 | 0 | 15 | 1,483 | 990 | 550 | 440 | 126 | 2 |
module Update where
import Config
import File
import Nemo
import UpdateApi
-- | Run a nemo update in which every freshly made clone is relocated
-- into the configuration directory.
update :: Nemo FilePath File -> Nemo FilePath File
update = Nemo.update (moveToConfigDir makeClone)

-- | Wrap a clone-producing function so that the resulting file lives
-- under 'configDir'; the key is passed through unchanged.
moveToConfigDir :: (Nemo String File -> String -> (String, File))
                -> Nemo String File -> String -> (String, File)
moveToConfigDir clone nemo source =
    let (key, file) = clone nemo source
    in (key, replaceSubdirectoryPart configDir file)
| larioj/nemo | src/Update.hs | mit | 467 | 0 | 9 | 136 | 143 | 77 | 66 | 12 | 1 |
{--
- Problem 56
(**) Symmetric binary trees
Let us call a binary tree symmetric if you can draw a vertical line through the root node and then the
right subtree is the mirror image of the left subtree. Write a predicate symmetric/1 to check whether a
given binary tree is symmetric.
Hint: Write a predicate mirror/2 first to check whether one tree is the
mirror image of another. We are only interested in the structure, not in the contents of the nodes.
Example in Haskell:
*Main> symmetric (Branch 'x' (Branch 'x' Empty Empty) Empty)
False
*Main> symmetric (Branch 'x' (Branch 'x' Empty Empty) (Branch 'x' Empty Empty))
True
--}
-- | A binary tree: either empty or a node with two subtrees.
data Tree a = Empty | Branch a (Tree a) (Tree a) deriving (Show, Eq)

-- | A tree is symmetric when its left subtree is the structural mirror
-- image of its right subtree; node contents are ignored.
symmetric :: Tree a -> Bool
symmetric Empty = True
symmetric (Branch _ left right) = mirror left right
  where
    mirror Empty Empty = True
    mirror (Branch _ la ra) (Branch _ lb rb) = mirror la rb && mirror lb ra
    mirror _ _ = False
| sighingnow/Functional-99-Problems | Haskell/56.hs | mit | 1,015 | 0 | 9 | 258 | 157 | 80 | 77 | 7 | 3 |
module StarStats.Parsers.ZNC where
import Control.Applicative((<*>), (<$>), (*>), (<*))
import Text.Parsec
import Text.Parsec.String
import qualified Text.Parsec.Token as P
import Text.Parsec.Language (emptyDef)
import Data.Functor.Identity
import Data.Time.LocalTime
import Data.Maybe
import Data.List(intersperse)
import StarStats.Time
import StarStats.Parsers.Common
parseDataLine :: Parser DataLine
parseDataLine = try (parseTimeChange) <|> parseChatLine
parseChatLine :: Parser DataLine
parseChatLine = try parseAction
<|> try parseStatus
<|> try parseInvite
<|> try parseMessage
<|> try parseBad
<|> parseTopic
parsePrefix :: String -> Parser String
parsePrefix s = symbol "--- " *> symbol s *> symbol ":"
parseTimeChange :: Parser DataLine
parseTimeChange = try (Open <$> parseLogDate)
<|> (Close <$> parseLogDate)
parseLogDate :: Parser LocalTime
parseLogDate = do
time <- parseFullTime <* parsePrefix "log"
<* parseNick --started/ended
<* many (noneOf "/") --channelname
<* char '/'
date <- parseDate
return $ makeTime date time
--- quit: Overflow ("
-- | Expand a two-digit log year: values below 80 fall in the 2000s,
-- everything else in the 1900s (IRC did not exist before the 1980s).
guessYear :: Int -> Integer
guessYear year = fromIntegral (century + year)
  where
    century = if year < 80 then 2000 else 1900
parseDate :: Parser (Integer, Int, Int)
parseDate = (,,) <$> (guessYear <$> (parseInt <* symbol "."))
<*> (parseInt <* symbol ".")
<*> parseInt
parseInvite :: Parser DataLine
parseInvite = Invite <$> parseTime
<*> ((symbol "!") *> eatLine)
parseStatus :: Parser DataLine
parseStatus = try parseQuit
<|> try parsePart
<|> try parseJoin
<|> try parseMode
<|> try parseNickChange
<|> parseKick
parseBad :: Parser DataLine
parseBad =
Bad <$> (parseTime *> badInner *> eatLine)
where badInner = try (parsePrefix "topic" *> char '\'' *> return "")
<|> try (parsePrefix "topic" *> symbol "set by ")
<|> try (symbol "***")
<|> try (char '-' *> noneOf "-" *> return "")
<|> parsePrefix "names"
parseJoin :: Parser DataLine
parseJoin =
Join <$> parseTime
<*> (parsePrefix "join" *> parseNick <* eatLine)
parseQuit :: Parser DataLine
parseQuit =
Quit <$> parseTime
<*> (parsePrefix "quit" *> parseNick)
<*> (symbol "(" *> many (noneOf ")") <* (symbol ")"))
parsePart :: Parser DataLine
parsePart =
Part <$> parseTime
<*> (parsePrefix "part" *> parseNick)
<*> (symbol "left #" *> parseNick *> return "")
parseMode :: Parser DataLine
parseMode = Mode <$> parseTime
<*> (parsePrefix "mode" *> eatLine)
-- | Split a string at its final quote character: the first component
-- is everything before the last @'@, the second everything after it
-- (the quote itself is dropped). Used to recover a topic string of the
-- form @topic' by name@.
--
-- Fixed: the previous helper had no case for the empty list, so the
-- empty string crashed with a pattern-match failure; it now yields
-- @("", "")@. Behaviour on non-empty input is unchanged.
extractLongestQuote :: String -> (String, String)
extractLongestQuote xs =
    (\(acc, rest) -> (reverse acc, rest)) (go xs [])
  where
    -- Walk forward, accumulating (reversed) characters for as long as
    -- a quote still occurs later in the string; stop at the last
    -- quote, which is itself dropped.
    go [] acc = (acc, [])
    go (c : cs) acc
        | '\'' `elem` cs = go cs (c : acc)
        | otherwise = (acc, cs)
-- | Drop a leading @" by "@ marker, if present; otherwise return the
-- string unchanged.
stripBy :: String -> String
stripBy str = case str of
    ' ' : 'b' : 'y' : ' ' : rest -> rest
    _ -> str
parseTopic :: Parser DataLine
parseTopic = do
time <- parseTime <* parsePrefix "topic" <* symbol "set to"
contents <- char '\'' *> eatLine
let (topic, rest) = extractLongestQuote contents
let name = stripBy rest
return $ Topic time name topic
parseKick :: Parser DataLine
parseKick = Kick <$> parseTime
<*> (parsePrefix "kick" *> parseNick)
<*> (symbol "was kicked by" *> parseNick)
<*> eatLine --todo, strip parens
parseNick :: Parser Name
parseNick = many (noneOf " ") <* whiteSpace
parseNickChange :: Parser DataLine
parseNickChange = Nick <$> parseTime
<*> (parsePrefix "nick" *> parseNick)
<*> (symbol "->" *> parseNick)
parseAction :: Parser DataLine
parseAction = Message <$> parseTime
<*> return 1
<*> (symbol "*" *> parseNick)
<*> eatLine
parseMessage :: Parser DataLine
parseMessage = Message <$> parseTime
<*> return 0
<*> (string "<" *> many (noneOf ">") <* symbol ">")
<*> eatLine
parseFullTime :: Parser (Int,Int,Int)
parseFullTime = (,,) <$> (parseInt <* symbol ":")
<*> (parseInt <* symbol ":")
<*> parseInt
parseTime :: Parser Time
parseTime = do
(h, m, _) <- parseFullTime
return (h, m)
parseLine :: String -> Either DbParseError [DataLine]
parseLine s =
case parse parseDataLine "" s of
Left err -> Left (DbParseError s (show err))
Right success -> Right [success]
| deweyvm/starstats | src/StarStats/Parsers/ZNC.hs | mit | 4,970 | 0 | 15 | 1,596 | 1,466 | 753 | 713 | 128 | 2 |
-- arith4.hs
module Arith4 where
-- id :: a -> a
-- id x = x
-- | Serialise a value with 'show' and parse it back with 'read': the
-- identity on any type whose Show/Read instances round-trip.
roundTrip :: ( Show a, Read a ) => a -> a
roundTrip x = (read . show) x

-- | The same round trip, written point free.
roundTripPF :: ( Show a, Read a ) => a -> a
roundTripPF = read . show

-- | A more general round trip: the parsed type may differ from the
-- shown one, as long as the rendered text is valid 'Read' syntax for
-- the target type.
roundTripPFG :: ( Show a, Read b ) => a -> b
roundTripPFG = read . show
-- Prints each round-tripped value; the literal 4 defaults to Integer
-- except where annotated as Double.
main = do
  print (roundTrip 4)
  print (roundTripPF 4)
  print ( (roundTripPFG 4) :: Double )
  print (id 4)
| Lyapunov/haskell-programming-from-first-principles | chapter_7/arith4.hs | mit | 424 | 0 | 10 | 107 | 175 | 92 | 83 | 12 | 1 |
------------------------------------------------------------------------------
-- Error: monad for representing errors in the Apollo language
------------------------------------------------------------------------------
module Error (
ApolloError (..)
, ThrowsError
, trapError
, extractValue
) where
import Control.Monad.Error (Error(noMsg,strMsg), MonadError, catchError)
import Type
type ThrowsError = Either ApolloError
data ApolloError
= TypeMismatch String Type Type
| TypeDMismatch Type Type
| TypeUMismatch String Type
| TypeRMismatch String Type Type
| TypeArgCount String Int Int
| TypeArgMismatch String [Type] [Type]
| TypeExcept String
| UnboundVar String String
| RedefVar String
| ParseErr String
| DivByZero
| Default String
instance Error ApolloError where
noMsg = Default "an error has occured"
strMsg = Default
instance Show ApolloError where
show (TypeMismatch op a b) = "Type error: " ++ show a ++ " and " ++ show b ++ " are wrong operand types for `" ++ op ++ "`"
show (TypeUMismatch op a) = "Type error: " ++ show a ++ " is wrong operand type for unary `" ++ op ++ "`"
show (TypeDMismatch a b) = "Type error: definition of " ++ show a ++ ", but assigned to " ++ show b
show (TypeRMismatch f a b) = "Type error: `" ++ f ++ "` defined with return-type of " ++ show a ++ ", but actual return-type is " ++ show b
show (TypeExcept msg) = "Type error: " ++ show msg
show (TypeArgCount f a b) = "Type error: for `" ++ f ++ "` expected " ++ show a ++ " arguments; received " ++ show b
show (TypeArgMismatch f a b) = "Type error: for `" ++ f ++ "` expected aguments of type (" ++ showArgs a ++ "); received (" ++ showArgs b ++ ")"
show (UnboundVar action var) = action ++ " an unbound variable: " ++ var
show (RedefVar var) = "Multiple declaration: redefining variable " ++ var
show (ParseErr val) = "Parse error: unexpected " ++ val
show (DivByZero) = "Zero-division error: division or modulo by zero"
show (Default msg) = msg
-- | Render a list of types as @"T1, T2, T3"@. The empty list renders
-- as the empty string; previously the @init . init@ pipeline raised an
-- exception on @[]@ (calling 'init' on an empty string).
showArgs :: [Type] -> String
showArgs [] = ""
showArgs ts = init . init $ concatMap ((++", ") . show) ts
trapError :: (MonadError e m, Show e) => m String -> m String
trapError action = catchError action (return . show)
extractValue :: Either ApolloError a -> a
extractValue (Right val) = val
extractValue (Left _) = error $ "bug: extractValue called with Left"
| apollo-lang/apollo | src/Error.hs | mit | 2,413 | 0 | 12 | 519 | 675 | 353 | 322 | 44 | 1 |
------------------------------------------------------------------------------
-- | Defined to allow the constructor of 'MediaType' to be exposed to tests.
module Network.HTTP.Media.MediaType.Internal
( MediaType (..)
, Parameters
) where
import qualified Data.ByteString.Char8 as BS
import qualified Data.CaseInsensitive as CI
import qualified Data.Map as Map
import Control.Monad (foldM, guard)
import Data.ByteString (ByteString)
import Data.CaseInsensitive (CI, original)
import Data.Map (Map)
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.String (IsString (..))
import Network.HTTP.Media.Accept (Accept (..))
import Network.HTTP.Media.RenderHeader (RenderHeader (..))
import Network.HTTP.Media.Utils (breakChar, trimBS)
------------------------------------------------------------------------------
-- | An HTTP media type, consisting of the type, subtype, and parameters.
data MediaType = MediaType
    { mainType :: CI ByteString -- ^ The main type of the MediaType
    , subType :: CI ByteString -- ^ The sub type of the MediaType
    , parameters :: Parameters -- ^ The parameters of the MediaType
    } deriving (Eq, Ord)
-- | Rendered exactly as it would appear in an HTTP header.
instance Show MediaType where
    show = BS.unpack . renderHeader
-- | String literals are parsed with 'parseAccept'; an invalid literal is
-- a programmer error and aborts via 'error'.
instance IsString MediaType where
    fromString str = flip fromMaybe (parseAccept $ BS.pack str) $
        error $ "Invalid media type literal " ++ str
instance Accept MediaType where
    -- Splits on ';' into the "type/subtype" head and parameter chunks,
    -- rejects empty type or subtype, and rejects "*/b" for concrete b
    -- (only "*/*" may use a wildcard main type).
    parseAccept bs = do
        (s, ps) <- uncons (map trimBS (BS.split ';' bs))
        (a, b) <- breakChar '/' s
        guard $ not (BS.null a || BS.null b) && (a /= "*" || b == "*")
        ps' <- foldM insert Map.empty ps
        return $ MediaType (CI.mk a) (CI.mk b) ps'
      where
        uncons [] = Nothing
        uncons (a : b) = Just (a, b)
        both f (a, b) = (f a, f b)
        -- Each parameter chunk must contain '='; key and value are
        -- case-insensitive.
        insert ps =
            fmap (flip (uncurry Map.insert) ps . both CI.mk) . breakChar '='
    -- a matches b when b is at least as general: wildcards on b's side
    -- match anything at that level, and b's parameters (when present)
    -- must equal a's exactly.
    matches a b
        | mainType b == "*" = params
        | subType b == "*" = mainType a == mainType b && params
        | otherwise = main && sub && params
      where
        main = mainType a == mainType b
        sub = subType a == subType b
        params = Map.null (parameters b) || parameters a == parameters b
    -- Specificity ordering used for content negotiation: fewer wildcards
    -- and more parameters make a media type more specific.  Only defined
    -- when a matches b.
    moreSpecificThan a b = (a `matches` b &&) $
        mainType a == "*" && anyB && params ||
        subType a == "*" && (anyB || subB && params) ||
        anyB || subB || params
      where
        anyB = mainType b == "*"
        subB = subType b == "*"
        params = not (Map.null $ parameters a) && Map.null (parameters b)
    hasExtensionParameters _ = True
-- | Renders as "main/sub" followed by ";key=value" for every parameter.
instance RenderHeader MediaType where
    renderHeader (MediaType a b p) =
        Map.foldrWithKey f (original a <> "/" <> original b) p
      where
        f k v = (<> ";" <> original k <> "=" <> original v)
------------------------------------------------------------------------------
-- | 'MediaType' parameters: a case-insensitive key/value map.
type Parameters = Map (CI ByteString) (CI ByteString)
| zmthy/http-media | src/Network/HTTP/Media/MediaType/Internal.hs | mit | 3,292 | 0 | 17 | 1,030 | 946 | 502 | 444 | 58 | 0 |
{-|
Module : Emit
Maintainer : Dominick DiRenzo
Emit C intermediate representation given AST
-}
module Emit ( compileAST, emitModule, emitAST ) where
import Lexer
import Parser
import Control.Monad
import Control.Monad.State
import Text.Printf (printf)
import qualified Data.Foldable as Fold
import qualified Data.List as List
import qualified Data.Set as Set
import qualified Data.Map as Map
import qualified Data.Bimap as Bimap
-- | Numeric label used for generated constants and lambdas.
type Id = Int
-- | Successor id.
nextId :: Id -> Id
nextId i = i + 1
-- | Set of expressions (variables) bound in a scope.
type Scope = Set.Set Expr
-- | Accumulated output of compilation: global names, numbered lambdas
-- (keyed by body plus captured environment, for sharing), numbered
-- constants, and the compiled main block.
data Module = Module { globals :: Set.Set Expr
                     , lambdas :: Bimap.Bimap (Expr, [Expr]) Id
                     , constants :: Bimap.Bimap Expr Id
                     , main :: Block }
instance Show Module where
  show m =
    let showHelp l = concatMap (\x -> "\t" ++ (show x) ++ "\n") l
    in "globals:\n" ++ (showHelp (Set.toList (globals m))) ++ "\n" ++
       "lambdas:\n" ++ (showHelp (Bimap.toList (lambdas m))) ++ "\n" ++
       "constants:\n" ++ (showHelp (Bimap.toList (constants m))) ++ "\n" ++
       "main:\n" ++ (showHelp (main m))
-- | Compiler state: next (constant, lambda) ids, the (bound, free)
-- variable scopes, and the module built so far.
data MyState = MyState { ids :: (Id, Id)
                       , scope :: (Scope, Scope)
                       , modl :: Module }
-- | C identifier for a numbered constant.
printf_const :: Id -> String
printf_const id = printf "__CONST_%d" id
-- | C expression constructing a lambda closure over its captured
-- environment.  NOTE(review): the (\(VAR x) -> x) projection is partial —
-- assumes every captured expression is a VAR; confirm that invariant
-- upstream.
printf_lambda id [] = printf "_lambda(__LAMBDA_%d, 0)" id
printf_lambda id env = printf "_lambda(__LAMBDA_%d, %d, %s)" id (List.length env) $ List.intercalate "," $ map (\(VAR x) -> x) env
-- | Compile one expression to a C expression string, numbering constants
-- and lambdas on first use (subsequent identical occurrences reuse the
-- existing id) and recording free variables in the "outs" scope.
compileExpr :: Expr -> State MyState String
compileExpr c@(CONST _) = state $ \s ->
  let MyState { ids=(id0, id1), modl=m } = s
      Module { constants=consts } = m
  in case Bimap.lookup c consts of
    Nothing -> (printf_const id0, s { ids=(nextId id0, id1), modl=m' })
      where m' = m { constants = Bimap.insert c id0 consts }
    Just id' -> (printf_const id', s)
compileExpr v@(VAR x) = state $ \s ->
  -- A variable not bound in the current scope is recorded as free.
  let MyState { scope=(vars, outs) } = s
  in if Set.member v vars
     then (x, s)
     else (x, s { scope=(vars, Set.insert v outs) })
compileExpr (LAMBDA arg block) = state $ \s ->
  -- Compile the body in a fresh scope containing only the argument;
  -- the body's free variables (minus the argument) become the captured
  -- environment and are also propagated outward.
  let MyState { scope=(vars0,outs0) } = s
      init = s { scope=(Set.singleton arg, Set.empty) }
      (block', s') = runState (compileBlock block) init
      lambda = LAMBDA arg block'
      MyState { ids=(id0,id1), scope=(vars1, outs1), modl=m } = s'
      Module { lambdas=lambs } = m
      outs2 = outs0 `Set.union` (outs1 `Set.difference` (Set.singleton arg))
      env = Set.toList $ outs1 `Set.difference` (Set.singleton arg)
  in case Bimap.lookup (lambda,env) lambs of
    Nothing -> (printf_lambda id1 env, s' { ids=(id0, nextId id1), scope=(vars0,outs2), modl=m' })
      where m' = m { lambdas = Bimap.insert (lambda, env) id1 lambs }
    Just id' -> (printf_lambda id' env, s' { scope=(vars0,outs2) })
compileExpr (OPER binop) =
  -- Only ADD is implemented; other operators currently emit "".
  case binop of
    ADD e1 e2 -> do a <- compileExpr e1
                    b <- compileExpr e2
                    return $ printf "_apply (get_attr(%s, \"__add__\"), %s)" a b
    _ -> return $ ""
compileExpr (APPLY e1 e2) =
  do a <- compileExpr e1
     b <- compileExpr e2
     return $ printf "_apply (%s, %s)" a b
compileExpr (ATTR expr id) = do
  expr' <- compileExpr expr
  return $ printf "get_attr (%s, \"%s\")" expr' id
-- Fallback: unhandled expression forms compile to the empty string.
compileExpr e = return ""
-- updateGlobals :: Expr -> State (Id, Scope, Scope, Module) ()
-- updateGlobals x = state $ \s ->
--   let (id, scope, outs, m) = s
--       gs = globals m
--       m' = m { globals = Set.insert x gs }
--   in ((), (id, scope, outs, m'))
-- | Record a variable as bound; returns True when it was already bound
-- (so assignment emits "x = v" rather than a fresh declaration).
updateScope :: Expr -> State MyState Bool
updateScope x = state $ \s ->
  let MyState { scope=(vars, outs) } = s
  in if Set.member x vars
     then (True, s)
     else (False, s { scope=(Set.insert x vars, outs) })
-- | Compile one statement into a COMPILED C statement string.
compileStmt :: Stmt -> State MyState Stmt
compileStmt NOP = return $ COMPILED ""
compileStmt (EXPR e) =
  do a <- compileExpr e
     return $ COMPILED $ printf "%s;" a
compileStmt (SET e1 e2) =
  do b <- compileExpr e2
     in_scope <- updateScope e1
     -- NOTE(review): partial pattern — assumes assignment targets are
     -- always VARs; confirm the parser guarantees this.
     let VAR x = e1
     return $ COMPILED $ if in_scope
                         then printf "%s = %s;" x b
                         else printf "obj_t* %s = %s;" x b
-- compileStmt _ (SETATTR e1 e2) =
--   do a <- compileExpr e1
--      b <- compileExpr e2
--      let stmt = SETATTR a b
--      return stmt
-- compileStmt _ (IF e block _ ) =
--   do a <- compileExpr e
--      b <- compileBlock False block
--      let stmt = IF a b Nothing
--      return stmt
compileStmt (RETURN e) =
  do a <- compileExpr e
     return $ COMPILED $ printf "return %s;" a
-- | Compile every statement of a block in order, threading the compiler
-- state left to right.  The previous hand-written cons recursion was
-- exactly the standard 'mapM' traversal.
compileBlock :: Block -> State MyState Block
compileBlock = mapM compileStmt
-- | Compile a whole program: run the block compiler from an empty state,
-- then record the remaining free variables as globals.
compileAST :: AST -> Module
compileAST ast =
  let empty_modl = Module { globals = Set.empty
                          , lambdas = Bimap.empty
                          , constants = Bimap.empty
                          , main = [] }
      init_state = MyState { ids=(0,0)
                           , scope=(Set.empty, Set.empty)
                           , modl=empty_modl }
      (main_, s) = runState (compileBlock ast) init_state
      MyState { scope=(globals_, _), modl=modl_ } = s
  in modl_ { globals=globals_, main=main_ }
-- | Concatenate the compiled statements of a block, one per line.
-- NOTE(review): partial — every statement must already be COMPILED.
printf_block :: Block -> String
printf_block block =
  concatMap (\(COMPILED s) -> s ++ "\n") block
-- | C initializer expression for a literal constant.
-- NOTE(review): only INT and STR constants are handled; other CONST
-- shapes would crash here — confirm none reach this point.
string_from_const :: Expr -> String
string_from_const (CONST (INT x)) = printf "_int(\"%s\")" x
string_from_const (CONST (STR x)) = printf "_str(\"%s\")" x
-- | C function definition for a numbered lambda: binds the single
-- argument from argv, then pops each captured variable off env.
string_from_lambda :: ((Expr, [Expr]), Id) -> String
string_from_lambda ((LAMBDA (VAR arg) block, env), id) = printf
  "obj_t *__LAMBDA_%d (size_t env_size, obj_t** env, size_t argc, obj_t **argv) {\n %s %s %s }\n"
  id
  (printf "obj_t* %s = *argv;\n" arg :: String)
  (concatMap (\(VAR x) -> printf "obj_t* %s = *env; env++;\n" x) env :: String)
  (printf_block block)
-- | Forward declaration for a numbered lambda.
lambda_prototype :: ((Expr, [Expr]), Id) -> String
lambda_prototype (_, id) = printf "obj_t *__LAMBDA_%d (size_t env_size, obj_t** env, size_t argc, obj_t **argv);\n" id
-- | Render a compiled 'Module' as a complete C translation unit:
-- constant declarations, lambda prototypes, lambda definitions, and a
-- main() that initializes the constants before running the main block.
emitModule :: Module -> String
emitModule m =
  let consts_ = concatMap
        (\(c, id) -> printf "%s = %s;\n" (printf_const id) (string_from_const c))
        (Bimap.toList $ constants m) :: String
      lambdas_ = concatMap
        string_from_lambda
        (Bimap.toList $ lambdas m) :: String
      main_ = printf
        "int main (int argc, char *argv[]) { %s %s return 0; }"
        consts_
        (printf_block (main m)) :: String
  in printf "#include <clrs.h>\n %s %s %s %s"
     (concatMap (\(_, id) -> "obj_t *" ++ printf_const id ++ ";\n") (Bimap.toList $ constants m))
     (concatMap lambda_prototype (Bimap.toList $ lambdas m))
     lambdas_
     main_
-- |Given a list of statements (AST), output the C intermediate representation. This output will
-- be piped to the host systems C compiler
emitAST :: AST -> String
emitAST = emitModule . compileAST
| ddirenzo/clrs | haskell/Emit.hs | mit | 7,425 | 0 | 23 | 2,278 | 2,337 | 1,253 | 1,084 | 146 | 5 |
{-# LANGUAGE ViewPatterns #-}
module Tinc.GhcPkg (
PackageDb
, GhcPkg(..)
, listGlobalPackages
) where
import System.Process
import Tinc.Package
import Tinc.Types
-- | Phantom tag: a @Path PackageDb@ points at a GHC package database.
data PackageDb
-- | Ability to query @ghc-pkg@; abstracted so tests can stub the call.
class (Functor m, Applicative m, Monad m) => GhcPkg m where
  readGhcPkg :: [Path PackageDb] -> [String] -> m String
instance GhcPkg IO where
  -- Shells out to the real @ghc-pkg@; the user package db is excluded so
  -- results reflect only the databases passed in.
  readGhcPkg (packageDbsToArgs -> packageDbs) args = do
    readProcess "ghc-pkg" ("--no-user-package-db" : "--simple-output" : packageDbs ++ args) ""
-- | Packages in the global database only (no extra databases supplied).
-- Relies on @--simple-output@ producing whitespace-separated entries.
listGlobalPackages :: GhcPkg m => m [Package]
listGlobalPackages = parsePackages <$> readGhcPkg [] ["list"]
  where
    parsePackages :: String -> [Package]
    parsePackages = map parsePackage . words
-- | Turn each database path into a @--package-db PATH@ argument pair.
packageDbsToArgs :: [Path PackageDb] -> [String]
packageDbsToArgs packageDbs = concatMap (\ packageDb -> ["--package-db", path packageDb]) packageDbs
| robbinch/tinc | src/Tinc/GhcPkg.hs | mit | 862 | 0 | 12 | 163 | 256 | 138 | 118 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module System.Directory.Watchman.Fields
( FileField(..)
, FileFieldLabel(..)
, renderFieldLabels
, parseFileFields
) where
import Control.Monad (forM)
import System.Directory.Watchman.WFilePath
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC
import Data.Int (Int64)
import qualified Data.Map.Strict as M
import Data.Map.Strict (Map)
import qualified Data.Sequence as Seq
import System.Directory.Watchman.FileType
import System.Directory.Watchman.BSER
import System.Directory.Watchman.BSER.Parser
-- | A decoded watchman file field: one constructor per field the
-- protocol can report (see 'FileFieldLabel' for the request side).
data FileField
    = Fname WFilePath -- ^ the filename, relative to the watched root
    | Fexists Bool -- ^ true if the file exists, false if it has been deleted
    | Fcclock ByteString -- ^ the "created clock"; the clock value when we first observed the file, or the clock value when it last switched from @not exists@ to @exists@.
    | Foclock ByteString -- ^ the "observed clock"; the clock value where we last observed some change in this file or its metadata.
    | Fctime Int -- ^ last inode change time measured in integer seconds
    | Fctime_ms Int -- ^ last inode change time measured in integer milliseconds
    | Fctime_us Int64 -- ^ last inode change time measured in integer microseconds
    | Fctime_ns Int64 -- ^ last inode change time measured in integer nanoseconds
    | Fctime_f Double -- ^ last inode change time measured in floating point seconds
    | Fmtime Int -- ^ modified time measured in integer seconds
    | Fmtime_ms Int -- ^ modified time measured in integer milliseconds
    | Fmtime_us Int64 -- ^ modified time measured in integer microseconds
    | Fmtime_ns Int64 -- ^ modified time measured in integer nanoseconds
    | Fmtime_f Double -- ^ modified time measured in floating point seconds
    | Fsize Int64 -- ^ file size in bytes
    | Fmode Int -- ^ file (or directory) mode expressed as a decimal integer
    | Fuid Int -- ^ the owning uid
    | Fgid Int -- ^ the owning gid
    | Fino Int -- ^ the inode number
    | Fdev Int -- ^ the device number
    | Fnlink Int -- ^ number of hard links
    | Fnew Bool -- ^ whether this entry is newer than the since generator criteria
    | Ftype FileType -- ^ the file type
    | Fsymlink_target (Maybe WFilePath) -- ^ the target of a symbolic link if the file is a symbolic link
    deriving (Show, Eq, Ord)
-- | Label naming a file field to request from watchman; each label
-- corresponds to the like-named 'FileField' constructor.
data FileFieldLabel
    = FLname -- ^ the filename, relative to the watched root
    | FLexists -- ^ true if the file exists, false if it has been deleted
    | FLcclock -- ^ the "created clock"; the clock value when we first observed the file, or the clock value when it last switched from !exists to exists.
    | FLoclock -- ^ the "observed clock"; the clock value where we last observed some change in this file or its metadata.
    | FLctime -- ^ last inode change time measured in integer seconds
    | FLctime_ms -- ^ last inode change time measured in integer milliseconds
    | FLctime_us -- ^ last inode change time measured in integer microseconds
    | FLctime_ns -- ^ last inode change time measured in integer nanoseconds
    | FLctime_f -- ^ last inode change time measured in floating point seconds
    | FLmtime -- ^ modified time measured in integer seconds
    | FLmtime_ms -- ^ modified time measured in integer milliseconds
    | FLmtime_us -- ^ modified time measured in integer microseconds
    | FLmtime_ns -- ^ modified time measured in integer nanoseconds
    | FLmtime_f -- ^ modified time measured in floating point seconds
    | FLsize -- ^ file size in bytes
    | FLmode -- ^ file (or directory) mode expressed as a decimal integer
    | FLuid -- ^ the owning uid
    | FLgid -- ^ the owning gid
    | FLino -- ^ the inode number
    | FLdev -- ^ the device number
    | FLnlink -- ^ number of hard links
    | FLnew -- ^ whether this entry is newer than the since generator criteria
    | FLtype -- ^ the file type
    | FLsymlink_target -- ^ the target of a symbolic link if the file is a symbolic link
    deriving (Show, Eq, Ord)
-- | Wire name of each field label, exactly as the watchman protocol
-- expects it.
renderFileFieldLabel :: FileFieldLabel -> ByteString
renderFileFieldLabel label = case label of
    FLname -> "name"
    FLexists -> "exists"
    FLcclock -> "cclock"
    FLoclock -> "oclock"
    FLctime -> "ctime"
    FLctime_ms -> "ctime_ms"
    FLctime_us -> "ctime_us"
    FLctime_ns -> "ctime_ns"
    FLctime_f -> "ctime_f"
    FLmtime -> "mtime"
    FLmtime_ms -> "mtime_ms"
    FLmtime_us -> "mtime_us"
    FLmtime_ns -> "mtime_ns"
    FLmtime_f -> "mtime_f"
    FLsize -> "size"
    FLmode -> "mode"
    FLuid -> "uid"
    FLgid -> "gid"
    FLino -> "ino"
    FLdev -> "dev"
    FLnlink -> "nlink"
    FLnew -> "new"
    FLtype -> "type"
    FLsymlink_target -> "symlink_target"
-- | Decode a single field value returned by watchman into its typed
-- 'FileField' representation.
--
-- Type mismatches are now reported uniformly as parser failures via
-- 'fail' (previously several cases used 'error', which would crash the
-- whole program instead of failing the parse like the "name"/"exists"
-- cases do).  The cclock/oclock decoders remain unimplemented
-- placeholders and still 'error' if reached.
parseFileField :: FileFieldLabel -> BSERValue -> Parser FileField
parseFileField FLname (BSERString s) = pure $ Fname (WFilePath s)
parseFileField FLname _ = fail "\"name\" field is not a string"
parseFileField FLexists (BSERBool b) = pure $ Fexists b
parseFileField FLexists _ = fail "\"exists\" field is not a boolean"
parseFileField FLcclock _ = error "TODO 32839423526"
parseFileField FLoclock _ = error "TODO 32839423526"
parseFileField FLctime int = case readBSERInt int of { Right n -> pure (Fctime n); Left err -> fail err }
parseFileField FLctime_ms int = case readBSERInt int of { Right n -> pure (Fctime_ms n); Left err -> fail err }
parseFileField FLctime_us int = case readBSERInt64 int of { Right n -> pure (Fctime_us n); Left err -> fail err }
parseFileField FLctime_ns int = case readBSERInt64 int of { Right n -> pure (Fctime_ns n); Left err -> fail err }
parseFileField FLctime_f (BSERReal r) = pure $ Fctime_f r
parseFileField FLctime_f _ = fail "\"ctime_f\" field is not a real"
parseFileField FLmtime int = case readBSERInt int of { Right n -> pure (Fmtime n); Left err -> fail err }
parseFileField FLmtime_ms int = case readBSERInt int of { Right n -> pure (Fmtime_ms n); Left err -> fail err }
parseFileField FLmtime_us int = case readBSERInt64 int of { Right n -> pure (Fmtime_us n); Left err -> fail err }
parseFileField FLmtime_ns int = case readBSERInt64 int of { Right n -> pure (Fmtime_ns n); Left err -> fail err }
parseFileField FLmtime_f (BSERReal r) = pure $ Fmtime_f r
parseFileField FLmtime_f _ = fail "\"mtime_f\" field is not a real"
parseFileField FLsize int = case readBSERInt64 int of { Right n -> pure (Fsize n); Left err -> fail err }
parseFileField FLmode int = case readBSERInt int of { Right n -> pure (Fmode n); Left err -> fail err }
parseFileField FLuid int = case readBSERInt int of { Right n -> pure (Fuid n); Left err -> fail err }
parseFileField FLgid int = case readBSERInt int of { Right n -> pure (Fgid n); Left err -> fail err }
parseFileField FLino int = case readBSERInt int of { Right n -> pure (Fino n); Left err -> fail err }
parseFileField FLdev int = case readBSERInt int of { Right n -> pure (Fdev n); Left err -> fail err }
parseFileField FLnlink int = case readBSERInt int of { Right n -> pure (Fnlink n); Left err -> fail err }
parseFileField FLnew (BSERBool b) = pure $ Fnew b
parseFileField FLnew _ = fail "\"new\" field is not a boolean"
parseFileField FLtype (BSERString s) = pure (Ftype (fileTypeFromChar s))
parseFileField FLtype _ = fail "\"type\" field is not a string"
parseFileField FLsymlink_target BSERNull = pure $ Fsymlink_target Nothing
parseFileField FLsymlink_target (BSERString s) = pure $ Fsymlink_target (Just (WFilePath s))
parseFileField FLsymlink_target _ = fail "\"symlink_target\" field is not a string or null"
-- | Build the @"fields"@ request entry from the labels to ask for.
-- An empty list is a programmer error (watchman requires at least one).
renderFieldLabels :: [FileFieldLabel] -> Map ByteString BSERValue
renderFieldLabels [] = error "Fields list is empty"
renderFieldLabels labels =
    M.singleton "fields" fieldArray
  where
    fieldArray = BSERArray (fmap (BSERString . renderFileFieldLabel) (Seq.fromList labels))
-- | Decode a response row into the fields that were requested.  With a
-- single requested label the row is the bare value; otherwise it must be
-- an object keyed by field name.
parseFileFields :: [FileFieldLabel] -> BSERValue -> Parser [FileField]
parseFileFields [onlyLabel] val = (: []) <$> parseFileField onlyLabel val
parseFileFields labels (BSERObject o) =
    forM labels $ \lbl ->
        (o .: renderFileFieldLabel lbl) >>= parseFileField lbl
parseFileFields _ _ = fail "Not an Object"
| bitc/hs-watchman | src/System/Directory/Watchman/Fields.hs | mit | 8,486 | 0 | 14 | 1,575 | 1,896 | 998 | 898 | 138 | 16 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE InstanceSigs #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distributive
-- Copyright : HASLAb team, University of Minho
-- License : MIT
--
-- Maintainer : Victor Miraldo <victor.cacciari@gmail.com>
-- Stability : provisional
-- Portability : portable
--
--This module provides a distributive law for Monads. It's a nice alternative
--to transformers (when you dont have them available).
----------------------------------------------------------------------------
module MMM.Core.Distributive (
Distributive(..)
, Lift(..)
, lft
, unlft
) where
import MMM.Core.Probability
import MMM.Qais
import Control.Monad
import Control.Monad.Free as F
import Data.Maybe
import MMM.Core.FuncComb
--------------------------------------------------------------------------------
-- * Distributive Law
-- |The distributive law on monads is a natural transformation,
-- providing, for each object, a function of type @ft -> tf@
class (Functor f, Functor t) => Distributive t f where
  lambda :: f (t a) -> t (f a)
--------------------------------------------------------------------------------
-- ** Some Instances
-- If we have a monad @f@, @Maybe@ is trivially distributed through @f@
instance (Functor f, Monad f) => Distributive f Maybe where
  lambda = maybe (return Nothing) (fmap Just)
-- Any monad distributes over lists, 'Control.Monad.sequence' is the
-- distributive law
instance (Functor f, Monad f) => Distributive f [] where
  lambda = sequence
-- As we saw in the JLAMP14 paper, the free monads are also distributive.
instance (Functor f, Functor g, Distributive f g)
  => Distributive f (Free g) where
  lambda = cataF (either (fmap Pure) (fmap Free . lambda))
-- We need some auxiliar definitions over the Free Monad
-- | Unfold one layer of a free-monad value.
outF :: (Functor g) => Free g a -> Either a (g (Free g a))
outF (Pure a) = Left a
outF (Free f) = Right f
-- | Catamorphism (fold) over a free monad.
cataF :: (Functor g) => (Either a (g b) -> b) -> Free g a -> b
cataF gene = gene . (id -|- fmap (cataF gene)) . outF
--------------------------------------------------------------------------------
-- * Distributive Law
-- The only reason for using this constructor is
-- the hability to use standard monads, instead of
-- transformers, in our behaviour stack.
-- The best example is the Dist monad, we have no transformer
-- version of such monad yet, in the paper, we handle machines
-- with behaviour Dist . MaybeT; This constructor allows
-- us to handle them in Haskell too.
-- |We know that the lifting of a functor (monad) by a monad is equivalent to
-- the existence of a distributive law. The Lift object acts as a Monad Transformer
-- for monads with a distributive law.
data Lift :: (* -> *) -> (* -> *) -> * -> *
  where
    L :: (Monad t, Functor f) => t (f a) -> Lift t f a
-- | Unwrap a lifted value back to the nested @t (f a)@ form.
unlft :: (Monad t, Distributive t f) => Lift t f a -> t (f a)
unlft (L f) = f
-- |We have to provide a corresponding lift function, like the MonadTrans class.
lft :: (Monad f, Monad t, Distributive t f) => f a -> Lift t f a
lft = L . return
-- |In fact, if we have two monads @t@ and @f@, such that @t@ and @f@ distribute,
-- then @tf@ is also a monad.
-- NOTE(review): 'return' is defined directly with no Applicative
-- instance for Lift; presumably this targets a pre-AMP GHC (< 7.10) —
-- verify against the build configuration.
instance (Monad f, Monad t, Distributive t f)
  => Monad (Lift t f) where
  return = L . return . return
  (L x) >>= k = L $ x >>= fmap join . lambda . (>>= return . unlft . k)
-- |Lifting of functors is, obviously, a functor.
instance (Distributive t f, Monad t) => Functor (Lift t f) where
  fmap f a = L $ fmap (fmap f) (unlft a)
-- |Lifting of Strong Monads remains a Strong Monad (in the base cateogry, Hask in
-- our case).
instance (Distributive t f, Strong f, Strong t) => Strong (Lift t f) where
  rstr (x, b) = do a <- x; return (a, b)
  lstr (b, x) = do a <- x; return (b, a)
--------------------------------------------------------------------------------
-- ** Routine Instances
-- |We might want to show lifted values.
instance (Show (t (f a))) => Show (Lift t f a) where
  show (L f) = show f
-- |Or compare lifted values.
instance (Eq (t (f a))) => Eq (Lift t f a) where
  (L x) == (L y) = x == y
--------------------------------------------------------------------------------
-- ** Testing
-- |Constant functor
data K a = K
  deriving (Show, Ord, Eq)
instance Functor K where
  fmap _ = const K
instance (Functor f, Monad f) => Distributive f K where
  lambda _ = return K
-- |Maybe as a instance of the free monad for K
type FreeMaybe = Free K
-- | The "Nothing" of 'FreeMaybe'.
nothing :: FreeMaybe a
nothing = Free K
-- |Totalization of sqrt using the FreeMaybe
sqrtTotal :: Float -> FreeMaybe Float
sqrtTotal n
  | n < 0 = nothing
  | otherwise = return $ sqrt n
-- |Lifting of sqrtTotal
sqrtFaulty :: Float -> Lift Dist FreeMaybe Float
sqrtFaulty = lft . sqrtTotal
-- |Faulty addition. Works 90% of the time, return m on the other 10%.
-- The 'schoice' combinator picks the correct branch with probability 0.9
-- and the "stuck at m" branch otherwise.
additionFaulty :: Float -> Float -> Lift Dist FreeMaybe Float
additionFaulty m = L . schoice 0.9 (add m) (err m)
  where
    add m n = return $ m + n
    err m _ = return $ m
-- |Infix version of 'additionFaulty' (eta-reduced; the explicit
-- arguments were redundant).
(.+) :: Float -> Float -> Lift Dist FreeMaybe Float
(.+) = additionFaulty
-- |Quadratic Solver
-- Solves a*x^2 + b*x + c = 0 with Bhaskara's formula, but every addition
-- and the square root go through the faulty/partial operations above, so
-- the result is a distribution over possibly-failing outcomes.
bhaskara :: Float -> Float -> Float -> Lift Dist FreeMaybe (Float, Float)
bhaskara a b c
  = do
      -- Discriminant b^2 - 4ac, assembled with the faulty addition.
      let d1 = b^2
      let d2 = -4*a*c
      dsqr <- (d1 .+ d2) >>= sqrtFaulty
      let b' = -b
      x1 <- b' .+ dsqr
      x2 <- b' .+ (-dsqr)
      return (x1 / (2*a), x2 / (2*a))
-- | Evaluate the quadratic a*x^2 + b*x + c at x (the exact reference
-- function the faulty solver above approximates).
quadratic :: Float -> Float -> Float -> Float -> Float
quadratic a b c x = quadTerm + linTerm + c
  where
    quadTerm = a * x ^ 2
    linTerm = b * x
| VictorCMiraldo/mmm | MMM/Core/Distributive.hs | mit | 5,835 | 0 | 12 | 1,292 | 1,572 | 837 | 735 | 83 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Partials.KeywordList (KeywordList(..)) where
import Control.Monad (forM_)
import GHC.Exts (fromString)
import Internal.FileDB
import Internal.Partial
import Text.Blaze.Html5 ((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
-- | Partial "KeywordList" displays a sequence of <div class="chip">, one for each keyword.
data KeywordList = KeywordList
-- | Plugs the partial into the framework: no routes, rendered by
-- '_partial', addressed by the name "keywordlist".
instance Partial_ KeywordList where
    partialRoutes_ _ = []
    partialRender_ = _partial
    partialName_ _ = "keywordlist"
-- | Render the page's keywords as chip divs inside a wrapping
-- <div class="keywordlist">; only the Page argument is used.
_partial :: KeywordList -> FileDB -> Page -> Params -> H.Html
_partial _ _ p _ = H.div ! A.class_ (fromString "keywordlist") $
    forM_ (keywords p) ((H.div ! keywordClass) . H.toHtml)
-- | Shared class attribute for a single keyword chip.
keywordClass :: H.Attribute
keywordClass = A.class_ (fromString "chip")
| mayeranalytics/nanoPage | src/Partials/KeywordList.hs | mit | 933 | 0 | 11 | 233 | 227 | 130 | 97 | 19 | 1 |
module Tasks where
-- | Element of the recurrence a(1)=1, a(2)=2, a(3)=3,
-- a(k+3) = a(k+2) + a(k+1) - 2*a(k), computed iteratively with a
-- sliding window of the last three values (0-based index).
seqA :: Integer -> Integer
seqA 0 = 1
seqA 1 = 2
seqA n = go 1 2 3 n
  where
    go a _ _ 0 = a
    go a b c k = go b c (b + c - 2 * a) (k - 1)
-- | Pair of (sum of decimal digits, number of decimal digits) of the
-- absolute value of the argument; zero counts as one digit.
sum'n'count :: Integer -> (Integer, Integer)
sum'n'count n = (digitSum 0 m, digitCount 0 m)
  where
    m = abs n
    digitSum acc 0 = acc
    digitSum acc x = digitSum (acc + x `mod` 10) (x `div` 10)
    digitCount 0 0 = 1
    digitCount acc 0 = acc
    digitCount acc x = digitCount (acc + 1) (x `div` 10)
--integration' :: (Double -> Double) -> Double -> Double -> Double
--integration' f a b = helper f a b (abs (b - a) / 1000)
-- where
-- helper f a b limit
-- | abs (b - a) < limit = (f a + f b) * (b - a) / 2
-- | otherwise =
-- let
-- mid = (b + a) / 2
-- in helper f a mid limit +
-- helper f mid b limit
-- | Numeric integral of f over [a, b] by the trapezoid rule with 10000
-- equal slices.
--
-- Fixes two defects in the previous version: the step kept 'abs' of
-- (b - a), so a reversed interval (b < a) silently integrated the wrong
-- range instead of negating the result; and the counter added one extra
-- trapezoid, covering one slice past b.  The step now keeps the sign of
-- (b - a) and exactly 10000 slices span [a, b].
integration :: (Double -> Double) -> Double -> Double -> Double
integration f a b = go a 0 steps
  where
    steps = 10000 :: Int
    delta = (b - a) / fromIntegral steps
    go _ acc 0 = acc
    go x acc n = go x' (acc + (f x + f x') * delta / 2) (n - 1)
      where x' = x + delta
-- | Difference between the two roots of a*x^2 + b*x + c (second minus
-- first, i.e. the root using +sqrt(D) minus the one using -sqrt(D)).
rootsDiff :: Double -> Double -> Double -> Double
rootsDiff a b c = snd rs - fst rs
  where
    rs = roots a b c
-- | Both roots of a*x^2 + b*x + c via the quadratic formula:
-- ((-b - sqrt D) / 2a, (-b + sqrt D) / 2a) with D = b^2 - 4ac.
roots :: Double -> Double -> Double -> (Double, Double)
roots a b c =
  let discRoot = sqrt (b ^ 2 - 4 * a * c)
      denom = 2 * a
  in ((-b - discRoot) / denom, (-b + discRoot) / denom)
-- | A list containing @n@ copies of @x@, via the standard 'replicate'.
-- Non-positive counts yield the empty list; the previous hand-rolled
-- accumulator loop diverged on negative counts.
nTimes :: a -> Int -> [a]
nTimes x n = replicate n x
-- | Split a list into maximal runs of equal adjacent elements,
-- e.g. [1,1,2,3,3] -> [[1,1],[2],[3,3]].
groupElems :: Eq a => [a] -> [[a]]
groupElems [] = []
groupElems xs@(first:_) = run : groupElems rest
  where
    (run, rest) = span (== first) xs
-- | Element-wise sum of three lists; shorter lists are treated as padded
-- with zeros, so the result has the length of the longest input.
sum3 :: Num a => [a] -> [a] -> [a] -> [a]
sum3 as bs cs
  | null as && null bs && null cs = []
  | otherwise = (firstOf as + firstOf bs + firstOf cs)
              : sum3 (restOf as) (restOf bs) (restOf cs)
  where
    firstOf [] = 0
    firstOf (x:_) = x
    restOf [] = []
    restOf (_:xs) = xs
-- | Textbook quicksort: the head is the pivot; strictly smaller elements
-- are sorted to its left, the rest (duplicates included) to its right.
qsort :: Ord a => [a] -> [a]
qsort [] = []
qsort (pivot:rest) =
  qsort [y | y <- rest, y < pivot] ++ [pivot] ++ qsort [y | y <- rest, y >= pivot]
-- | The infinite Fibonacci sequence 0, 1, 1, 2, 3, 5, ... defined
-- corecursively: each element is the sum of the two before it.
fibStream :: [Integer]
fibStream = 0 : 1 : zipWith (+) fibStream (tail fibStream)
-- | An odd integer; the invariant that the payload is odd is assumed,
-- not enforced by the constructor.
data Odd = Odd Integer
  deriving (Eq, Show)
-- | Enum over the odd integers.  toEnum/fromEnum use the mapping
-- Odd (2k+1) <-> k; the enumFrom family enumerates the underlying
-- Integers and keeps the odd ones.  Since the step between two odd
-- endpoints is always even, the filter drops nothing intended.
instance Enum Odd where
  toEnum x = Odd $ toInteger $2 * x + 1
  fromEnum (Odd x) = div (fromInteger x) 2
  succ (Odd x) = Odd (x + 2)
  pred (Odd x) = Odd (x - 2)
  enumFrom (Odd x) =
    map Odd $
    filter odd $
    enumFrom (toInteger x)
  enumFromThen (Odd x) (Odd y) =
    map Odd $
    filter odd $
    enumFromThen (toInteger x) (toInteger y)
  enumFromTo (Odd x) (Odd y) =
    map Odd $
    filter odd $
    enumFromTo (toInteger x) (toInteger y)
  enumFromThenTo (Odd x) (Odd y) (Odd z) =
    map Odd $
    filter odd $
    enumFromThenTo (toInteger x) (toInteger y) (toInteger z)
-- | Binary digit.
data Bit = Zero | One deriving (Show)
-- | Sign of a number.
data Sign = Minus | Plus deriving (Eq, Show)
-- | Sign-magnitude binary integer; the bit list is little-endian
-- (least significant bit first).  Zero is represented as Z Plus [].
data Z = Z Sign [Bit] deriving Show
-- | Addition via the Integer round trip.
add :: Z -> Z -> Z
add z1 z2 = toZ $ fromZ z1 + fromZ z2
-- | Decode: fold the little-endian bits into an Integer, then apply
-- the sign.
fromZ :: Z -> Integer
fromZ (Z s x) | s == Minus = (-1) * f x
              | otherwise = f x
  where
    f = foldr (\x rest -> i x + 2 * rest) 0
    i One = 1
    i _ = 0
-- | Encode: sign from the comparison with 0, bits by repeated div/mod 2
-- of the absolute value (so toZ 0 == Z Plus []).
toZ :: Integer -> Z
toZ x | x < 0 = Z Minus $ f $ abs x
      | otherwise = Z Plus $ f x
  where
    f 0 = []
    f x = i (mod x 2) : f (div x 2)
    i 1 = One
    i 0 = Zero
-- | Multiplication via the Integer round trip.
mul :: Z -> Z -> Z
mul z1 z2 = toZ $ fromZ z1 * fromZ z2
import Data.Char (isDigit)
import Data.Text (breakOn, pack, unpack)
import Data.List (find)
-- | Failure modes of 'parsePerson': malformed line, missing mandatory
-- key, or a non-numeric age (carrying the offending text).
data Error = ParsingError |
             IncompleteDataError |
             IncorrectDataError String
  deriving Show
data Person = Person {
  firstName :: String,
  lastName :: String,
  age :: Int
} deriving Show
-- | Parse "key = value" lines into a Person.  Check order matters:
-- any malformed line anywhere yields ParsingError first; then each
-- mandatory key is looked up in turn (IncompleteDataError on the first
-- one missing); finally the age must be all digits, which makes the
-- subsequent 'read' safe.
parsePerson :: String -> Either Error Person
parsePerson str = let pairs = map splitNameValue $ lines str in
  case find (\(x, y) -> x == "" || y == "") pairs of
    (Just _) -> Left ParsingError
    Nothing ->
      case lookup "firstName" pairs of
        Nothing -> Left IncompleteDataError
        (Just fn) ->
          case lookup "lastName" pairs of
            Nothing -> Left IncompleteDataError
            (Just ln) ->
              case lookup "age" pairs of
                Nothing -> Left IncompleteDataError
                (Just strAge) ->
                  case all isDigit strAge of
                    False -> Left $ IncorrectDataError strAge
                    True -> Right $ Person { firstName = fn, lastName = ln, age = read strAge }
-- | Split a line on the first " = " separator; 'drop 3' strips the
-- separator from the remainder.  A line without the separator produces
-- an empty value, which parsePerson reports as ParsingError.
splitNameValue :: String -> (String, String)
splitNameValue str = (unpack x, drop 3 $ unpack y) where
  (x, y) = f str
  f = breakOn (pack " = ") . pack
-- | Peano-encoded natural numbers.
data Nat = Zero | Suc Nat
  deriving Show
-- | The Integer a 'Nat' denotes.
fromNat :: Nat -> Integer
fromNat Zero = 0
fromNat (Suc n) = fromNat n + 1
-- | Encode a non-negative Integer as a 'Nat'.  Negative input is now
-- rejected with an explicit error; the previous version recursed
-- forever on it.
toNat :: Integer -> Nat
toNat n
  | n < 0 = error "toNat: negative argument"
  | n == 0 = Zero
  | otherwise = Suc (toNat (n - 1))
-- | Addition via the Integer round trip.
add :: Nat -> Nat -> Nat
add x y = toNat $ fromNat x + fromNat y
-- | Multiplication via the Integer round trip.
mul :: Nat -> Nat -> Nat
mul x y = toNat $ fromNat x * fromNat y
-- | Factorial, using an accumulator over Integer for the product.
fac :: Nat -> Nat
fac x = toNat $ f (fromNat x) 1
  where
    f 0 acc = acc
    f n acc = f (n - 1) (n * acc)
-- Watermelon
-- http://www.codewars.com/kata/55192f4ecd82ff826900089e
module Codewars.Kata.Watermelon where
-- | A watermelon of weight w can be split into two positive even parts
-- exactly when w is even and greater than 2.
divide :: Integer -> Bool
divide w = w > 2 && even w
| gafiatulin/codewars | src/8 kyu/Watermelon.hs | mit | 193 | 0 | 8 | 36 | 48 | 25 | 23 | 4 | 1 |
module Main where
import Carter
import System.Environment
-- | Entry point: takes the input file as the first command-line
-- argument, sums its at-bats via 'getAtBatsSum' and prints the total.
main :: IO ()
main = do
  -- NOTE(review): the (file:_) pattern dies with an opaque match failure
  -- when no argument is given — consider printing a usage message.
  (file:_) <- getArgs
  summed <- getAtBatsSum file
  putStrLn $ "Total atBats was: " ++ (show summed)
import Criterion.Main
import Data.Monoid
import Data.Text as T
import Melvin.Damn.Tablumps
import Prelude
-- | Benchmark 'delump' on bold-tablump inputs of growing size.  The
-- previous version spelled out four near-identical cases; they are now
-- generated from the size list, producing the same bench names
-- ("100", "300", "500", "700") and the same inputs.
main :: IO ()
main = defaultMain
    [ bgroup "test"
        [ bench (show (10 * n)) $ nf delump (sample n)
        | n <- [10, 30, 50, 70]
        ]
    ]
  where
    -- A bold tablump wrapping @n@ ampersands: "&b\t" ++ "&"*n ++ "&/b\t".
    sample n = "&b\t" <> T.replicate n "&" <> "&/b\t"
| pikajude/melvin | benchmarks/parser.hs | mit | 519 | 0 | 15 | 148 | 185 | 94 | 91 | 12 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UnicodeSyntax #-}
-- | This is a demonstration of using open recursion to implement the
-- judgements of a small theory modularly.
module Control.Monad.Trans.Open.Example
( -- * Syntax
Ty(..)
, Tm(..)
-- * Judgement Forms
, J(..)
-- * Theories
, Theory
, unitThy
, prodThy
, combinedThy
-- * Result
, judge
-- ** Injecting Effects
, traceThy
, Pack(..)
, tracedJudge
) where
import Control.Applicative
import Control.Monad.Open.Class
import Control.Monad.Trans.Open
import Control.Monad.Trans
import Control.Monad.Identity
import Control.Monad.Trans.Maybe
import Control.Monad.Writer
import Data.Monoid
-- | The syntax of types in our theory.
data Ty
= Unit
| Prod Ty Ty
deriving (Eq, Show)
-- | The syntax of terms in our theory.
--
data Tm
= Ax
| Pair Tm Tm
deriving (Eq, Show)
-- | Next, the forms of judgements are inductively defined. We index the
-- 'J' type by its synthesis.
--
-- ['DisplayTy'] To know @'DisplayTy' α@ is to know the textual notation for the type @α@.
-- ['DisplayTm'] To know @'DisplayTm' m@ is to know the textual notation for the term @m@.
-- ['Check'] To know @'Check' α m@ is to know that @m@ is a canonical verification of @α@.
--
data J a where
  DisplayTy ∷ Ty → J String
  DisplayTm ∷ Tm → J String
  Check ∷ Ty → Tm → J Bool

deriving instance Show (J a)

-- | A @'Theory' j@ is an open, partial implementation of the judgements
-- defined by the judgement signature @j@. Theories may be composed, since
-- @'Monoid' ('Theory' j)@ holds.
--
-- The carrier monad and synthesis type are left open, so a theory can run
-- in any 'MonadOpen' instance whose 'Alternative' supplies failure for the
-- judgements the theory declines to handle.
type Theory j
  = ( Monad m
    , Alternative m
    , MonadOpen (j a) a m
    )
  ⇒ Op (j a) m a
-- | A 'Theory' implementing the judgements as they pertain to the 'Unit'
-- type former. Any judgement about another type former is declined via
-- 'empty', leaving it for a composed theory to handle.
unitThy :: Theory J
unitThy = Op $ \j -> case j of
  DisplayTy Unit -> return "unit"
  DisplayTm Ax -> return "ax"
  Check Unit m -> return (m == Ax)
  _ -> empty
-- | A 'Theory' implementing the judgments as they pertain to the 'Prod'
-- type former; recursive positions are delegated back to the closed
-- theory through 'call'.
prodThy :: Theory J
prodThy = Op $ \j -> case j of
  DisplayTy (Prod a b) -> do
    sa <- call (DisplayTy a)
    sb <- call (DisplayTy b)
    return ("(" ++ sa ++ " * " ++ sb ++ ")")
  DisplayTm (Pair m n) -> do
    sm <- call (DisplayTm m)
    sn <- call (DisplayTm n)
    return ("<" ++ sm ++ ", " ++ sn ++ ">")
  -- a product is verified exactly by a pair of verifications
  Check (Prod a b) (Pair m n) ->
    (&&) <$> call (Check a m) <*> call (Check b n)
  Check (Prod _ _) _ -> return False
  _ -> empty
-- | The horizontal composition of the two above theories.
--
-- @
-- 'combinedThy' = 'unitThy' '<>' 'prodThy'
-- @
--
combinedThy ∷ Theory J
combinedThy = unitThy <> prodThy

-- | Judgements may be tested through the result of closing the theory.
--
-- @
-- 'judge' = 'close' 'combinedThy'
-- @
--
-- >>> judge $ DisplayTy $ Prod Unit (Prod Unit Unit)
-- "(unit * (unit * unit))"
--
-- >>> judge $ DisplayTm $ Pair Ax (Pair Ax Ax)
-- "<ax, <ax, ax>>"
--
-- >>> judge $ Check (Prod Unit Unit) Ax
-- False
--
-- >>> judge $ Check (Prod Unit (Prod Unit Unit)) (Pair Ax (Pair Ax Ax))
-- True
--
-- No type signature on purpose: with NoMonomorphismRestriction 'judge'
-- stays polymorphic in the monad the caller chooses.
judge = close combinedThy

-- | Existential wrapper hiding the synthesis index of a judgement.
data Pack φ = forall a. Pack (φ a)

instance Show (Pack J) where
  show (Pack j) = show j
-- | A theory that proves nothing but records every judgement it is asked
-- about: each query is appended to the writer log and then declined via
-- 'empty', so a subsequently composed theory can still answer it.
traceThy
  :: ( Monad m
     , Alternative m
     , MonadOpen (j a) a m
     , MonadWriter [Pack J] m
     )
  => Op (J a) m a
traceThy = Op $ \j -> tell [Pack j] >> empty
-- | A traced judging routine is constructed by precomposing 'traceThy' onto the main theory.
--
-- @
-- 'tracedJudge' j = 'runIdentity' . 'runWriterT' . 'runMaybeT' $ ('close' $ 'traceThy' '<>' 'combinedThy') j
-- @
--
-- >>> tracedJudge $ Check (Prod Unit Unit) (Pair Ax Ax)
-- (Just True,[Check (Prod Unit Unit) (Pair Ax Ax),Check Unit Ax,Check Unit Ax])
--
-- >>> tracedJudge $ Check (Prod Unit Unit) (Pair Ax (Pair Ax Ax))
-- (Just False,[Check (Prod Unit Unit) (Pair Ax (Pair Ax Ax)),Check Unit Ax,Check Unit (Pair Ax Ax)])
--
-- The concrete carrier is @MaybeT (Writer [Pack J]) b@: 'MaybeT' supplies
-- the 'Alternative' used for failure, the writer accumulates the trace.
tracedJudge ∷ J b → (Maybe b, [Pack J])
tracedJudge j = runIdentity . runWriterT . runMaybeT $ (close $ traceThy <> combinedThy) j
| jonsterling/hs-monad-open | src/Control/Monad/Trans/Open/Example.hs | mit | 4,277 | 0 | 16 | 928 | 875 | 493 | 382 | -1 | -1 |
{-| Module describing a node group.
-}
{-
Copyright (C) 2010 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.Group
( Group(..)
, List
, AssocList
-- * Constructor
, create
, setIdx
) where
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Types as T
-- * Type declarations

-- | The node group type.
data Group = Group
  { name        :: String        -- ^ The group name (used as the alias)
  , uuid        :: T.GroupID     -- ^ The UUID of the group
  , idx         :: T.Gdx         -- ^ Internal index for book-keeping
  , allocPolicy :: T.AllocPolicy -- ^ The allocation policy for this group
  } deriving (Show, Read, Eq)
-- Note: we use the name as the alias, and the UUID as the official
-- name
instance T.Element Group where
  nameOf = uuid
  idxOf = idx
  setAlias = setName
  -- the right-hand side refers to this module's top-level 'setIdx'
  -- (the class method is only in scope qualified as T.setIdx)
  setIdx = setIdx
  allNames n = [name n, uuid n]
-- | A simple name for the int, node association list.
type AssocList = [(T.Gdx, Group)]

-- | A simple name for a node map (indexed container of groups).
type List = Container.Container Group
-- * Initialization functions

-- | Create a new group. The internal index starts at -1 and is meant
-- to be assigned later via 'setIdx' while the data structures are built.
create :: String -> T.GroupID -> T.AllocPolicy -> Group
create grp_name grp_uuid grp_policy =
  Group { name = grp_name
        , uuid = grp_uuid
        , idx = -1
        , allocPolicy = grp_policy
        }
-- | Changes the internal index.
--
-- This is used only during the building of the data structures.
setIdx :: Group -> T.Gdx -> Group
setIdx t i = t {idx = i}
-- | Changes the alias.
--
-- This is used only during the building of the data structures.
setName :: Group -> String -> Group
setName t s = t { name = s }
| ganeti/htools | Ganeti/HTools/Group.hs | gpl-2.0 | 2,335 | 0 | 9 | 599 | 322 | 198 | 124 | 32 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DefaultSignatures, DeriveFunctor, DeriveFoldable, DeriveTraversable, OverloadedStrings #-}
module Lib.Cmp
( ComparisonResult(..), Reasons
, Cmp(..)
, eqShow, eq
, cmpGetter, cmpGetterBy
) where
import Prelude.Compat hiding (show)
import Data.ByteString (ByteString)
import Data.Monoid((<>))
import Lib.Show (show)
type Reasons = [ByteString]
-- | The outcome of comparing two values: 'Equals', or 'NotEquals'
-- carrying a reason describing the mismatch.
data ComparisonResult reason = NotEquals reason | Equals
  deriving (Eq, Ord, Functor, Foldable, Traversable)

-- | 'Equals' is the identity; mismatch reasons accumulate. Defined via
-- 'Semigroup' because on base >= 4.11 'Semigroup' is a superclass of
-- 'Monoid', so a lone 'mappend' definition no longer compiles.
instance Semigroup reason => Semigroup (ComparisonResult reason) where
  NotEquals x <> NotEquals y = NotEquals (x <> y)
  Equals <> x = x
  x <> Equals = x

instance Monoid reason => Monoid (ComparisonResult reason) where
  mempty = Equals
  mappend = (<>)
-- | Types that can be compared yielding a list of human-readable
-- mismatch reasons. The default implementation falls back to
-- 'eqShow' for types that are 'Eq' and 'Show'.
class Cmp a where
  cmp :: a -> a -> ComparisonResult Reasons
  default cmp :: (Eq a, Show a) => a -> a -> ComparisonResult Reasons
  cmp = eqShow
-- | Compare two showable values for equality, rendering an
-- @x /= y@ reason on mismatch.
eqShow :: (Eq a, Show a) => a -> a -> ComparisonResult Reasons
eqShow x y =
  if x == y
    then Equals
    else NotEquals [show x <> " /= " <> show y]

-- | Compare two values for equality, reporting the supplied reason on
-- mismatch.
eq :: Eq a => reason -> a -> a -> ComparisonResult reason
eq reason x y
  | x /= y = NotEquals reason
  | otherwise = Equals
-- | Compare a projected component of two values with the given
-- comparison function, prefixing every reported reason with the
-- component's label.
cmpGetterBy ::
  (b -> b -> ComparisonResult Reasons) ->
  ByteString -> (a -> b) -> a -> a -> ComparisonResult Reasons
cmpGetterBy f str getter x y =
  fmap (map addLabel) (f (getter x) (getter y))
  where
    addLabel reason = (str <> ": ") <> reason

-- | 'cmpGetterBy' specialised to the component's own 'cmp'.
cmpGetter :: Cmp b => ByteString -> (a -> b) -> a -> a -> ComparisonResult Reasons
cmpGetter str getter = cmpGetterBy cmp str getter
| sinelaw/buildsome | src/Lib/Cmp.hs | gpl-2.0 | 1,452 | 0 | 11 | 294 | 556 | 291 | 265 | 39 | 1 |
-- -*-haskell-*-
-- Vision (for the Voice): an XMMS2 client.
--
-- Author: Oleg Belozeorov
-- Created: 4 Mar. 2009
--
-- Copyright (C) 2009-2010 Oleg Belozeorov
--
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License as
-- published by the Free Software Foundation; either version 3 of
-- the License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
--
{-# LANGUAGE Rank2Types, DeriveDataTypeable #-}
module Properties.Model
( lookup
, initModel
, WithModel
, withModel
, property
, propertyMap
, propertyStore
, getProperties
, setProperties
, propertiesGeneration
) where
import Prelude hiding (lookup)
import Control.Concurrent.STM
import Control.Concurrent.MVar
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Typeable
import Data.Env
import Graphics.UI.Gtk
import Config
import Registry
import Environment
import Properties.Property
-- | Registry key under which the 'Model' is stored.
data Ix = Ix deriving (Typeable)

-- | Shared state of the properties subsystem.
data Model
  = Model { _map :: MVar (Map String Property)  -- ^ properties indexed by name
          , _store :: ListStore Property        -- ^ GTK list store mirroring the map
          , _generation :: TVar Integer         -- ^ bumped on every 'setProperties'
          }
  deriving (Typeable)

-- | Implicit-parameter constraint granting access to the 'Model'.
type WithModel = ?_Properties_Model :: Model

-- | The name-indexed property map of the ambient 'Model'.
propertyMap :: WithModel => MVar (Map String Property)
propertyMap = _map ?_Properties_Model

-- | The GTK 'ListStore' of the ambient 'Model'.
propertyStore :: WithModel => ListStore Property
propertyStore = _store ?_Properties_Model

-- | The generation counter of the ambient 'Model'.
propertiesGeneration :: WithModel => TVar Integer
propertiesGeneration = _generation ?_Properties_Model
-- | Fetch the 'Model' from the registry and run the action with it bound
-- as the implicit parameter. NOTE(review): the 'Just' pattern is partial
-- — assumes 'initModel' has already registered the model; confirm callers
-- guarantee this ordering.
withModel :: WithRegistry => (WithModel => IO a) -> IO a
withModel func = do
  Just (Env model) <- getEnv (Extract :: Extract Ix Model)
  let ?_Properties_Model = model
  func

-- | Build the model, load the persisted properties into it, then
-- register it in the environment for later 'withModel' calls.
initModel :: (WithRegistry, WithEnvironment) => IO ()
initModel = do
  model <- mkModel
  let ?_Properties_Model = model
  loadProperties
  addEnv Ix model
-- | Allocate a fresh 'Model': the built-in properties in an 'MVar'-held
-- map, an empty drag-and-drop 'ListStore' and a generation counter
-- starting at 0.
mkModel :: IO Model
mkModel = do
  propMap <- newMVar (mkMap builtinProperties)
  propStore <- listStoreNewDND [] Nothing Nothing
  gen <- newTVarIO 0
  return (Model propMap propStore gen)
-- | Look up a property by name.
property :: WithModel => String -> IO (Maybe Property)
property name = withMVar propertyMap $ return . Map.lookup name

-- | Merge the properties from @properties.conf@ into the map of built-in
-- properties and append the resulting entries to the list store.
loadProperties :: (WithEnvironment, WithModel) => IO ()
loadProperties = do
  props <- config "properties.conf" []
  mapM_ (listStoreAppend propertyStore) =<<
    modifyMVar propertyMap
    (\m ->
      -- 'Map.union' is left-biased, so existing (built-in) entries win
      -- over configured ones with the same name
      let m' = Map.union m $ mkMap props in
      return (m', Map.elems m'))

-- | All currently known properties.
getProperties :: WithModel => IO [Property]
getProperties =
  withMVar propertyMap $ return . Map.elems
-- | Replace the whole property set: reset the store and map, persist the
-- new list to @properties.conf@, and finally bump the generation counter
-- so observers of 'propertiesGeneration' notice the change.
setProperties :: (WithEnvironment, WithModel) => [Property] -> IO ()
setProperties props = do
  listStoreClear propertyStore
  mapM_ (listStoreAppend propertyStore) =<<
    modifyMVar propertyMap
    (const $
      let m = mkMap props in
      return (m, Map.elems m))
  writeConfig "properties.conf" props
  atomically $ do
    g <- readTVar propertiesGeneration
    writeTVar propertiesGeneration $ g + 1
-- | Build a name-indexed map from a list of properties, keyed by 'propName'.
mkMap :: [Property] -> Map String Property
mkMap props = Map.fromList [ (propName p, p) | p <- props ]
| upwawet/vision | src/Properties/Model.hs | gpl-3.0 | 3,403 | 0 | 17 | 725 | 853 | 444 | 409 | -1 | -1 |
module BackS
( backS
, backSpublic
)
-- check local rewrite closure:
-- for any state that produces reduct x z(y z)
-- check that it also produces redex S x y z.
-- if not, return empty automaton
where
import Set
import FiniteMap
import Stuff
import Options
import TA
import FAtypes
import Ids
import Reuse
import Monad ( guard )
-- | Look for all matches of the reduct shape @x z (y z)@ and check local
-- rewrite closure: every state that accepts a reduct @x z (y z)@ must
-- also accept the corresponding redex @S x y z@. If any reduct lacks its
-- redex, fail with an 'error' listing the offending tuples; otherwise
-- return the automaton (wrapped through 'trinfo' for tracing).
backS :: Opts -> TNFA Int -> TNFA Int
backS opts a @ (TNFA cons all starts moves) =
    let
        -- this is a bit ugly
        -- need to find the complete id information for the constructors
        -- we hope they are there
        -- NB: partial 'head' — assumes "@" and "S" constructors exist
        ap = head [ con | con <- setToList cons, tconname con == "@" ]
        s = head [ con | con <- setToList cons, tconname con == "S" ]

        -- inverted transition relation: term -> set of accepting states
        imoves = invert moves

        -- all (state, x, y, z) where the state accepts x z (y z) but no
        -- chain of states accepts the redex S x y z
        reducts_without_redexes = do
            t <- setToList all
            [l, r] <- sons a ap t
            [x, z] <- sons a ap l
            [y, z'] <- sons a ap r
            guard $ z == z' -- these are the two z's
            guard $ null $ do
                -- this 's' binding shadows the constructor 's' above;
                -- the right-hand side still refers to the constructor
                s <- setToList $ lookupset imoves $ mksterm s []
                sx <- setToList $ lookupset imoves $ mksterm ap [s, x]
                sxy <- setToList $ lookupset imoves $ mksterm ap [sx, y]
                sxyz <- setToList $ lookupset imoves $ mksterm ap [sxy, z]
                guard $ t == sxyz
                return ()
            return (t, x, y, z)

        r = if null reducts_without_redexes
            then a
            else error $ unlines $ "missing redexes for" : map show reducts_without_redexes
    in
        trinfo opts "backS" r r
-- | Public entry point: expects exactly one automaton argument.
--
-- Rewritten with total pattern matching instead of the original
-- O(n) @length args /= 1@ check followed by a partial irrefutable
-- @let [arg1] = args@ binding.
backSpublic :: Opts -> [ TNFA Int ] -> TNFA Int
backSpublic opts [arg1] = backS opts arg1
backSpublic _ _ = error "backSpublic.args"
-- later:
-- iterate the backS operation
-- making the automaton deterministic and minimal
-- before and after each step
-- until process converges
-- making determin. should ensure that the two z's really "are the same"
| jwaldmann/rx | src/BackS.hs | gpl-3.0 | 1,987 | 7 | 18 | 635 | 546 | 279 | 267 | 42 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-|
Module : DeadlinkDatabase
Description : Deadlink database management
Copyright : (c) Frédéric BISSON, 2016
License : GPL-3
Maintainer : zigazou@free.fr
Stability : experimental
Portability : POSIX
Functions dealing with Deadlink database management
-}
module Database.DeadlinkDatabase where
import Control.Exception (bracket)
import Data.FileEmbed(embedStringFile)
import Data.Text (Text)
import Database.SQLite3 (open, exec, close)
-- | Create the Deadlink database schema in the SQLite database at the
-- given path by running the embedded @createdb.sql@ script.
--
-- 'bracket' guarantees the connection is closed even if 'exec' throws,
-- fixing a connection leak in the original open/exec/close sequence.
createDeadlinkDB :: Text -> IO ()
createDeadlinkDB dbname =
    bracket (open dbname) close $ \db ->
        exec db $(embedStringFile "src/Database/createdb.sql")
| Zigazou/deadlink | src/Database/DeadlinkDatabase.hs | gpl-3.0 | 611 | 0 | 10 | 100 | 97 | 51 | 46 | 10 | 1 |
module Main where
import Control.DeepSeq
import Control.Exception
import Control.Monad
import Data.ByteString as BS
import System.Environment
import Text.Read (readMaybe)

import Jason
-- | Benchmark driver: parse a fixture JSON file N times, forcing each
-- result to normal form. N comes from the first command line argument.
main :: IO ()
main = do
    args <- getArgs
    -- Fail with a usage message instead of crashing on a partial
    -- 'head'/'read' when the argument is missing or malformed.
    times <- case args of
        (a : _) | Just n <- readMaybe a -> return (n :: Int)
        _ -> fail "usage: benchmark ITERATIONS"
    bs <- BS.readFile "../fixtures/sample1.json"
    replicateM_ times $
        -- Report a parse failure explicitly instead of dying on an
        -- irrefutable 'Just' pattern.
        case parse bs of
            Just jVal -> void $ evaluate $ force jVal
            Nothing -> fail "parse failed: ../fixtures/sample1.json"
| TOSPIO/jason | benchmark/Sample1.hs | gpl-3.0 | 381 | 0 | 14 | 73 | 134 | 67 | 67 | 15 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Dhek.Utils
--
--------------------------------------------------------------------------------
module Dhek.Utils where
--------------------------------------------------------------------------------
import Data.Functor.Foldable
--------------------------------------------------------------------------------
-- | Finds an element that satisfies the predicate and deletes it from
-- the list; only the first match is removed, unmatched elements are kept.
-- > findDelete (== 2) [1,2,3] == (Just 2, [1,3])
-- > findDelete (== 4) [1,2,3] == (Nothing, [1,2,3])
-- > findDelete (== 0) [] == (Nothing, [])
findDelete :: (a -> Bool) -> [a] -> (Maybe a, [a])
findDelete p = go
  where
    -- Plain structural recursion (equivalent to the recursion-schemes
    -- paramorphism): on a miss, keep the element and thread the result
    -- of searching the tail.
    go [] = (Nothing, [])
    go (x:xs)
        | p x = (Just x, xs)
        | otherwise = let (found, rest) = go xs in (found, x : rest)
| cchantep/dhek | Dhek/Utils.hs | gpl-3.0 | 868 | 0 | 12 | 155 | 164 | 93 | 71 | 9 | 2 |
{-
Copyright (C) 2017-2018 defanor <defanor@uberspace.net>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{- |
Module : Text.Pandoc.Readers.Plain
Maintainer : defanor <defanor@uberspace.net>
Stability : unstable
Portability : portable
-}
{-# LANGUAGE OverloadedStrings #-}
module Text.Pandoc.Readers.Plain ( readPlain
, lineToInlines
) where
import Text.Pandoc.Definition
import Text.Pandoc.Class
import qualified Data.Text as T
-- | Translates a text line into a list of 'Inline' elements suitable
-- for further processing: each space becomes 'Space', each maximal run
-- of non-space characters becomes a 'Str'.
lineToInlines :: String -> [Inline]
lineToInlines = go
  where
    go [] = []
    go (' ' : rest) = Space : go rest
    go str =
      let (word, rest) = span (/= ' ') str
      in Str word : go rest
-- | Reads plain text, always succeeding and producing a document with a
-- single 'LineBlock' (one entry per input line, carriage returns
-- stripped first so CRLF input is handled).
readPlain :: PandocMonad m => T.Text -> m Pandoc
readPlain = pure . Pandoc mempty . pure . LineBlock
            . map (\l -> (lineToInlines $ T.unpack l))
            . T.lines . T.filter (/= '\r')
| defanor/pancake | Text/Pandoc/Readers/Plain.hs | gpl-3.0 | 1,655 | 0 | 14 | 353 | 221 | 122 | 99 | 15 | 1 |
-- This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this
-- file, You can obtain one at http://mozilla.org/MPL/2.0/.
{-# LANGUAGE TypeOperators #-}
module Data.Predicate.Product where
infixr 5 :::

-- | A data-type for combining results of predicate evaluations.
-- Right-associative, so @a ::: b ::: c@ parses as @a ::: (b ::: c)@.
data a ::: b = a ::: b deriving (Eq, Show)
-- | Reverse function application: @x # f = f x@ — useful in combination
-- with indexed access, e.g. @('x' ::: True ::: False)#_2@ yields @True@.
(#) :: a -> (a -> b) -> b
x # f = f x
-- | First component of a product.
hd :: a ::: b -> a
hd (a ::: _) = a
{-# INLINE hd #-}

-- | Everything after the first component of a product.
tl :: a ::: b -> b
tl (_ ::: b) = b
{-# INLINE tl #-}
-----------------------------------------------------------------------------
-- Indexed access (except for last element)

-- '_n' projects the element at position n of a product with more than n
-- components: it takes the head of what the primed variant leaves over.

_1 :: a ::: b -> a
_1 p = _1' p
{-# INLINE _1 #-}

_2 :: a ::: b ::: c -> b
_2 p = hd (_2' p)
{-# INLINE _2 #-}

_3 :: a ::: b ::: c ::: d -> c
_3 p = hd (_3' p)
{-# INLINE _3 #-}

_4 :: a ::: b ::: c ::: d ::: e -> d
_4 p = hd (_4' p)
{-# INLINE _4 #-}

_5 :: a ::: b ::: c ::: d ::: e ::: f -> e
_5 p = hd (_5' p)
{-# INLINE _5 #-}

_6 :: a ::: b ::: c ::: d ::: e ::: f ::: g -> f
_6 p = hd (_6' p)
{-# INLINE _6 #-}

_7 :: a ::: b ::: c ::: d ::: e ::: f ::: g ::: h -> g
_7 p = hd (_7' p)
{-# INLINE _7 #-}

_8 :: a ::: b ::: c ::: d ::: e ::: f ::: g ::: h ::: i -> h
_8 p = hd (_8' p)
{-# INLINE _8 #-}

_9 :: a ::: b ::: c ::: d ::: e ::: f ::: g ::: h ::: i ::: j -> i
_9 p = hd (_9' p)
{-# INLINE _9 #-}
-----------------------------------------------------------------------------
-- Access last element

-- '_n'' drops the first n-1 components, returning whatever remains;
-- each is defined in terms of its predecessor by one more 'tl'.

_1' :: a ::: b -> a
_1' = hd
{-# INLINE _1' #-}

_2' :: a ::: b -> b
_2' = tl
{-# INLINE _2' #-}

_3' :: a ::: b ::: c -> c
_3' = tl . _2'
{-# INLINE _3' #-}

_4' :: a ::: b ::: c ::: d -> d
_4' = tl . _3'
{-# INLINE _4' #-}

_5' :: a ::: b ::: c ::: d ::: e -> e
_5' = tl . _4'
{-# INLINE _5' #-}

_6' :: a ::: b ::: c ::: d ::: e ::: f -> f
_6' = tl . _5'
{-# INLINE _6' #-}

_7' :: a ::: b ::: c ::: d ::: e ::: f ::: g -> g
_7' = tl . _6'
{-# INLINE _7' #-}

_8' :: a ::: b ::: c ::: d ::: e ::: f ::: g ::: h -> h
_8' = tl . _7'
{-# INLINE _8' #-}

_9' :: a ::: b ::: c ::: d ::: e ::: f ::: g ::: h ::: i -> i
_9' = tl . _8'
{-# INLINE _9' #-}
| twittner/wai-predicates | src/Data/Predicate/Product.hs | mpl-2.0 | 2,314 | 0 | 14 | 628 | 881 | 484 | 397 | 66 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.ElasticTranscoder.Types
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
module Network.AWS.ElasticTranscoder.Types
(
-- * Service
ElasticTranscoder
-- ** Error
, JSONError
-- * PipelineOutputConfig
, PipelineOutputConfig
, pipelineOutputConfig
, pocBucket
, pocPermissions
, pocStorageClass
-- * CreateJobPlaylist
, CreateJobPlaylist
, createJobPlaylist
, cjpFormat
, cjpHlsContentProtection
, cjpName
, cjpOutputKeys
-- * Captions
, Captions
, captions
, cCaptionFormats
, cCaptionSources
, cMergePolicy
-- * AudioCodecOptions
, AudioCodecOptions
, audioCodecOptions
, acoProfile
-- * JobOutput
, JobOutput
, jobOutput
, joAlbumArt
, joCaptions
, joComposition
, joDuration
, joEncryption
, joHeight
, joId
, joKey
, joPresetId
, joRotate
, joSegmentDuration
, joStatus
, joStatusDetail
, joThumbnailEncryption
, joThumbnailPattern
, joWatermarks
, joWidth
-- * Job'
, Job'
, job
, jArn
, jId
, jInput
, jOutput
, jOutputKeyPrefix
, jOutputs
, jPipelineId
, jPlaylists
, jStatus
, jUserMetadata
-- * CaptionSource
, CaptionSource
, captionSource
, csEncryption
, csKey
, csLabel
, csLanguage
, csTimeOffset
-- * Artwork
, Artwork
, artwork
, aAlbumArtFormat
, aEncryption
, aInputKey
, aMaxHeight
, aMaxWidth
, aPaddingPolicy
, aSizingPolicy
-- * TimeSpan
, TimeSpan
, timeSpan
, tsDuration
, tsStartTime
-- * CreateJobOutput
, CreateJobOutput
, createJobOutput
, cjoAlbumArt
, cjoCaptions
, cjoComposition
, cjoEncryption
, cjoKey
, cjoPresetId
, cjoRotate
, cjoSegmentDuration
, cjoThumbnailEncryption
, cjoThumbnailPattern
, cjoWatermarks
-- * AudioParameters
, AudioParameters
, audioParameters
, apBitRate
, apChannels
, apCodec
, apCodecOptions
, apSampleRate
-- * Thumbnails
, Thumbnails
, thumbnails
, tAspectRatio
, tFormat
, tInterval
, tMaxHeight
, tMaxWidth
, tPaddingPolicy
, tResolution
, tSizingPolicy
-- * Encryption
, Encryption
, encryption
, eInitializationVector
, eKey
, eKeyMd5
, eMode
-- * JobAlbumArt
, JobAlbumArt
, jobAlbumArt
, jaaArtwork
, jaaMergePolicy
-- * JobWatermark
, JobWatermark
, jobWatermark
, jwEncryption
, jwInputKey
, jwPresetWatermarkId
-- * Pipeline
, Pipeline
, pipeline
, pArn
, pAwsKmsKeyArn
, pContentConfig
, pId
, pInputBucket
, pName
, pNotifications
, pOutputBucket
, pRole
, pStatus
, pThumbnailConfig
-- * Preset
, Preset
, preset
, p1Arn
, p1Audio
, p1Container
, p1Description
, p1Id
, p1Name
, p1Thumbnails
, p1Type
, p1Video
-- * CaptionFormat
, CaptionFormat
, captionFormat
, cfEncryption
, cfFormat
, cfPattern
-- * HlsContentProtection
, HlsContentProtection
, hlsContentProtection
, hcpInitializationVector
, hcpKey
, hcpKeyMd5
, hcpKeyStoragePolicy
, hcpLicenseAcquisitionUrl
, hcpMethod
-- * PresetWatermark
, PresetWatermark
, presetWatermark
, pwHorizontalAlign
, pwHorizontalOffset
, pwId
, pwMaxHeight
, pwMaxWidth
, pwOpacity
, pwSizingPolicy
, pwTarget
, pwVerticalAlign
, pwVerticalOffset
-- * Permission
, Permission
, permission
, pAccess
, pGrantee
, pGranteeType
-- * VideoParameters
, VideoParameters
, videoParameters
, vpAspectRatio
, vpBitRate
, vpCodec
, vpCodecOptions
, vpDisplayAspectRatio
, vpFixedGOP
, vpFrameRate
, vpKeyframesMaxDist
, vpMaxFrameRate
, vpMaxHeight
, vpMaxWidth
, vpPaddingPolicy
, vpResolution
, vpSizingPolicy
, vpWatermarks
-- * Playlist
, Playlist
, playlist
, p2Format
, p2HlsContentProtection
, p2Name
, p2OutputKeys
, p2Status
, p2StatusDetail
-- * Notifications
, Notifications
, notifications
, nCompleted
, nError
, nProgressing
, nWarning
-- * Clip
, Clip
, clip
, cTimeSpan
-- * JobInput
, JobInput
, jobInput
, jiAspectRatio
, jiContainer
, jiEncryption
, jiFrameRate
, jiInterlaced
, jiKey
, jiResolution
) where
import Network.AWS.Prelude
import Network.AWS.Signing
import qualified GHC.Exts
-- | Version @2012-09-25@ of the Amazon Elastic Transcoder service.
-- Uninhabited type: used only as a phantom index for the service.
data ElasticTranscoder

instance AWSService ElasticTranscoder where
    type Sg ElasticTranscoder = V4
    type Er ElasticTranscoder = JSONError

    service = service'
      where
        service' :: Service ElasticTranscoder
        service' = Service
            { _svcAbbrev       = "ElasticTranscoder"
            , _svcPrefix       = "elastictranscoder"
            , _svcVersion      = "2012-09-25"
            , _svcTargetPrefix = Nothing
            , _svcJSONVersion  = Nothing
            , _svcHandle       = handle
            , _svcRetry        = retry
            }

        handle :: Status
               -> Maybe (LazyByteString -> ServiceError JSONError)
        handle = jsonError statusSuccess service'

        -- exponential backoff: base 0.05s, doubling, at most 5 attempts
        retry :: Retry ElasticTranscoder
        retry = Exponential
            { _retryBase     = 0.05
            , _retryGrowth   = 2
            , _retryAttempts = 5
            , _retryCheck    = check
            }

        -- decide from status code / AWS error code whether to retry
        check :: Status
              -> JSONError
              -> Bool
        check (statusCode -> s) (awsErrorCode -> e)
            | s == 400 && "ThrottlingException" == e = True -- Throttling
            | s == 500  = True -- General Server Error
            | s == 509  = True -- Limit Exceeded
            | s == 503  = True -- Service Unavailable
            | otherwise = False
data PipelineOutputConfig = PipelineOutputConfig
    { _pocBucket       :: Maybe Text                    -- ^ see 'pocBucket'
    , _pocPermissions  :: List "Permissions" Permission -- ^ see 'pocPermissions'
    , _pocStorageClass :: Maybe Text                    -- ^ see 'pocStorageClass'
    } deriving (Eq, Read, Show)
-- | 'PipelineOutputConfig' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'pocBucket' @::@ 'Maybe' 'Text'
--
-- * 'pocPermissions' @::@ ['Permission']
--
-- * 'pocStorageClass' @::@ 'Maybe' 'Text'
--
-- Default value: optional fields unset, no permissions granted.
pipelineOutputConfig :: PipelineOutputConfig
pipelineOutputConfig = PipelineOutputConfig
    { _pocBucket       = Nothing
    , _pocStorageClass = Nothing
    , _pocPermissions  = mempty
    }
-- | The Amazon S3 bucket in which you want Elastic Transcoder to save the
-- transcoded files. Specify this value when all of the following are true: You
-- want to save transcoded files, thumbnails (if any), and playlists (if any)
-- together in one bucket. You do not want to specify the users or groups who
-- have access to the transcoded files, thumbnails, and playlists. You do not
-- want to specify the permissions that Elastic Transcoder grants to the files.
-- You want to associate the transcoded files and thumbnails with the Amazon S3
-- Standard storage class. If you want to save transcoded files and playlists
-- in one bucket and thumbnails in another bucket, specify which users can
-- access the transcoded files or the permissions the users have, or change the
-- Amazon S3 storage class, omit OutputBucket and specify values for 'ContentConfig' and 'ThumbnailConfig' instead.
pocBucket :: Lens' PipelineOutputConfig (Maybe Text)
pocBucket = lens _pocBucket setBucket
  where
    setBucket s a = s { _pocBucket = a }
-- | Optional. The 'Permissions' object specifies which users and/or predefined
-- Amazon S3 groups you want to have access to transcoded files and playlists,
-- and the type of access you want them to have. You can grant permissions to a
-- maximum of 30 users and/or predefined Amazon S3 groups.
--
-- If you include 'Permissions', Elastic Transcoder grants only the permissions
-- that you specify. It does not grant full permissions to the owner of the role
-- specified by 'Role'. If you want that user to have full control, you must
-- explicitly grant full control to the user.
--
-- If you omit 'Permissions', Elastic Transcoder grants full control over the
-- transcoded files and playlists to the owner of the role specified by 'Role',
-- and grants no other permissions to any other user or group.
pocPermissions :: Lens' PipelineOutputConfig [Permission]
pocPermissions = lens _pocPermissions setPermissions . _List
  where
    setPermissions s a = s { _pocPermissions = a }
-- | The Amazon S3 storage class, 'Standard' or 'ReducedRedundancy', that you want
-- Elastic Transcoder to assign to the video files and playlists that it stores
-- in your Amazon S3 bucket.
pocStorageClass :: Lens' PipelineOutputConfig (Maybe Text)
pocStorageClass = lens _pocStorageClass setStorageClass
  where
    setStorageClass s a = s { _pocStorageClass = a }
instance FromJSON PipelineOutputConfig where
    -- NB: the applicative order must match the constructor's field order
    parseJSON = withObject "PipelineOutputConfig" $ \o -> PipelineOutputConfig
        <$> o .:? "Bucket"
        -- a missing "Permissions" key defaults to the empty list
        <*> o .:? "Permissions" .!= mempty
        <*> o .:? "StorageClass"
instance ToJSON PipelineOutputConfig where
    -- key order differs from the record's field order; JSON objects
    -- are unordered so this is harmless
    toJSON PipelineOutputConfig{..} = object
        [ "Bucket"       .= _pocBucket
        , "StorageClass" .= _pocStorageClass
        , "Permissions"  .= _pocPermissions
        ]
data CreateJobPlaylist = CreateJobPlaylist
    { _cjpFormat               :: Maybe Text                 -- ^ see 'cjpFormat'
    , _cjpHlsContentProtection :: Maybe HlsContentProtection -- ^ see 'cjpHlsContentProtection'
    , _cjpName                 :: Maybe Text                 -- ^ see 'cjpName'
    , _cjpOutputKeys           :: List "OutputKeys" Text     -- ^ see 'cjpOutputKeys'
    } deriving (Eq, Read, Show)
-- | 'CreateJobPlaylist' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cjpFormat' @::@ 'Maybe' 'Text'
--
-- * 'cjpHlsContentProtection' @::@ 'Maybe' 'HlsContentProtection'
--
-- * 'cjpName' @::@ 'Maybe' 'Text'
--
-- * 'cjpOutputKeys' @::@ ['Text']
--
-- Default value: optional fields unset, no output keys.
createJobPlaylist :: CreateJobPlaylist
createJobPlaylist = CreateJobPlaylist
    { _cjpName                 = Nothing
    , _cjpFormat               = Nothing
    , _cjpOutputKeys           = mempty
    , _cjpHlsContentProtection = Nothing
    }
-- | The format of the output playlist. Valid formats include 'HLSv3', 'HLSv4', and 'Smooth'.
cjpFormat :: Lens' CreateJobPlaylist (Maybe Text)
cjpFormat = lens _cjpFormat setFormat
  where
    setFormat s a = s { _cjpFormat = a }
-- | The HLS content protection settings, if any, that you want Elastic Transcoder
-- to apply to the output files associated with this playlist.
cjpHlsContentProtection :: Lens' CreateJobPlaylist (Maybe HlsContentProtection)
cjpHlsContentProtection = lens _cjpHlsContentProtection setHcp
  where
    setHcp s a = s { _cjpHlsContentProtection = a }
-- | The name that you want Elastic Transcoder to assign to the master playlist,
-- for example, nyc-vacation.m3u8. If the name includes a '/' character, the
-- section of the name before the last '/' must be identical for all 'Name' objects.
-- If you create more than one master playlist, the values of all 'Name' objects
-- must be unique.
--
-- Note: Elastic Transcoder automatically appends the relevant file extension
-- to the file name ('.m3u8' for 'HLSv3' and 'HLSv4' playlists, and '.ism' and '.ismc' for 'Smooth' playlists). If you include a file extension in 'Name', the file name
-- will have two extensions.
cjpName :: Lens' CreateJobPlaylist (Maybe Text)
cjpName = lens _cjpName setName
  where
    setName s a = s { _cjpName = a }
-- | For each output in this job that you want to include in a master playlist,
-- the value of the 'Outputs:Key' object.
--
-- If your output is not 'HLS' or does not have a segment duration set, the
-- name of the output file is a concatenation of 'OutputKeyPrefix' and 'Outputs:Key':
--
-- OutputKeyPrefix'Outputs:Key'
--
-- If your output is 'HLSv3' and has a segment duration set, or is not included
-- in a playlist, Elastic Transcoder creates an output playlist file with a file
-- extension of '.m3u8', and a series of '.ts' files that include a five-digit
-- sequential counter beginning with 00000:
--
-- OutputKeyPrefix'Outputs:Key'.m3u8
--
-- OutputKeyPrefix'Outputs:Key'00000.ts
--
-- If your output is 'HLSv4', has a segment duration set, and is included in an 'HLSv4' playlist, Elastic Transcoder creates an output playlist file with a
-- file extension of '_v4.m3u8'. If the output is video, Elastic Transcoder also
-- creates an output file with an extension of '_iframe.m3u8':
--
-- OutputKeyPrefix'Outputs:Key'_v4.m3u8
--
-- OutputKeyPrefix'Outputs:Key'_iframe.m3u8
--
-- OutputKeyPrefix'Outputs:Key'.ts
--
-- Elastic Transcoder automatically appends the relevant file extension to
-- the file name. If you include a file extension in Output Key, the file name
-- will have two extensions.
--
-- If you include more than one output in a playlist, any segment duration
-- settings, clip settings, or caption settings must be the same for all outputs
-- in the playlist. For 'Smooth' playlists, the 'Audio:Profile', 'Video:Profile', and 'Video:FrameRate' to 'Video:KeyframesMaxDist' ratio must be the same for all outputs.
cjpOutputKeys :: Lens' CreateJobPlaylist [Text]
cjpOutputKeys = lens _cjpOutputKeys setOutputKeys . _List
  where
    setOutputKeys s a = s { _cjpOutputKeys = a }
instance FromJSON CreateJobPlaylist where
    -- applicative order matches the constructor's field order
    parseJSON = withObject "CreateJobPlaylist" $ \o -> CreateJobPlaylist
        <$> o .:? "Format"
        <*> o .:? "HlsContentProtection"
        <*> o .:? "Name"
        -- a missing "OutputKeys" key defaults to the empty list
        <*> o .:? "OutputKeys" .!= mempty
instance ToJSON CreateJobPlaylist where
    -- key order differs from the record's field order; harmless for JSON
    toJSON CreateJobPlaylist{..} = object
        [ "Name"                 .= _cjpName
        , "Format"               .= _cjpFormat
        , "OutputKeys"           .= _cjpOutputKeys
        , "HlsContentProtection" .= _cjpHlsContentProtection
        ]
data Captions = Captions
    { _cCaptionFormats :: List "CaptionFormats" CaptionFormat -- ^ see 'cCaptionFormats'
    , _cCaptionSources :: List "CaptionSources" CaptionSource -- ^ see 'cCaptionSources'
    , _cMergePolicy    :: Maybe Text                          -- ^ see 'cMergePolicy'
    } deriving (Eq, Read, Show)
-- | 'Captions' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cCaptionFormats' @::@ ['CaptionFormat']
--
-- * 'cCaptionSources' @::@ ['CaptionSource']
--
-- * 'cMergePolicy' @::@ 'Maybe' 'Text'
--
-- Default value: no merge policy, no sources, no formats.
captions :: Captions
captions = Captions
    { _cMergePolicy    = Nothing
    , _cCaptionSources = mempty
    , _cCaptionFormats = mempty
    }
-- | The array of file formats for the output captions. If you leave this value
-- blank, Elastic Transcoder returns an error.
cCaptionFormats :: Lens' Captions [CaptionFormat]
cCaptionFormats = lens _cCaptionFormats setFormats . _List
  where
    setFormats s a = s { _cCaptionFormats = a }
-- | Source files for the input sidecar captions used during the transcoding
-- process. To omit all sidecar captions, leave 'CaptionSources' blank.
cCaptionSources :: Lens' Captions [CaptionSource]
cCaptionSources = lens _cCaptionSources setSources . _List
  where
    setSources s a = s { _cCaptionSources = a }
-- | A policy that determines how Elastic Transcoder handles the existence of
-- multiple captions.
--
-- MergeOverride: Elastic Transcoder transcodes both embedded and sidecar
-- captions into outputs. If captions for a language are embedded in the input
-- file and also appear in a sidecar file, Elastic Transcoder uses the sidecar
-- captions and ignores the embedded captions for that language.
--
-- MergeRetain: Elastic Transcoder transcodes both embedded and sidecar
-- captions into outputs. If captions for a language are embedded in the input
-- file and also appear in a sidecar file, Elastic Transcoder uses the embedded
-- captions and ignores the sidecar captions for that language. If 'CaptionSources'
-- is empty, Elastic Transcoder omits all sidecar captions from the output
-- files.
--
-- Override: Elastic Transcoder transcodes only the sidecar captions that you
-- specify in 'CaptionSources'.
--
-- 'MergePolicy' cannot be null.
cMergePolicy :: Lens' Captions (Maybe Text)
cMergePolicy = lens _cMergePolicy (\s a -> s { _cMergePolicy = a })
instance FromJSON Captions where
parseJSON = withObject "Captions" $ \o -> Captions
<$> o .:? "CaptionFormats" .!= mempty
<*> o .:? "CaptionSources" .!= mempty
<*> o .:? "MergePolicy"
instance ToJSON Captions where
toJSON Captions{..} = object
[ "MergePolicy" .= _cMergePolicy
, "CaptionSources" .= _cCaptionSources
, "CaptionFormats" .= _cCaptionFormats
]
-- | Codec-specific audio options; currently only the AAC profile.
newtype AudioCodecOptions = AudioCodecOptions
    { _acoProfile :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)

-- | Smart constructor for 'AudioCodecOptions'; the single field is settable
-- through the 'acoProfile' lens.
audioCodecOptions :: AudioCodecOptions
audioCodecOptions = AudioCodecOptions
    { _acoProfile = Nothing
    }

-- | You can only choose an audio profile when you specify AAC for the value of
-- Audio:Codec.
--
-- Specify the AAC profile for the output file. Elastic Transcoder supports
-- the following profiles:
--
-- 'auto': the profile is selected based on the bit rate chosen for the output
-- file. 'AAC-LC': the most common AAC profile; use for bit rates larger than
-- 64 kbps. 'HE-AAC': not supported on some older players and devices; use for
-- bit rates between 40 and 80 kbps. 'HE-AACv2': not supported on some players
-- and devices; use for bit rates less than 48 kbps. All outputs in a 'Smooth'
-- playlist must have the same value for 'Profile'.
--
-- If you created any presets before AAC profiles were added, Elastic
-- Transcoder automatically updated your presets to use AAC-LC. You can change
-- the value as required.
acoProfile :: Lens' AudioCodecOptions (Maybe Text)
acoProfile = lens _acoProfile setter
  where
    setter aco v = aco { _acoProfile = v }

instance FromJSON AudioCodecOptions where
    parseJSON = withObject "AudioCodecOptions" $ \o -> do
        profile <- o .:? "Profile"
        pure (AudioCodecOptions profile)

instance ToJSON AudioCodecOptions where
    toJSON aco = object
        [ "Profile" .= _acoProfile aco
        ]
-- | The status and settings of one transcoded output file of a job.
data JobOutput = JobOutput
    { _joAlbumArt            :: Maybe JobAlbumArt        -- ^ album art for the output, if any
    , _joCaptions            :: Maybe Captions           -- ^ caption transcoding settings
    , _joComposition         :: List "Composition" Clip  -- ^ clip(s) that make up the output
    , _joDuration            :: Maybe Integer            -- ^ duration of the output, in seconds
    , _joEncryption          :: Maybe Encryption         -- ^ encryption applied to the output file
    , _joHeight              :: Maybe Int                -- ^ height of the output, in pixels
    , _joId                  :: Maybe Text               -- ^ sequential counter identifying the output within the job
    , _joKey                 :: Maybe Text               -- ^ name of the transcoded file
    , _joPresetId            :: Maybe Text               -- ^ 'Id' of the preset used for this output
    , _joRotate              :: Maybe Text               -- ^ clockwise rotation: auto, 0, 90, 180 or 270
    , _joSegmentDuration     :: Maybe Text               -- ^ target segment duration (fmp4\/ts containers only)
    , _joStatus              :: Maybe Text               -- ^ per-output status: Submitted, Progressing, Complete, Canceled or Error
    , _joStatusDetail        :: Maybe Text               -- ^ further explanation of 'Status'
    , _joThumbnailEncryption :: Maybe Encryption         -- ^ encryption applied to the thumbnails
    , _joThumbnailPattern    :: Maybe Text               -- ^ thumbnail file-name pattern ("" for no thumbnails)
    , _joWatermarks          :: List "Watermarks" JobWatermark -- ^ watermarks added during transcoding (up to four)
    , _joWidth               :: Maybe Int                -- ^ width of the output, in pixels
    } deriving (Eq, Read, Show)
-- | Smart constructor for 'JobOutput': every field starts out 'Nothing' or
-- 'mempty'. Set individual fields with the lenses 'joAlbumArt', 'joCaptions',
-- 'joComposition', 'joDuration', 'joEncryption', 'joHeight', 'joId', 'joKey',
-- 'joPresetId', 'joRotate', 'joSegmentDuration', 'joStatus', 'joStatusDetail',
-- 'joThumbnailEncryption', 'joThumbnailPattern', 'joWatermarks' and 'joWidth'.
jobOutput :: JobOutput
jobOutput = JobOutput
    { _joAlbumArt            = Nothing
    , _joCaptions            = Nothing
    , _joComposition         = mempty
    , _joDuration            = Nothing
    , _joEncryption          = Nothing
    , _joHeight              = Nothing
    , _joId                  = Nothing
    , _joKey                 = Nothing
    , _joPresetId            = Nothing
    , _joRotate              = Nothing
    , _joSegmentDuration     = Nothing
    , _joStatus              = Nothing
    , _joStatusDetail        = Nothing
    , _joThumbnailEncryption = Nothing
    , _joThumbnailPattern    = Nothing
    , _joWatermarks          = mempty
    , _joWidth               = Nothing
    }
-- | The album art to be associated with the output file, if any.
joAlbumArt :: Lens' JobOutput (Maybe JobAlbumArt)
joAlbumArt = lens _joAlbumArt (\jo v -> jo { _joAlbumArt = v })

-- | Caption settings for this output. All captions must be in UTF-8. Elastic
-- Transcoder supports two types of captions:
--
-- Embedded: included in the same file as the audio and video; at most one
-- embedded caption per language, up to a maximum of 300 embedded captions per
-- file. Valid input values include: 'CEA-608 (EIA-608', first non-empty
-- channel only), 'CEA-708 (EIA-708', first non-empty channel only), and
-- 'mov-text'. Valid outputs include: 'mov-text'. A maximum of one embedded
-- format per output is supported.
--
-- Sidecar: kept in a separate metadata file from the audio and video data,
-- requiring a player that understands the relationship between the video file
-- and the sidecar file; at most one sidecar caption per language, up to a
-- maximum of 20 sidecar captions per file. Valid input values include: 'dfxp'
-- (first div element only), 'ebu-tt', 'scc', 'smpt', 'srt', 'ttml' (first div
-- element only), and 'webvtt'. Valid outputs include: 'dfxp' (first div
-- element only), 'scc', 'srt', and 'webvtt'. If you want ttml or smpte-tt
-- compatible captions, specify dfxp as your output format.
--
-- Elastic Transcoder does not support OCR (Optical Character Recognition),
-- does not accept pictures as a valid input for captions, is not available
-- for audio-only transcoding, and does not preserve text formatting (for
-- example, italics) during the transcoding process.
--
-- To remove captions or leave the captions empty, set 'Captions' to null. To
-- pass through existing captions unchanged, set the 'MergePolicy' to
-- 'MergeRetain', and pass in a null 'CaptionSources' array.
--
-- For more information on embedded files, see the Subtitles Wikipedia page;
-- for sidecar files, see the Extensible Metadata Platform and Sidecar file
-- Wikipedia pages.
joCaptions :: Lens' JobOutput (Maybe Captions)
joCaptions = lens _joCaptions (\jo v -> jo { _joCaptions = v })

-- | The clips that make up this output file. A clip is an excerpt from the
-- beginning, middle, or end of the input file. For the current release only
-- settings for a single clip per output file may be specified. The
-- Composition object cannot be null.
joComposition :: Lens' JobOutput [Clip]
joComposition = lens _joComposition (\jo v -> jo { _joComposition = v }) . _List

-- | Duration of the output file, in seconds.
joDuration :: Lens' JobOutput (Maybe Integer)
joDuration = lens _joDuration (\jo v -> jo { _joDuration = v })

-- | The encryption settings, if any, applied to the output files. If
-- encryption is used a mode must be specified; otherwise Elastic Transcoder
-- writes an unencrypted file to the Amazon S3 bucket.
joEncryption :: Lens' JobOutput (Maybe Encryption)
joEncryption = lens _joEncryption (\jo v -> jo { _joEncryption = v })

-- | Height of the output file, in pixels.
joHeight :: Lens' JobOutput (Maybe Int)
joHeight = lens _joHeight (\jo v -> jo { _joHeight = v })

-- | A sequential counter, starting with 1, that identifies an output among the
-- outputs from the current job. In the Output syntax, this value is always 1.
joId :: Lens' JobOutput (Maybe Text)
joId = lens _joId (\jo v -> jo { _joId = v })

-- | The name to assign to the transcoded file. Elastic Transcoder saves the
-- file in the Amazon S3 bucket specified by the 'OutputBucket' object in the
-- pipeline that is specified by the pipeline ID.
joKey :: Lens' JobOutput (Maybe Text)
joKey = lens _joKey (\jo v -> jo { _joKey = v })

-- | The value of the 'Id' object for the preset used for this job. The preset
-- determines the audio, video, and thumbnail settings used for transcoding.
-- To use a preset that you created, specify the preset ID that Elastic
-- Transcoder returned in the response when you created the preset. You can
-- also use the Elastic Transcoder system presets, which you can get with
-- 'ListPresets'.
joPresetId :: Lens' JobOutput (Maybe Text)
joPresetId = lens _joPresetId (\jo v -> jo { _joPresetId = v })

-- | The number of degrees clockwise by which the output is rotated relative to
-- the input. One of: 'auto', '0', '90', '180', '270'. The value 'auto'
-- generally works only if the file being transcoded contains rotation
-- metadata.
joRotate :: Lens' JobOutput (Maybe Text)
joRotate = lens _joRotate (\jo v -> jo { _joRotate = v })

-- | (Outputs in Fragmented MP4 or MPEG-TS format only.) If the preset named by
-- 'PresetId' has a 'Container' value of 'fmp4' (Fragmented MP4) or 'ts'
-- (MPEG-TS), 'SegmentDuration' is the target maximum duration of each segment
-- in seconds. For 'HLSv3' format playlists, each media segment is stored in a
-- separate '.ts' file; for 'HLSv4' and 'Smooth' playlists, all media segments
-- for an output are stored in a single file. Each segment is approximately
-- the length of the 'SegmentDuration', though individual segments might be
-- shorter or longer.
--
-- The range of valid values is 1 to 60 seconds. If the duration of the video
-- is not evenly divisible by 'SegmentDuration', the duration of the last
-- segment is the remainder of total length/SegmentDuration.
--
-- Elastic Transcoder creates an output-specific playlist for each 'HLS'
-- output that you specify in OutputKeys. To add an output to the master
-- playlist for this job, include it in the 'OutputKeys' of the associated
-- playlist.
joSegmentDuration :: Lens' JobOutput (Maybe Text)
joSegmentDuration =
    lens _joSegmentDuration (\jo v -> jo { _joSegmentDuration = v })

-- | The status of one output in a job: 'Submitted', 'Progressing', 'Complete',
-- 'Canceled', or 'Error'. If only one output was specified, 'Outputs:Status'
-- is always the same as 'Job:Status'. With more than one output, each
-- output's 'Outputs:Status' remains Submitted until Elastic Transcoder starts
-- to process that output; 'Job:Status' changes to Progressing when the first
-- output starts processing and remains Progressing until all outputs reach a
-- terminal status, either Complete or Error. 'Job:Status' then changes to
-- Complete only if 'Outputs:Status' for all outputs is 'Complete'; if one or
-- more outputs is 'Error', the terminal 'Job:Status' is also 'Error'.
joStatus :: Lens' JobOutput (Maybe Text)
joStatus = lens _joStatus (\jo v -> jo { _joStatus = v })

-- | Information that further explains 'Status'.
joStatusDetail :: Lens' JobOutput (Maybe Text)
joStatusDetail = lens _joStatusDetail (\jo v -> jo { _joStatusDetail = v })

-- | The encryption settings, if any, applied to the thumbnail.
joThumbnailEncryption :: Lens' JobOutput (Maybe Encryption)
joThumbnailEncryption =
    lens _joThumbnailEncryption (\jo v -> jo { _joThumbnailEncryption = v })

-- | Whether (and how) thumbnails are created for the videos. Specify "" if no
-- thumbnails are wanted. Otherwise, specify the information to include in the
-- file name for each thumbnail; the following values may appear in any
-- sequence:
--
-- '{count}' (Required): wherever '{count}' appears, a five-digit sequence
-- number (beginning with 00001) is added to thumbnail file names, indicating
-- where a given thumbnail appears in the sequence of thumbnails for a
-- transcoded file. Specifying a literal value and/or '{resolution}' while
-- omitting '{count}' causes a validation error and the job is not created.
--
-- Literal values (Optional): may appear anywhere in the 'ThumbnailPattern'
-- object, for example as a file name prefix or as a delimiter between
-- '{resolution}' and '{count}'.
--
-- '{resolution}' (Optional): include '{resolution}' to have the resolution
-- appear in the file name.
--
-- Thumbnails are automatically saved in the format (.jpg or .png) that
-- appears in the preset specified in the 'PresetID' value of
-- 'CreateJobOutput', with the applicable file name extension appended.
joThumbnailPattern :: Lens' JobOutput (Maybe Text)
joThumbnailPattern =
    lens _joThumbnailPattern (\jo v -> jo { _joThumbnailPattern = v })

-- | The watermarks to add to the video during transcoding, up to four per
-- output. Settings for each watermark must be defined in the preset that you
-- specify in 'Preset' for the current output.
--
-- Watermarks are added to the output video in list order — the first
-- watermark is added first, the second next, and so on — so if the preset
-- settings place all watermarks at the same location, each later watermark
-- covers the earlier ones.
joWatermarks :: Lens' JobOutput [JobWatermark]
joWatermarks = lens _joWatermarks (\jo v -> jo { _joWatermarks = v }) . _List

-- | Specifies the width of the output file in pixels.
joWidth :: Lens' JobOutput (Maybe Int)
joWidth = lens _joWidth (\jo v -> jo { _joWidth = v })
-- | Decode a 'JobOutput' from its JSON wire shape; list-valued fields default
-- to 'mempty' when absent.
instance FromJSON JobOutput where
    parseJSON = withObject "JobOutput" $ \o -> do
        albumArt     <- o .:? "AlbumArt"
        caps         <- o .:? "Captions"
        clips        <- o .:? "Composition" .!= mempty
        duration     <- o .:? "Duration"
        enc          <- o .:? "Encryption"
        height       <- o .:? "Height"
        outId        <- o .:? "Id"
        key          <- o .:? "Key"
        presetId     <- o .:? "PresetId"
        rot          <- o .:? "Rotate"
        segDur       <- o .:? "SegmentDuration"
        status       <- o .:? "Status"
        statusDetail <- o .:? "StatusDetail"
        thumbEnc     <- o .:? "ThumbnailEncryption"
        thumbPat     <- o .:? "ThumbnailPattern"
        marks        <- o .:? "Watermarks" .!= mempty
        width        <- o .:? "Width"
        pure JobOutput
            { _joAlbumArt            = albumArt
            , _joCaptions            = caps
            , _joComposition         = clips
            , _joDuration            = duration
            , _joEncryption          = enc
            , _joHeight              = height
            , _joId                  = outId
            , _joKey                 = key
            , _joPresetId            = presetId
            , _joRotate              = rot
            , _joSegmentDuration     = segDur
            , _joStatus              = status
            , _joStatusDetail        = statusDetail
            , _joThumbnailEncryption = thumbEnc
            , _joThumbnailPattern    = thumbPat
            , _joWatermarks          = marks
            , _joWidth               = width
            }
-- | Serialise a 'JobOutput' to its JSON wire shape.
instance ToJSON JobOutput where
    toJSON jo = object
        [ "Id"                  .= _joId jo
        , "Key"                 .= _joKey jo
        , "ThumbnailPattern"    .= _joThumbnailPattern jo
        , "ThumbnailEncryption" .= _joThumbnailEncryption jo
        , "Rotate"              .= _joRotate jo
        , "PresetId"            .= _joPresetId jo
        , "SegmentDuration"     .= _joSegmentDuration jo
        , "Status"              .= _joStatus jo
        , "StatusDetail"        .= _joStatusDetail jo
        , "Duration"            .= _joDuration jo
        , "Width"               .= _joWidth jo
        , "Height"              .= _joHeight jo
        , "Watermarks"          .= _joWatermarks jo
        , "AlbumArt"            .= _joAlbumArt jo
        , "Composition"         .= _joComposition jo
        , "Captions"            .= _joCaptions jo
        , "Encryption"          .= _joEncryption jo
        ]
-- | An Elastic Transcoder job: the input file, its outputs, and the pipeline
-- and status information associated with it.
data Job' = Job'
    { _jArn             :: Maybe Text                 -- ^ Amazon Resource Name (ARN) for the job
    , _jId              :: Text                       -- ^ identifier assigned by Elastic Transcoder
    , _jInput           :: JobInput                   -- ^ the file being transcoded
    , _jOutput          :: Maybe JobOutput            -- ^ single-output form; 'jOutputs' is recommended instead
    , _jOutputKeyPrefix :: Text                       -- ^ prefix prepended to the names of all files the job creates
    , _jOutputs         :: List "Outputs" JobOutput   -- ^ the output files (at most 30 per job)
    , _jPipelineId      :: Text                       -- ^ 'Id' of the pipeline used for transcoding
    , _jPlaylists       :: List "Playlists" Playlist  -- ^ master playlists (fmp4\/ts outputs only, at most 30)
    , _jStatus          :: Text                       -- ^ Submitted, Progressing, Complete, Canceled or Error
    , _jUserMetadata    :: Map Text Text              -- ^ user-defined key\/value metadata (up to 10 pairs)
    } deriving (Eq, Read, Show)
-- | Smart constructor for 'Job'' from its required fields; all optional
-- fields start out empty ('Nothing' / 'mempty') and can be set with the
-- corresponding lenses ('jArn', 'jOutput', 'jOutputs', 'jPlaylists',
-- 'jUserMetadata').
job :: Text     -- ^ 'jId'
    -> Text     -- ^ 'jPipelineId'
    -> JobInput -- ^ 'jInput'
    -> Text     -- ^ 'jOutputKeyPrefix'
    -> Text     -- ^ 'jStatus'
    -> Job'
job jobId pipelineId input keyPrefix status = Job'
    { _jArn             = Nothing
    , _jId              = jobId
    , _jInput           = input
    , _jOutput          = Nothing
    , _jOutputKeyPrefix = keyPrefix
    , _jOutputs         = mempty
    , _jPipelineId      = pipelineId
    , _jPlaylists       = mempty
    , _jStatus          = status
    , _jUserMetadata    = mempty
    }
-- | The Amazon Resource Name (ARN) for the job.
jArn :: Lens' Job' (Maybe Text)
jArn = lens _jArn (\j v -> j { _jArn = v })

-- | The identifier that Elastic Transcoder assigned to the job. Use this
-- value to get settings for the job or to delete the job.
jId :: Lens' Job' Text
jId = lens _jId (\j v -> j { _jId = v })

-- | A section of the request or response body that provides information about
-- the file that is being transcoded.
jInput :: Lens' Job' JobInput
jInput = lens _jInput (\j v -> j { _jInput = v })

-- | If one output was specified for the job, information about that output;
-- with multiple outputs, information about the first output, duplicating the
-- information listed for the first output in the Outputs object.
--
-- Outputs recommended instead. A section of the request or response body that
-- provides information about the transcoded (target) file.
jOutput :: Lens' Job' (Maybe JobOutput)
jOutput = lens _jOutput (\j v -> j { _jOutput = v })

-- | The value, if any, prepended to the names of all files that this job
-- creates, including output files, thumbnails, and playlists. Adding a / or
-- some other delimiter to the end of the 'OutputKeyPrefix' is recommended.
jOutputKeyPrefix :: Lens' Job' Text
jOutputKeyPrefix = lens _jOutputKeyPrefix (\j v -> j { _jOutputKeyPrefix = v })

-- | Information about the output files. Use the 'Outputs' syntax for all
-- jobs, even when transcoding a file into only one format; do not use both
-- the 'Outputs' and 'Output' syntaxes in the same request. A maximum of 30
-- outputs per job can be created.
--
-- If more than one output is specified for a job, the files for each output
-- are created in the order in which the outputs are specified in the job.
jOutputs :: Lens' Job' [JobOutput]
jOutputs = lens _jOutputs (\j v -> j { _jOutputs = v }) . _List

-- | The 'Id' of the pipeline used for transcoding. The pipeline determines
-- several settings, including the Amazon S3 bucket from which Elastic
-- Transcoder gets the files to transcode and the bucket into which it puts
-- the transcoded files.
jPipelineId :: Lens' Job' Text
jPipelineId = lens _jPipelineId (\j v -> j { _jPipelineId = v })

-- | (Outputs in Fragmented MP4 or MPEG-TS format only.) If the preset named
-- in 'PresetId' has a 'Container' value of fmp4 (Fragmented MP4) or ts
-- (MPEG-TS), 'Playlists' contains information about the master playlists to
-- create. The maximum number of master playlists in a job is 30.
jPlaylists :: Lens' Job' [Playlist]
jPlaylists = lens _jPlaylists (\j v -> j { _jPlaylists = v }) . _List

-- | The status of the job: 'Submitted', 'Progressing', 'Complete',
-- 'Canceled', or 'Error'.
jStatus :: Lens' Job' Text
jStatus = lens _jStatus (\j v -> j { _jStatus = v })

-- | User-defined metadata associated with the job, specified in 'key/value'
-- pairs, with up to 10 'key/value' pairs per job. Pairs are not guaranteed to
-- be returned in the same order in which they were specified.
--
-- Metadata 'keys' and 'values' must use characters from the following list:
-- '0-9', 'A-Z' and 'a-z', 'Space', and the symbols '_.:/=+-%@'.
jUserMetadata :: Lens' Job' (HashMap Text Text)
jUserMetadata = lens _jUserMetadata (\j v -> j { _jUserMetadata = v }) . _Map
-- | Decode a 'Job'' from its JSON wire shape; list- and map-valued fields
-- default to 'mempty' when absent.
instance FromJSON Job' where
    parseJSON = withObject "Job'" $ \o -> do
        arn       <- o .:? "Arn"
        jobId     <- o .:  "Id"
        input     <- o .:  "Input"
        output    <- o .:? "Output"
        keyPrefix <- o .:  "OutputKeyPrefix"
        outputs   <- o .:? "Outputs" .!= mempty
        pipeline  <- o .:  "PipelineId"
        playlists <- o .:? "Playlists" .!= mempty
        status    <- o .:  "Status"
        metadata  <- o .:? "UserMetadata" .!= mempty
        pure Job'
            { _jArn             = arn
            , _jId              = jobId
            , _jInput           = input
            , _jOutput          = output
            , _jOutputKeyPrefix = keyPrefix
            , _jOutputs         = outputs
            , _jPipelineId      = pipeline
            , _jPlaylists       = playlists
            , _jStatus          = status
            , _jUserMetadata    = metadata
            }
-- | Serialise a 'Job'' to its JSON wire shape.
instance ToJSON Job' where
    toJSON j = object
        [ "Id"              .= _jId j
        , "Arn"             .= _jArn j
        , "PipelineId"      .= _jPipelineId j
        , "Input"           .= _jInput j
        , "Output"          .= _jOutput j
        , "Outputs"         .= _jOutputs j
        , "OutputKeyPrefix" .= _jOutputKeyPrefix j
        , "Playlists"       .= _jPlaylists j
        , "Status"          .= _jStatus j
        , "UserMetadata"    .= _jUserMetadata j
        ]
-- | A sidecar caption file used as input during transcoding.
data CaptionSource = CaptionSource
    { _csEncryption :: Maybe Encryption
    , _csKey        :: Maybe Text
    , _csLabel      :: Maybe Text
    , _csLanguage   :: Maybe Text
    , _csTimeOffset :: Maybe Text
    } deriving (Eq, Read, Show)

-- | Smart constructor for 'CaptionSource' with every field 'Nothing'.
-- Populate via the lenses 'csEncryption', 'csKey', 'csLabel', 'csLanguage'
-- and 'csTimeOffset'.
captionSource :: CaptionSource
captionSource = CaptionSource
    { _csEncryption = Nothing
    , _csKey        = Nothing
    , _csLabel      = Nothing
    , _csLanguage   = Nothing
    , _csTimeOffset = Nothing
    }

-- | The encryption settings, if any, applied to the caption sources.
csEncryption :: Lens' CaptionSource (Maybe Encryption)
csEncryption = lens _csEncryption (\cs v -> cs { _csEncryption = v })

-- | The name of the sidecar caption file to include in the output file.
csKey :: Lens' CaptionSource (Maybe Text)
csKey = lens _csKey (\cs v -> cs { _csKey = v })

-- | The label of the caption shown in the player when choosing a language.
-- Putting the caption language name here, in the language of the captions, is
-- recommended.
csLabel :: Lens' CaptionSource (Maybe Text)
csLabel = lens _csLabel (\cs v -> cs { _csLabel = v })

-- | A string that specifies the language of the caption, as one of:
--
-- 2-character ISO 639-1 code
--
-- 3-character ISO 639-2 code
--
-- For more information on ISO language codes and language names, see the List
-- of ISO 639-1 codes.
csLanguage :: Lens' CaptionSource (Maybe Text)
csLanguage = lens _csLanguage (\cs v -> cs { _csLanguage = v })

-- | For clip generation or captions that do not start at the same time as the
-- associated video file, the 'TimeOffset' tells Elastic Transcoder how much
-- of the video to encode before including captions.
--
-- Specify the TimeOffset in the form [+-]SS.sss or [+-]HH:mm:SS.ss.
csTimeOffset :: Lens' CaptionSource (Maybe Text)
csTimeOffset = lens _csTimeOffset (\cs v -> cs { _csTimeOffset = v })

instance FromJSON CaptionSource where
    parseJSON = withObject "CaptionSource" $ \o -> do
        enc        <- o .:? "Encryption"
        key        <- o .:? "Key"
        label      <- o .:? "Label"
        language   <- o .:? "Language"
        timeOffset <- o .:? "TimeOffset"
        pure (CaptionSource enc key label language timeOffset)

instance ToJSON CaptionSource where
    toJSON cs = object
        [ "Key"        .= _csKey cs
        , "Language"   .= _csLanguage cs
        , "TimeOffset" .= _csTimeOffset cs
        , "Label"      .= _csLabel cs
        , "Encryption" .= _csEncryption cs
        ]
-- | A single piece of album art, its source file, and the scaling and
-- padding rules applied to it.
data Artwork = Artwork
    { _aAlbumArtFormat :: Maybe Text
    , _aEncryption     :: Maybe Encryption
    , _aInputKey       :: Maybe Text
    , _aMaxHeight      :: Maybe Text
    , _aMaxWidth       :: Maybe Text
    , _aPaddingPolicy  :: Maybe Text
    , _aSizingPolicy   :: Maybe Text
    } deriving (Eq, Read, Show)

-- | Smart constructor for 'Artwork' with every field 'Nothing'. Populate via
-- the lenses 'aAlbumArtFormat', 'aEncryption', 'aInputKey', 'aMaxHeight',
-- 'aMaxWidth', 'aPaddingPolicy' and 'aSizingPolicy'.
artwork :: Artwork
artwork = Artwork
    { _aAlbumArtFormat = Nothing
    , _aEncryption     = Nothing
    , _aInputKey       = Nothing
    , _aMaxHeight      = Nothing
    , _aMaxWidth       = Nothing
    , _aPaddingPolicy  = Nothing
    , _aSizingPolicy   = Nothing
    }

-- | The format of album art, if any. Valid formats are '.jpg' and '.png'.
aAlbumArtFormat :: Lens' Artwork (Maybe Text)
aAlbumArtFormat = lens _aAlbumArtFormat (\art v -> art { _aAlbumArtFormat = v })

-- | The encryption settings, if any, applied to the artwork.
aEncryption :: Lens' Artwork (Maybe Encryption)
aEncryption = lens _aEncryption (\art v -> art { _aEncryption = v })

-- | The name of the file to be used as album art. To determine which Amazon
-- S3 bucket contains the specified file, Elastic Transcoder checks the
-- pipeline specified by 'PipelineId'; the 'InputBucket' object in that
-- pipeline identifies the bucket.
--
-- If the file name includes a prefix, for example 'cooking/pie.jpg', include
-- the prefix in the key. If the file isn't in the specified bucket, Elastic
-- Transcoder returns an error.
aInputKey :: Lens' Artwork (Maybe Text)
aInputKey = lens _aInputKey (\art v -> art { _aInputKey = v })

-- | The maximum height of the output album art in pixels: 'auto' (600 is used
-- as the default) or an even integer between 32 and 3072, inclusive.
aMaxHeight :: Lens' Artwork (Maybe Text)
aMaxHeight = lens _aMaxHeight (\art v -> art { _aMaxHeight = v })

-- | The maximum width of the output album art in pixels: 'auto' (600 is used
-- as the default) or an even integer between 32 and 4096, inclusive.
aMaxWidth :: Lens' Artwork (Maybe Text)
aMaxWidth = lens _aMaxWidth (\art v -> art { _aMaxWidth = v })

-- | When 'PaddingPolicy' is set to 'Pad', Elastic Transcoder may add white
-- bars to the top and bottom and/or left and right sides of the output album
-- art to make its total size match the values specified for 'MaxWidth' and
-- 'MaxHeight'.
aPaddingPolicy :: Lens' Artwork (Maybe Text)
aPaddingPolicy = lens _aPaddingPolicy (\art v -> art { _aPaddingPolicy = v })

-- | Controls scaling of the output album art:
--
-- 'Fit:' scale so the art matches the value specified in either 'MaxWidth' or
-- 'MaxHeight' without exceeding the other value. 'Fill:' scale so the art
-- matches or exceeds both values, centering the art and cropping the
-- dimension (if any) that exceeds its maximum. 'Stretch:' stretch the art to
-- match the values specified for 'MaxWidth' and 'MaxHeight'; differing
-- relative proportions distort the art. 'Keep:' no scaling; art exceeding
-- either maximum is cropped. 'ShrinkToFit:' scale down so the dimensions
-- match at least one of 'MaxWidth' and 'MaxHeight' without exceeding either
-- value; the art is never scaled up. 'ShrinkToFill' scale down so the
-- dimensions match at least one of 'MaxWidth' and 'MaxHeight' without
-- dropping below either value; the art is never scaled up.
aSizingPolicy :: Lens' Artwork (Maybe Text)
aSizingPolicy = lens _aSizingPolicy (\art v -> art { _aSizingPolicy = v })

instance FromJSON Artwork where
    parseJSON = withObject "Artwork" $ \o -> do
        format   <- o .:? "AlbumArtFormat"
        enc      <- o .:? "Encryption"
        inputKey <- o .:? "InputKey"
        maxH     <- o .:? "MaxHeight"
        maxW     <- o .:? "MaxWidth"
        padding  <- o .:? "PaddingPolicy"
        sizing   <- o .:? "SizingPolicy"
        pure (Artwork format enc inputKey maxH maxW padding sizing)

instance ToJSON Artwork where
    toJSON art = object
        [ "InputKey"       .= _aInputKey art
        , "MaxWidth"       .= _aMaxWidth art
        , "MaxHeight"      .= _aMaxHeight art
        , "SizingPolicy"   .= _aSizingPolicy art
        , "PaddingPolicy"  .= _aPaddingPolicy art
        , "AlbumArtFormat" .= _aAlbumArtFormat art
        , "Encryption"     .= _aEncryption art
        ]
-- | The start time and duration of a clip excerpted from an input file.
data TimeSpan = TimeSpan
    { _tsDuration  :: Maybe Text
    , _tsStartTime :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Smart constructor for 'TimeSpan' with both fields 'Nothing'. Populate via
-- the lenses 'tsDuration' and 'tsStartTime'.
timeSpan :: TimeSpan
timeSpan = TimeSpan
    { _tsDuration  = Nothing
    , _tsStartTime = Nothing
    }

-- | The duration of the clip. The format can be either HH:mm:ss.SSS (maximum
-- value: 23:59:59.999; SSS is thousandths of a second) or sssss.SSS (maximum
-- value: 86399.999). Without a value, Elastic Transcoder creates an output
-- file from StartTime to the end of the file.
--
-- A value longer than the duration of the input file causes the file to be
-- transcoded with a warning message returned.
tsDuration :: Lens' TimeSpan (Maybe Text)
tsDuration = lens _tsDuration (\ts v -> ts { _tsDuration = v })

-- | The place in the input file where the clip starts. The format can be
-- either HH:mm:ss.SSS (maximum value: 23:59:59.999; SSS is thousandths of a
-- second) or sssss.SSS (maximum value: 86399.999). Without a value, Elastic
-- Transcoder starts at the beginning of the input file.
tsStartTime :: Lens' TimeSpan (Maybe Text)
tsStartTime = lens _tsStartTime (\ts v -> ts { _tsStartTime = v })

instance FromJSON TimeSpan where
    parseJSON = withObject "TimeSpan" $ \o -> do
        duration  <- o .:? "Duration"
        startTime <- o .:? "StartTime"
        pure (TimeSpan duration startTime)

instance ToJSON TimeSpan where
    toJSON ts = object
        [ "StartTime" .= _tsStartTime ts
        , "Duration"  .= _tsDuration ts
        ]
-- | Per-output settings supplied when creating a transcoding job. The fields
-- mirror the request-side fields of 'JobOutput'.
data CreateJobOutput = CreateJobOutput
    { _cjoAlbumArt            :: Maybe JobAlbumArt       -- ^ album art to add during transcoding (up to twenty artworks per output)
    , _cjoCaptions            :: Maybe Captions          -- ^ caption transcoding settings
    , _cjoComposition         :: List "Composition" Clip -- ^ clip(s) to excerpt from the input
    , _cjoEncryption          :: Maybe Encryption        -- ^ encryption for the output file
    , _cjoKey                 :: Maybe Text              -- ^ name to assign to the transcoded file
    , _cjoPresetId            :: Maybe Text              -- ^ preset 'Id' to use for this output
    , _cjoRotate              :: Maybe Text              -- ^ clockwise rotation relative to the input
    , _cjoSegmentDuration     :: Maybe Text              -- ^ target segment duration (fmp4\/ts containers only)
    , _cjoThumbnailEncryption :: Maybe Encryption        -- ^ encryption for the thumbnails
    , _cjoThumbnailPattern    :: Maybe Text              -- ^ thumbnail file-name pattern
    , _cjoWatermarks          :: List "Watermarks" JobWatermark -- ^ watermarks to add during transcoding
    } deriving (Eq, Read, Show)
-- | Smart constructor for 'CreateJobOutput': every field starts out 'Nothing'
-- or 'mempty'. Set individual fields with the lenses 'cjoAlbumArt',
-- 'cjoCaptions', 'cjoComposition', 'cjoEncryption', 'cjoKey', 'cjoPresetId',
-- 'cjoRotate', 'cjoSegmentDuration', 'cjoThumbnailEncryption',
-- 'cjoThumbnailPattern' and 'cjoWatermarks'.
createJobOutput :: CreateJobOutput
createJobOutput = CreateJobOutput
    { _cjoAlbumArt            = Nothing
    , _cjoCaptions            = Nothing
    , _cjoComposition         = mempty
    , _cjoEncryption          = Nothing
    , _cjoKey                 = Nothing
    , _cjoPresetId            = Nothing
    , _cjoRotate              = Nothing
    , _cjoSegmentDuration     = Nothing
    , _cjoThumbnailEncryption = Nothing
    , _cjoThumbnailPattern    = Nothing
    , _cjoWatermarks          = mempty
    }
-- | Information about the album art that you want Elastic Transcoder to add to
-- the file during transcoding. You can specify up to twenty album artworks for
-- each output. Settings for each artwork must be defined in the job for the
-- current output.
cjoAlbumArt :: Lens' CreateJobOutput (Maybe JobAlbumArt)
cjoAlbumArt = lens _cjoAlbumArt (\s a -> s { _cjoAlbumArt = a })
-- | You can configure Elastic Transcoder to transcode captions, or subtitles,
-- from one format to another. All captions must be in UTF-8. Elastic Transcoder
-- supports two types of captions:
--
-- Embedded: Embedded captions are included in the same file as the audio and
-- video. Elastic Transcoder supports only one embedded caption per language, to
-- a maximum of 300 embedded captions per file.
--
-- Valid input values include: 'CEA-608 (EIA-608', first non-empty channel only), 'CEA-708 (EIA-708', first non-empty channel only), and 'mov-text'
--
-- Valid outputs include: 'mov-text'
--
-- Elastic Transcoder supports a maximum of one embedded format per output.
--
-- Sidecar: Sidecar captions are kept in a separate metadata file from the
-- audio and video data. Sidecar captions require a player that is capable of
-- understanding the relationship between the video file and the sidecar file.
-- Elastic Transcoder supports only one sidecar caption per language, to a
-- maximum of 20 sidecar captions per file.
--
-- Valid input values include: 'dfxp' (first div element only), 'ebu-tt', 'scc', 'smpt',
-- 'srt', 'ttml' (first div element only), and 'webvtt'
--
-- Valid outputs include: 'dfxp' (first div element only), 'scc', 'srt', and 'webvtt'.
--
-- If you want ttml or smpte-tt compatible captions, specify dfxp as your
-- output format.
--
-- Elastic Transcoder does not support OCR (Optical Character Recognition),
-- does not accept pictures as a valid input for captions, and is not available
-- for audio-only transcoding. Elastic Transcoder does not preserve text
-- formatting (for example, italics) during the transcoding process.
--
-- To remove captions or leave the captions empty, set 'Captions' to null. To
-- pass through existing captions unchanged, set the 'MergePolicy' to 'MergeRetain',
-- and pass in a null 'CaptionSources' array.
--
-- For more information on embedded files, see the Subtitles Wikipedia page.
--
-- For more information on sidecar files, see the Extensible Metadata Platform
-- and Sidecar file Wikipedia pages.
cjoCaptions :: Lens' CreateJobOutput (Maybe Captions)
cjoCaptions = lens _cjoCaptions (\s a -> s { _cjoCaptions = a })
-- | You can create an output file that contains an excerpt from the input file.
-- This excerpt, called a clip, can come from the beginning, middle, or end of
-- the file. The Composition object contains settings for the clips that make up
-- an output file. For the current release, you can only specify settings for a
-- single clip per output file. The Composition object cannot be null.
cjoComposition :: Lens' CreateJobOutput [Clip]
cjoComposition = lens _cjoComposition (\s a -> s { _cjoComposition = a }) . _List
-- | You can specify encryption settings for any output files that you want to use
-- for a transcoding job. This includes the output file and any watermarks,
-- thumbnails, album art, or captions that you want to use. You must specify
-- encryption settings for each file individually.
cjoEncryption :: Lens' CreateJobOutput (Maybe Encryption)
cjoEncryption = lens _cjoEncryption (\s a -> s { _cjoEncryption = a })
-- | The name to assign to the transcoded file. Elastic Transcoder saves the file
-- in the Amazon S3 bucket specified by the 'OutputBucket' object in the pipeline
-- that is specified by the pipeline ID. If a file with the specified name
-- already exists in the output bucket, the job fails.
cjoKey :: Lens' CreateJobOutput (Maybe Text)
cjoKey = lens _cjoKey (\s a -> s { _cjoKey = a })
-- | The 'Id' of the preset to use for this job. The preset determines the audio,
-- video, and thumbnail settings that Elastic Transcoder uses for transcoding.
cjoPresetId :: Lens' CreateJobOutput (Maybe Text)
cjoPresetId = lens _cjoPresetId (\s a -> s { _cjoPresetId = a })
-- | The number of degrees clockwise by which you want Elastic Transcoder to
-- rotate the output relative to the input. Enter one of the following values: 'auto', '0', '90', '180', '270'. The value 'auto' generally works only if the file that
-- you're transcoding contains rotation metadata.
cjoRotate :: Lens' CreateJobOutput (Maybe Text)
cjoRotate = lens _cjoRotate (\s a -> s { _cjoRotate = a })
-- | (Outputs in Fragmented MP4 or MPEG-TS format only.If you specify a preset in 'PresetId' for which the value of 'Container' is 'fmp4' (Fragmented MP4) or 'ts' (MPEG-TS), 'SegmentDuration' is the target maximum duration of each segment in seconds. For 'HLSv3' format
-- playlists, each media segment is stored in a separate '.ts' file. For 'HLSv4' and 'Smooth' playlists, all media segments for an output are stored in a single
-- file. Each segment is approximately the length of the 'SegmentDuration', though
-- individual segments might be shorter or longer.
--
-- The range of valid values is 1 to 60 seconds. If the duration of the video
-- is not evenly divisible by 'SegmentDuration', the duration of the last segment
-- is the remainder of total length/SegmentDuration.
--
-- Elastic Transcoder creates an output-specific playlist for each output 'HLS'
-- output that you specify in OutputKeys. To add an output to the master
-- playlist for this job, include it in the 'OutputKeys' of the associated
-- playlist.
cjoSegmentDuration :: Lens' CreateJobOutput (Maybe Text)
cjoSegmentDuration =
lens _cjoSegmentDuration (\s a -> s { _cjoSegmentDuration = a })
-- | The encryption settings, if any, that you want Elastic Transcoder to apply to
-- your thumbnail.
cjoThumbnailEncryption :: Lens' CreateJobOutput (Maybe Encryption)
cjoThumbnailEncryption =
lens _cjoThumbnailEncryption (\s a -> s { _cjoThumbnailEncryption = a })
-- | Whether you want Elastic Transcoder to create thumbnails for your videos and,
-- if so, how you want Elastic Transcoder to name the files.
--
-- If you don't want Elastic Transcoder to create thumbnails, specify "".
--
-- If you do want Elastic Transcoder to create thumbnails, specify the
-- information that you want to include in the file name for each thumbnail. You
-- can specify the following values in any sequence:
--
-- '{count}' (Required): If you want to create thumbnails, you must include '{count}' in the 'ThumbnailPattern' object. Wherever you specify '{count}', Elastic
-- Transcoder adds a five-digit sequence number (beginning with 00001) to
-- thumbnail file names. The number indicates where a given thumbnail appears in
-- the sequence of thumbnails for a transcoded file.
--
-- If you specify a literal value and/or '{resolution}' but you omit '{count}',
-- Elastic Transcoder returns a validation error and does not create the job.
-- Literal values (Optional): You can specify literal values anywhere in the 'ThumbnailPattern' object. For example, you can include them as a file name prefix or as a
-- delimiter between '{resolution}' and '{count}'.
--
-- '{resolution}' (Optional): If you want Elastic Transcoder to include the
-- resolution in the file name, include '{resolution}' in the 'ThumbnailPattern'
-- object.
--
-- When creating thumbnails, Elastic Transcoder automatically saves the files
-- in the format (.jpg or .png) that appears in the preset that you specified in
-- the 'PresetID' value of 'CreateJobOutput'. Elastic Transcoder also appends the
-- applicable file name extension.
cjoThumbnailPattern :: Lens' CreateJobOutput (Maybe Text)
cjoThumbnailPattern =
lens _cjoThumbnailPattern (\s a -> s { _cjoThumbnailPattern = a })
-- | Information about the watermarks that you want Elastic Transcoder to add to
-- the video during transcoding. You can specify up to four watermarks for each
-- output. Settings for each watermark must be defined in the preset for the
-- current output.
cjoWatermarks :: Lens' CreateJobOutput [JobWatermark]
cjoWatermarks = lens _cjoWatermarks (\s a -> s { _cjoWatermarks = a }) . _List
instance FromJSON CreateJobOutput where
parseJSON = withObject "CreateJobOutput" $ \o -> CreateJobOutput
<$> o .:? "AlbumArt"
<*> o .:? "Captions"
<*> o .:? "Composition" .!= mempty
<*> o .:? "Encryption"
<*> o .:? "Key"
<*> o .:? "PresetId"
<*> o .:? "Rotate"
<*> o .:? "SegmentDuration"
<*> o .:? "ThumbnailEncryption"
<*> o .:? "ThumbnailPattern"
<*> o .:? "Watermarks" .!= mempty
instance ToJSON CreateJobOutput where
toJSON CreateJobOutput{..} = object
[ "Key" .= _cjoKey
, "ThumbnailPattern" .= _cjoThumbnailPattern
, "ThumbnailEncryption" .= _cjoThumbnailEncryption
, "Rotate" .= _cjoRotate
, "PresetId" .= _cjoPresetId
, "SegmentDuration" .= _cjoSegmentDuration
, "Watermarks" .= _cjoWatermarks
, "AlbumArt" .= _cjoAlbumArt
, "Composition" .= _cjoComposition
, "Captions" .= _cjoCaptions
, "Encryption" .= _cjoEncryption
]
-- | Audio transcoding settings for an output: codec, codec options, bit rate,
-- sample rate and channel count. All fields are optional.
data AudioParameters = AudioParameters
    { _apBitRate      :: Maybe Text
    , _apChannels     :: Maybe Text
    , _apCodec        :: Maybe Text
    , _apCodecOptions :: Maybe AudioCodecOptions
    , _apSampleRate   :: Maybe Text
    } deriving (Eq, Read, Show)
-- | 'AudioParameters' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'apBitRate' @::@ 'Maybe' 'Text'
--
-- * 'apChannels' @::@ 'Maybe' 'Text'
--
-- * 'apCodec' @::@ 'Maybe' 'Text'
--
-- * 'apCodecOptions' @::@ 'Maybe' 'AudioCodecOptions'
--
-- * 'apSampleRate' @::@ 'Maybe' 'Text'
--
audioParameters :: AudioParameters
audioParameters = AudioParameters
    { _apBitRate      = Nothing
    , _apChannels     = Nothing
    , _apCodec        = Nothing
    , _apCodecOptions = Nothing
    , _apSampleRate   = Nothing
    }
-- | The bit rate of the audio stream in the output file, in kilobits/second.
-- Enter an integer between 64 and 320, inclusive.
apBitRate :: Lens' AudioParameters (Maybe Text)
apBitRate = lens _apBitRate setField
  where
    setField s a = s { _apBitRate = a }
-- | The number of audio channels in the output file. Valid values include:
--
-- 'auto', '0', '1', '2'
--
-- If you specify 'auto', Elastic Transcoder automatically detects the number of
-- channels in the input file.
apChannels :: Lens' AudioParameters (Maybe Text)
apChannels = lens _apChannels setField
  where
    setField s a = s { _apChannels = a }
-- | The audio codec for the output file. Valid values include 'aac', 'mp3', and 'vorbis'
-- .
apCodec :: Lens' AudioParameters (Maybe Text)
apCodec = lens _apCodec setField
  where
    setField s a = s { _apCodec = a }
-- | If you specified 'AAC' for 'Audio:Codec', this is the 'AAC' compression profile to
-- use. Valid values include:
--
-- 'auto', 'AAC-LC', 'HE-AAC', 'HE-AACv2'
--
-- If you specify 'auto', Elastic Transcoder chooses a profile based on the bit
-- rate of the output file.
apCodecOptions :: Lens' AudioParameters (Maybe AudioCodecOptions)
apCodecOptions = lens _apCodecOptions setField
  where
    setField s a = s { _apCodecOptions = a }
-- | The sample rate of the audio stream in the output file, in Hertz. Valid
-- values include:
--
-- 'auto', '22050', '32000', '44100', '48000', '96000'
--
-- If you specify 'auto', Elastic Transcoder automatically detects the sample
-- rate.
apSampleRate :: Lens' AudioParameters (Maybe Text)
apSampleRate = lens _apSampleRate setField
  where
    setField s a = s { _apSampleRate = a }
instance FromJSON AudioParameters where
    parseJSON = withObject "AudioParameters" parse
      where
        parse o = AudioParameters
            <$> o .:? "BitRate"
            <*> o .:? "Channels"
            <*> o .:? "Codec"
            <*> o .:? "CodecOptions"
            <*> o .:? "SampleRate"
instance ToJSON AudioParameters where
    toJSON ap = object
        [ "Codec"        .= _apCodec        ap
        , "SampleRate"   .= _apSampleRate   ap
        , "BitRate"      .= _apBitRate      ap
        , "Channels"     .= _apChannels     ap
        , "CodecOptions" .= _apCodecOptions ap
        ]
-- | Thumbnail-generation settings for an output: format, interval and sizing.
-- 'Resolution'/'AspectRatio' and the 'MaxWidth'/'MaxHeight'/'SizingPolicy'/
-- 'PaddingPolicy' group are mutually exclusive ways to control dimensions.
data Thumbnails = Thumbnails
    { _tAspectRatio   :: Maybe Text
    , _tFormat        :: Maybe Text
    , _tInterval      :: Maybe Text
    , _tMaxHeight     :: Maybe Text
    , _tMaxWidth      :: Maybe Text
    , _tPaddingPolicy :: Maybe Text
    , _tResolution    :: Maybe Text
    , _tSizingPolicy  :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'Thumbnails' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'tAspectRatio' @::@ 'Maybe' 'Text'
--
-- * 'tFormat' @::@ 'Maybe' 'Text'
--
-- * 'tInterval' @::@ 'Maybe' 'Text'
--
-- * 'tMaxHeight' @::@ 'Maybe' 'Text'
--
-- * 'tMaxWidth' @::@ 'Maybe' 'Text'
--
-- * 'tPaddingPolicy' @::@ 'Maybe' 'Text'
--
-- * 'tResolution' @::@ 'Maybe' 'Text'
--
-- * 'tSizingPolicy' @::@ 'Maybe' 'Text'
--
thumbnails :: Thumbnails
thumbnails = Thumbnails
    { _tAspectRatio   = Nothing
    , _tFormat        = Nothing
    , _tInterval      = Nothing
    , _tMaxHeight     = Nothing
    , _tMaxWidth      = Nothing
    , _tPaddingPolicy = Nothing
    , _tResolution    = Nothing
    , _tSizingPolicy  = Nothing
    }
-- | To better control resolution and aspect ratio of thumbnails, we recommend
-- that you use the values 'MaxWidth', 'MaxHeight', 'SizingPolicy', and 'PaddingPolicy'
-- instead of 'Resolution' and 'AspectRatio'. The two groups of settings are
-- mutually exclusive. Do not use them together.
--
-- The aspect ratio of thumbnails. Valid values include:
--
-- 'auto', '1:1', '4:3', '3:2', '16:9'
--
-- If you specify 'auto', Elastic Transcoder tries to preserve the aspect ratio
-- of the video in the output file.
tAspectRatio :: Lens' Thumbnails (Maybe Text)
tAspectRatio = lens _tAspectRatio setField
  where
    setField s a = s { _tAspectRatio = a }
-- | The format of thumbnails, if any. Valid values are 'jpg' and 'png'.
--
-- You specify whether you want Elastic Transcoder to create thumbnails when
-- you create a job.
tFormat :: Lens' Thumbnails (Maybe Text)
tFormat = lens _tFormat setField
  where
    setField s a = s { _tFormat = a }
-- | The approximate number of seconds between thumbnails. Specify an integer
-- value.
tInterval :: Lens' Thumbnails (Maybe Text)
tInterval = lens _tInterval setField
  where
    setField s a = s { _tInterval = a }
-- | The maximum height of thumbnails in pixels. If you specify auto, Elastic
-- Transcoder uses 1080 (Full HD) as the default value. If you specify a numeric
-- value, enter an even integer between 32 and 3072.
tMaxHeight :: Lens' Thumbnails (Maybe Text)
tMaxHeight = lens _tMaxHeight setField
  where
    setField s a = s { _tMaxHeight = a }
-- | The maximum width of thumbnails in pixels. If you specify auto, Elastic
-- Transcoder uses 1920 (Full HD) as the default value. If you specify a numeric
-- value, enter an even integer between 32 and 4096.
tMaxWidth :: Lens' Thumbnails (Maybe Text)
tMaxWidth = lens _tMaxWidth setField
  where
    setField s a = s { _tMaxWidth = a }
-- | When you set 'PaddingPolicy' to 'Pad', Elastic Transcoder may add black bars to
-- the top and bottom and/or left and right sides of thumbnails to make the
-- total size of the thumbnails match the values that you specified for
-- thumbnail 'MaxWidth' and 'MaxHeight' settings.
tPaddingPolicy :: Lens' Thumbnails (Maybe Text)
tPaddingPolicy = lens _tPaddingPolicy setField
  where
    setField s a = s { _tPaddingPolicy = a }
-- | To better control resolution and aspect ratio of thumbnails, we recommend
-- that you use the values 'MaxWidth', 'MaxHeight', 'SizingPolicy', and 'PaddingPolicy'
-- instead of 'Resolution' and 'AspectRatio'. The two groups of settings are
-- mutually exclusive. Do not use them together.
--
-- The width and height of thumbnail files in pixels. Specify a value in the
-- format '/width/ x '/height/ where both values are even integers. The values cannot
-- exceed the width and height that you specified in the 'Video:Resolution' object.
tResolution :: Lens' Thumbnails (Maybe Text)
tResolution = lens _tResolution setField
  where
    setField s a = s { _tResolution = a }
-- | Specify one of the following values to control scaling of thumbnails:
--
-- 'Fit': Elastic Transcoder scales thumbnails so they match the value that
-- you specified in thumbnail MaxWidth or MaxHeight settings without exceeding
-- the other value. 'Fill': Elastic Transcoder scales thumbnails so they match
-- the value that you specified in thumbnail 'MaxWidth' or 'MaxHeight' settings and
-- matches or exceeds the other value. Elastic Transcoder centers the image in
-- thumbnails and then crops in the dimension (if any) that exceeds the maximum
-- value. 'Stretch': Elastic Transcoder stretches thumbnails to match the values
-- that you specified for thumbnail 'MaxWidth' and 'MaxHeight' settings. If the
-- relative proportions of the input video and thumbnails are different, the
-- thumbnails will be distorted. 'Keep': Elastic Transcoder does not scale
-- thumbnails. If either dimension of the input video exceeds the values that
-- you specified for thumbnail 'MaxWidth' and 'MaxHeight' settings, Elastic
-- Transcoder crops the thumbnails. 'ShrinkToFit': Elastic Transcoder scales
-- thumbnails down so that their dimensions match the values that you specified
-- for at least one of thumbnail 'MaxWidth' and 'MaxHeight' without exceeding either
-- value. If you specify this option, Elastic Transcoder does not scale
-- thumbnails up. 'ShrinkToFill': Elastic Transcoder scales thumbnails down so
-- that their dimensions match the values that you specified for at least one of 'MaxWidth' and 'MaxHeight' without dropping below either value. If you specify
-- this option, Elastic Transcoder does not scale thumbnails up.
tSizingPolicy :: Lens' Thumbnails (Maybe Text)
tSizingPolicy = lens _tSizingPolicy setField
  where
    setField s a = s { _tSizingPolicy = a }
instance FromJSON Thumbnails where
    parseJSON = withObject "Thumbnails" parse
      where
        parse o = Thumbnails
            <$> o .:? "AspectRatio"
            <*> o .:? "Format"
            <*> o .:? "Interval"
            <*> o .:? "MaxHeight"
            <*> o .:? "MaxWidth"
            <*> o .:? "PaddingPolicy"
            <*> o .:? "Resolution"
            <*> o .:? "SizingPolicy"
instance ToJSON Thumbnails where
    toJSON t = object
        [ "Format"        .= _tFormat        t
        , "Interval"      .= _tInterval      t
        , "Resolution"    .= _tResolution    t
        , "AspectRatio"   .= _tAspectRatio   t
        , "MaxWidth"      .= _tMaxWidth      t
        , "MaxHeight"     .= _tMaxHeight     t
        , "SizingPolicy"  .= _tSizingPolicy  t
        , "PaddingPolicy" .= _tPaddingPolicy t
        ]
-- | Encryption settings applied to an input or output file: the mode plus the
-- base64-encoded key, key digest and initialization vector where required.
data Encryption = Encryption
    { _eInitializationVector :: Maybe Text
    , _eKey                  :: Maybe Text
    , _eKeyMd5               :: Maybe Text
    , _eMode                 :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'Encryption' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'eInitializationVector' @::@ 'Maybe' 'Text'
--
-- * 'eKey' @::@ 'Maybe' 'Text'
--
-- * 'eKeyMd5' @::@ 'Maybe' 'Text'
--
-- * 'eMode' @::@ 'Maybe' 'Text'
--
encryption :: Encryption
encryption = Encryption
    { _eInitializationVector = Nothing
    , _eKey                  = Nothing
    , _eKeyMd5               = Nothing
    , _eMode                 = Nothing
    }
-- | The series of random bits created by a random bit generator, unique for every
-- encryption operation, that you used to encrypt your input files or that you
-- want Elastic Transcoder to use to encrypt your output files. The
-- initialization vector must be base64-encoded, and it must be exactly 16 bytes
-- long before being base64-encoded.
eInitializationVector :: Lens' Encryption (Maybe Text)
eInitializationVector = lens _eInitializationVector setField
  where
    setField s a = s { _eInitializationVector = a }
-- | The data encryption key that you want Elastic Transcoder to use to encrypt
-- your output file, or that was used to encrypt your input file. The key must
-- be base64-encoded and it must be one of the following bit lengths before
-- being base64-encoded:
--
-- '128', '192', or '256'.
--
-- The key must also be encrypted by using the Amazon Key Management Service.
eKey :: Lens' Encryption (Maybe Text)
eKey = lens _eKey setField
  where
    setField s a = s { _eKey = a }
-- | The MD5 digest of the key that you used to encrypt your input file, or that
-- you want Elastic Transcoder to use to encrypt your output file. Elastic
-- Transcoder uses the key digest as a checksum to make sure your key was not
-- corrupted in transit. The key MD5 must be base64-encoded, and it must be
-- exactly 16 bytes long before being base64-encoded.
eKeyMd5 :: Lens' Encryption (Maybe Text)
eKeyMd5 = lens _eKeyMd5 setField
  where
    setField s a = s { _eKeyMd5 = a }
-- | The specific server-side encryption mode that you want Elastic Transcoder to
-- use when decrypting your input files or encrypting your output files. Elastic
-- Transcoder supports the following options:
--
-- S3: Amazon S3 creates and manages the keys used for encrypting your files.
--
-- S3-AWS-KMS: Amazon S3 calls the Amazon Key Management Service, which creates
-- and manages the keys that are used for encrypting your files. If you specify 'S3-AWS-KMS' and you don't want to use the default key, you must add the AWS-KMS key that
-- you want to use to your pipeline.
--
-- AES-CBC-PKCS7: A padded cipher-block mode of operation originally used for
-- HLS files.
--
-- AES-CTR: AES Counter Mode.
--
-- AES-GCM: AES Galois Counter Mode, a mode of operation that is an
-- authenticated encryption format, meaning that a file, key, or initialization
-- vector that has been tampered with will fail the decryption process.
--
-- For all three AES options, you must provide the following settings, which
-- must be base64-encoded:
--
-- Key
--
-- Key MD5
--
-- Initialization Vector
--
-- For the AES modes, your private encryption keys and your unencrypted data
-- are never stored by AWS; therefore, it is important that you safely manage
-- your encryption keys. If you lose them, you won't be able to unencrypt your
-- data.
--
eMode :: Lens' Encryption (Maybe Text)
eMode = lens _eMode setField
  where
    setField s a = s { _eMode = a }
instance FromJSON Encryption where
    parseJSON = withObject "Encryption" parse
      where
        parse o = Encryption
            <$> o .:? "InitializationVector"
            <*> o .:? "Key"
            <*> o .:? "KeyMd5"
            <*> o .:? "Mode"
instance ToJSON Encryption where
    toJSON e = object
        [ "Mode"                 .= _eMode                 e
        , "Key"                  .= _eKey                  e
        , "KeyMd5"               .= _eKeyMd5               e
        , "InitializationVector" .= _eInitializationVector e
        ]
-- | Album art to attach to an audio output: the artwork files themselves plus
-- the policy describing how they merge with any existing artwork.
data JobAlbumArt = JobAlbumArt
    { _jaaArtwork     :: List "Artwork" Artwork
    , _jaaMergePolicy :: Maybe Text
    } deriving (Eq, Read, Show)
-- | 'JobAlbumArt' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'jaaArtwork' @::@ ['Artwork']
--
-- * 'jaaMergePolicy' @::@ 'Maybe' 'Text'
--
jobAlbumArt :: JobAlbumArt
jobAlbumArt = JobAlbumArt
    { _jaaArtwork     = mempty
    , _jaaMergePolicy = Nothing
    }
-- | The file to be used as album art. There can be multiple artworks associated
-- with an audio file, to a maximum of 20. Valid formats are '.jpg' and '.png'
jaaArtwork :: Lens' JobAlbumArt [Artwork]
jaaArtwork = lens _jaaArtwork setField . _List
  where
    -- The '_List' iso unwraps the tagged 'List' newtype to a plain list.
    setField s a = s { _jaaArtwork = a }
-- | A policy that determines how Elastic Transcoder will handle the existence of
-- multiple album artwork files.
--
-- 'Replace:' The specified album art will replace any existing album art. 'Prepend:' The specified album art will be placed in front of any existing album art. 'Append:' The specified album art will be placed after any existing album art. 'Fallback:' If the original input file contains artwork, Elastic Transcoder will use
-- that artwork for the output. If the original input does not contain artwork,
-- Elastic Transcoder will use the specified album art file.
jaaMergePolicy :: Lens' JobAlbumArt (Maybe Text)
jaaMergePolicy = lens _jaaMergePolicy setField
  where
    setField s a = s { _jaaMergePolicy = a }
instance FromJSON JobAlbumArt where
    parseJSON = withObject "JobAlbumArt" parse
      where
        parse o = JobAlbumArt
            <$> o .:? "Artwork" .!= mempty
            <*> o .:? "MergePolicy"
instance ToJSON JobAlbumArt where
    toJSON jaa = object
        [ "MergePolicy" .= _jaaMergePolicy jaa
        , "Artwork"     .= _jaaArtwork     jaa
        ]
-- | A watermark to burn into an output: the graphic's S3 key, optional
-- encryption settings, and the preset watermark ID that positions it.
data JobWatermark = JobWatermark
    { _jwEncryption        :: Maybe Encryption
    , _jwInputKey          :: Maybe Text
    , _jwPresetWatermarkId :: Maybe Text
    } deriving (Eq, Read, Show)
-- | 'JobWatermark' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'jwEncryption' @::@ 'Maybe' 'Encryption'
--
-- * 'jwInputKey' @::@ 'Maybe' 'Text'
--
-- * 'jwPresetWatermarkId' @::@ 'Maybe' 'Text'
--
jobWatermark :: JobWatermark
jobWatermark = JobWatermark
    { _jwEncryption        = Nothing
    , _jwInputKey          = Nothing
    , _jwPresetWatermarkId = Nothing
    }
-- | The encryption settings, if any, that you want Elastic Transcoder to apply to
-- your watermarks.
jwEncryption :: Lens' JobWatermark (Maybe Encryption)
jwEncryption = lens _jwEncryption setField
  where
    setField s a = s { _jwEncryption = a }
-- | The name of the .png or .jpg file that you want to use for the watermark. To
-- determine which Amazon S3 bucket contains the specified file, Elastic
-- Transcoder checks the pipeline specified by 'Pipeline'; the 'Input Bucket' object
-- in that pipeline identifies the bucket.
--
-- If the file name includes a prefix, for example, logos/128x64.png, include
-- the prefix in the key. If the file isn't in the specified bucket, Elastic
-- Transcoder returns an error.
jwInputKey :: Lens' JobWatermark (Maybe Text)
jwInputKey = lens _jwInputKey setField
  where
    setField s a = s { _jwInputKey = a }
-- | The ID of the watermark settings that Elastic Transcoder uses to add
-- watermarks to the video during transcoding. The settings are in the preset
-- specified by Preset for the current output. In that preset, the value of
-- Watermarks Id tells Elastic Transcoder which settings to use.
jwPresetWatermarkId :: Lens' JobWatermark (Maybe Text)
jwPresetWatermarkId = lens _jwPresetWatermarkId setField
  where
    setField s a = s { _jwPresetWatermarkId = a }
instance FromJSON JobWatermark where
    parseJSON = withObject "JobWatermark" parse
      where
        parse o = JobWatermark
            <$> o .:? "Encryption"
            <*> o .:? "InputKey"
            <*> o .:? "PresetWatermarkId"
instance ToJSON JobWatermark where
    toJSON jw = object
        [ "PresetWatermarkId" .= _jwPresetWatermarkId jw
        , "InputKey"          .= _jwInputKey          jw
        , "Encryption"        .= _jwEncryption        jw
        ]
-- | An Elastic Transcoder pipeline: the queue configuration that ties together
-- input and output buckets, an IAM role, notification topics and per-output
-- storage/permission settings.
data Pipeline = Pipeline
    { _pArn             :: Maybe Text
    , _pAwsKmsKeyArn    :: Maybe Text
    , _pContentConfig   :: Maybe PipelineOutputConfig
    , _pId              :: Maybe Text
    , _pInputBucket     :: Maybe Text
    , _pName            :: Maybe Text
    , _pNotifications   :: Maybe Notifications
    , _pOutputBucket    :: Maybe Text
    , _pRole            :: Maybe Text
    , _pStatus          :: Maybe Text
    , _pThumbnailConfig :: Maybe PipelineOutputConfig
    } deriving (Eq, Read, Show)
-- | 'Pipeline' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'pArn' @::@ 'Maybe' 'Text'
--
-- * 'pAwsKmsKeyArn' @::@ 'Maybe' 'Text'
--
-- * 'pContentConfig' @::@ 'Maybe' 'PipelineOutputConfig'
--
-- * 'pId' @::@ 'Maybe' 'Text'
--
-- * 'pInputBucket' @::@ 'Maybe' 'Text'
--
-- * 'pName' @::@ 'Maybe' 'Text'
--
-- * 'pNotifications' @::@ 'Maybe' 'Notifications'
--
-- * 'pOutputBucket' @::@ 'Maybe' 'Text'
--
-- * 'pRole' @::@ 'Maybe' 'Text'
--
-- * 'pStatus' @::@ 'Maybe' 'Text'
--
-- * 'pThumbnailConfig' @::@ 'Maybe' 'PipelineOutputConfig'
--
pipeline :: Pipeline
pipeline = Pipeline
    { _pArn             = Nothing
    , _pAwsKmsKeyArn    = Nothing
    , _pContentConfig   = Nothing
    , _pId              = Nothing
    , _pInputBucket     = Nothing
    , _pName            = Nothing
    , _pNotifications   = Nothing
    , _pOutputBucket    = Nothing
    , _pRole            = Nothing
    , _pStatus          = Nothing
    , _pThumbnailConfig = Nothing
    }
-- | The Amazon Resource Name (ARN) for the pipeline.
pArn :: Lens' Pipeline (Maybe Text)
pArn = lens _pArn setField
  where
    setField s a = s { _pArn = a }
-- | The AWS Key Management Service (AWS KMS) key that you want to use with this
-- pipeline.
--
-- If you use either 'S3' or 'S3-AWS-KMS' as your 'Encryption:Mode', you don't need
-- to provide a key with your job because a default key, known as an AWS-KMS
-- key, is created for you automatically. You need to provide an AWS-KMS key
-- only if you want to use a non-default AWS-KMS key, or if you are using an 'Encryption:Mode' of 'AES-PKCS7', 'AES-CTR', or 'AES-GCM'.
pAwsKmsKeyArn :: Lens' Pipeline (Maybe Text)
pAwsKmsKeyArn = lens _pAwsKmsKeyArn setField
  where
    setField s a = s { _pAwsKmsKeyArn = a }
-- | Information about the Amazon S3 bucket in which you want Elastic Transcoder
-- to save transcoded files and playlists. Either you specify both 'ContentConfig'
-- and 'ThumbnailConfig', or you specify 'OutputBucket'.
--
-- Bucket: The Amazon S3 bucket in which you want Elastic Transcoder to save
-- transcoded files and playlists. Permissions: A list of the users and/or
-- predefined Amazon S3 groups you want to have access to transcoded files and
-- playlists, and the type of access that you want them to have. GranteeType:
-- The type of value that appears in the 'Grantee' object: 'Canonical': Either the
-- canonical user ID for an AWS account or an origin access identity for an
-- Amazon CloudFront distribution. 'Email': The registered email address of an
-- AWS account. 'Group': One of the following predefined Amazon S3 groups: 'AllUsers', 'AuthenticatedUsers', or 'LogDelivery'. 'Grantee': The AWS user or group that
-- you want to have access to transcoded files and playlists. 'Access': The
-- permission that you want to give to the AWS user that is listed in 'Grantee'.
-- Valid values include: 'READ': The grantee can read the objects and metadata
-- for objects that Elastic Transcoder adds to the Amazon S3 bucket. 'READ_ACP':
-- The grantee can read the object ACL for objects that Elastic Transcoder adds
-- to the Amazon S3 bucket. 'WRITE_ACP': The grantee can write the ACL for the
-- objects that Elastic Transcoder adds to the Amazon S3 bucket. 'FULL_CONTROL':
-- The grantee has 'READ', 'READ_ACP', and 'WRITE_ACP' permissions for the objects
-- that Elastic Transcoder adds to the Amazon S3 bucket. StorageClass: The
-- Amazon S3 storage class, Standard or ReducedRedundancy, that you want Elastic
-- Transcoder to assign to the video files and playlists that it stores in your
-- Amazon S3 bucket.
pContentConfig :: Lens' Pipeline (Maybe PipelineOutputConfig)
pContentConfig = lens _pContentConfig setField
  where
    setField s a = s { _pContentConfig = a }
-- | The identifier for the pipeline. You use this value to identify the pipeline
-- in which you want to perform a variety of operations, such as creating a job
-- or a preset.
pId :: Lens' Pipeline (Maybe Text)
pId = lens _pId setField
  where
    setField s a = s { _pId = a }
-- | The Amazon S3 bucket from which Elastic Transcoder gets media files for
-- transcoding and the graphics files, if any, that you want to use for
-- watermarks.
pInputBucket :: Lens' Pipeline (Maybe Text)
pInputBucket = lens _pInputBucket setField
  where
    setField s a = s { _pInputBucket = a }
-- | The name of the pipeline. We recommend that the name be unique within the AWS
-- account, but uniqueness is not enforced.
--
-- Constraints: Maximum 40 characters
pName :: Lens' Pipeline (Maybe Text)
pName = lens _pName setField
  where
    setField s a = s { _pName = a }
-- | The Amazon Simple Notification Service (Amazon SNS) topic that you want to
-- notify to report job status.
--
-- To receive notifications, you must also subscribe to the new topic in the
-- Amazon SNS console. Progressing (optional): The Amazon Simple Notification
-- Service (Amazon SNS) topic that you want to notify when Elastic Transcoder
-- has started to process the job. Completed (optional): The Amazon SNS topic
-- that you want to notify when Elastic Transcoder has finished processing the
-- job. Warning (optional): The Amazon SNS topic that you want to notify when
-- Elastic Transcoder encounters a warning condition. Error (optional): The
-- Amazon SNS topic that you want to notify when Elastic Transcoder encounters
-- an error condition.
pNotifications :: Lens' Pipeline (Maybe Notifications)
pNotifications = lens _pNotifications setField
  where
    setField s a = s { _pNotifications = a }
-- | The Amazon S3 bucket in which you want Elastic Transcoder to save transcoded
-- files, thumbnails, and playlists. Either you specify this value, or you
-- specify both 'ContentConfig' and 'ThumbnailConfig'.
pOutputBucket :: Lens' Pipeline (Maybe Text)
pOutputBucket = lens _pOutputBucket setField
  where
    setField s a = s { _pOutputBucket = a }
-- | The IAM Amazon Resource Name (ARN) for the role that Elastic Transcoder uses
-- to transcode jobs for this pipeline.
pRole :: Lens' Pipeline (Maybe Text)
pRole = lens _pRole setField
  where
    setField s a = s { _pRole = a }
-- | The current status of the pipeline:
--
-- 'Active': The pipeline is processing jobs. 'Paused': The pipeline is not
-- currently processing jobs.
pStatus :: Lens' Pipeline (Maybe Text)
pStatus = lens _pStatus setField
  where
    setField s a = s { _pStatus = a }
-- | Information about the Amazon S3 bucket in which you want Elastic Transcoder
-- to save thumbnail files. Either you specify both 'ContentConfig' and 'ThumbnailConfig', or you specify 'OutputBucket'.
--
-- 'Bucket': The Amazon S3 bucket in which you want Elastic Transcoder to save
-- thumbnail files. 'Permissions': A list of the users and/or predefined Amazon
-- S3 groups you want to have access to thumbnail files, and the type of access
-- that you want them to have. GranteeType: The type of value that appears in
-- the Grantee object: 'Canonical': Either the canonical user ID for an AWS
-- account or an origin access identity for an Amazon CloudFront distribution. A
-- canonical user ID is not the same as an AWS account number. 'Email': The
-- registered email address of an AWS account. 'Group': One of the following
-- predefined Amazon S3 groups: 'AllUsers', 'AuthenticatedUsers', or 'LogDelivery'. 'Grantee': The AWS user or group that you want to have access to thumbnail
-- files. Access: The permission that you want to give to the AWS user that is
-- listed in Grantee. Valid values include: 'READ': The grantee can read the
-- thumbnails and metadata for thumbnails that Elastic Transcoder adds to the
-- Amazon S3 bucket. 'READ_ACP': The grantee can read the object ACL for
-- thumbnails that Elastic Transcoder adds to the Amazon S3 bucket. 'WRITE_ACP':
-- The grantee can write the ACL for the thumbnails that Elastic Transcoder adds
-- to the Amazon S3 bucket. 'FULL_CONTROL': The grantee has READ, READ_ACP, and
-- WRITE_ACP permissions for the thumbnails that Elastic Transcoder adds to the
-- Amazon S3 bucket. 'StorageClass': The Amazon S3 storage class, 'Standard' or 'ReducedRedundancy', that you want Elastic Transcoder to assign to the
-- thumbnails that it stores in your Amazon S3 bucket.
pThumbnailConfig :: Lens' Pipeline (Maybe PipelineOutputConfig)
pThumbnailConfig = lens _pThumbnailConfig setField
  where
    setField s a = s { _pThumbnailConfig = a }
-- Decode a 'Pipeline' from an Elastic Transcoder JSON response.  Every key is
-- optional ('.:?'), mirroring the all-'Maybe' record.
instance FromJSON Pipeline where
    parseJSON = withObject "Pipeline" $ \o -> Pipeline
        -- NOTE: this applicative chain is positional; it must stay in the
        -- exact declaration order of the 'Pipeline' record fields.
        <$> o .:? "Arn"
        <*> o .:? "AwsKmsKeyArn"
        <*> o .:? "ContentConfig"
        <*> o .:? "Id"
        <*> o .:? "InputBucket"
        <*> o .:? "Name"
        <*> o .:? "Notifications"
        <*> o .:? "OutputBucket"
        <*> o .:? "Role"
        <*> o .:? "Status"
        <*> o .:? "ThumbnailConfig"
-- Encode a 'Pipeline' using the Elastic Transcoder wire-format key names.
-- (JSON objects are unordered, so the pair order here is cosmetic only.)
instance ToJSON Pipeline where
    toJSON Pipeline{..} = object
        [ "Id" .= _pId
        , "Arn" .= _pArn
        , "Name" .= _pName
        , "Status" .= _pStatus
        , "InputBucket" .= _pInputBucket
        , "OutputBucket" .= _pOutputBucket
        , "Role" .= _pRole
        , "AwsKmsKeyArn" .= _pAwsKmsKeyArn
        , "Notifications" .= _pNotifications
        , "ContentConfig" .= _pContentConfig
        , "ThumbnailConfig" .= _pThumbnailConfig
        ]
-- | A transcoding preset as represented by the Elastic Transcoder API.  All
-- fields are 'Maybe' because the service may omit any of them in a response.
--
-- NOTE: the field declaration order is load-bearing — the 'FromJSON' instance
-- fills the constructor positionally in this order.
data Preset = Preset
    { _p1Arn :: Maybe Text
    , _p1Audio :: Maybe AudioParameters
    , _p1Container :: Maybe Text
    , _p1Description :: Maybe Text
    , _p1Id :: Maybe Text
    , _p1Name :: Maybe Text
    , _p1Thumbnails :: Maybe Thumbnails
    , _p1Type :: Maybe Text
    , _p1Video :: Maybe VideoParameters
    } deriving (Eq, Read, Show)
-- | 'Preset' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'p1Arn' @::@ 'Maybe' 'Text'
--
-- * 'p1Audio' @::@ 'Maybe' 'AudioParameters'
--
-- * 'p1Container' @::@ 'Maybe' 'Text'
--
-- * 'p1Description' @::@ 'Maybe' 'Text'
--
-- * 'p1Id' @::@ 'Maybe' 'Text'
--
-- * 'p1Name' @::@ 'Maybe' 'Text'
--
-- * 'p1Thumbnails' @::@ 'Maybe' 'Thumbnails'
--
-- * 'p1Type' @::@ 'Maybe' 'Text'
--
-- * 'p1Video' @::@ 'Maybe' 'VideoParameters'
--
preset :: Preset
preset = Preset
    { _p1Arn         = Nothing
    , _p1Audio       = Nothing
    , _p1Container   = Nothing
    , _p1Description = Nothing
    , _p1Id          = Nothing
    , _p1Name        = Nothing
    , _p1Thumbnails  = Nothing
    , _p1Type        = Nothing
    , _p1Video       = Nothing
    }
-- | The Amazon Resource Name (ARN) for the preset.
p1Arn :: Lens' Preset (Maybe Text)
p1Arn = lens _p1Arn (\p v -> p { _p1Arn = v })
-- | A section of the response body that provides information about the audio
-- preset values.
p1Audio :: Lens' Preset (Maybe AudioParameters)
p1Audio = lens _p1Audio (\p v -> p { _p1Audio = v })
-- | The container type for the output file. Valid values include 'fmp4', 'mp3', 'mp4', 'ogg', 'ts', and 'webm'.
p1Container :: Lens' Preset (Maybe Text)
p1Container = lens _p1Container (\p v -> p { _p1Container = v })
-- | A description of the preset.
p1Description :: Lens' Preset (Maybe Text)
p1Description = lens _p1Description (\p v -> p { _p1Description = v })
-- | Identifier for the new preset. You use this value to get settings for the
-- preset or to delete it.
p1Id :: Lens' Preset (Maybe Text)
p1Id = lens _p1Id (\p v -> p { _p1Id = v })
-- | The name of the preset.
p1Name :: Lens' Preset (Maybe Text)
p1Name = lens _p1Name (\p v -> p { _p1Name = v })
-- | A section of the response body that provides information about the thumbnail
-- preset values, if any.
p1Thumbnails :: Lens' Preset (Maybe Thumbnails)
p1Thumbnails = lens _p1Thumbnails (\p v -> p { _p1Thumbnails = v })
-- | Whether the preset is a default preset provided by Elastic Transcoder ('System') or a preset that you have defined (
-- 'Custom').
p1Type :: Lens' Preset (Maybe Text)
p1Type = lens _p1Type (\p v -> p { _p1Type = v })
-- | A section of the response body that provides information about the video
-- preset values.
p1Video :: Lens' Preset (Maybe VideoParameters)
p1Video = lens _p1Video (\p v -> p { _p1Video = v })
-- Decode a 'Preset' from an Elastic Transcoder JSON response; absent keys
-- become 'Nothing'.
instance FromJSON Preset where
    parseJSON = withObject "Preset" $ \o -> Preset
        -- NOTE: positional applicative chain; must match the declaration
        -- order of the 'Preset' record fields.
        <$> o .:? "Arn"
        <*> o .:? "Audio"
        <*> o .:? "Container"
        <*> o .:? "Description"
        <*> o .:? "Id"
        <*> o .:? "Name"
        <*> o .:? "Thumbnails"
        <*> o .:? "Type"
        <*> o .:? "Video"
-- Encode a 'Preset' using the Elastic Transcoder wire-format key names.
instance ToJSON Preset where
    toJSON Preset{..} = object
        [ "Id" .= _p1Id
        , "Arn" .= _p1Arn
        , "Name" .= _p1Name
        , "Description" .= _p1Description
        , "Container" .= _p1Container
        , "Audio" .= _p1Audio
        , "Video" .= _p1Video
        , "Thumbnails" .= _p1Thumbnails
        , "Type" .= _p1Type
        ]
-- | The file format for a caption output (embedded or sidecar), plus optional
-- encryption settings and the output filename pattern.  All fields optional.
--
-- NOTE: field declaration order must match the 'FromJSON' applicative chain.
data CaptionFormat = CaptionFormat
    { _cfEncryption :: Maybe Encryption
    , _cfFormat :: Maybe Text
    , _cfPattern :: Maybe Text
    } deriving (Eq, Read, Show)
-- | 'CaptionFormat' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cfEncryption' @::@ 'Maybe' 'Encryption'
--
-- * 'cfFormat' @::@ 'Maybe' 'Text'
--
-- * 'cfPattern' @::@ 'Maybe' 'Text'
--
captionFormat :: CaptionFormat
captionFormat = CaptionFormat
    { _cfEncryption = Nothing
    , _cfFormat     = Nothing
    , _cfPattern    = Nothing
    }
-- | The encryption settings, if any, that you want Elastic Transcoder to apply to
-- your caption formats.
cfEncryption :: Lens' CaptionFormat (Maybe Encryption)
cfEncryption = lens _cfEncryption (\c v -> c { _cfEncryption = v })
-- | The format you specify determines whether Elastic Transcoder generates an
-- embedded or sidecar caption for this output.
--
-- Valid Embedded Caption Formats:
--
-- For MP3: None
--
-- For MP4: mov-text
--
-- For MPEG-TS: None
--
-- For ogg: None
--
-- For webm: None
--
-- Valid Sidecar Caption Formats: Elastic Transcoder supports dfxp (first
-- div element only), scc, srt, and webvtt. If you want ttml or smpte-tt
-- compatible captions, specify dfxp as your output format.
--
-- For FMP4: dfxp
--
-- Non-FMP4 outputs: All sidecar types
--
-- 'fmp4' captions have an extension of '.ismt'
--
--
cfFormat :: Lens' CaptionFormat (Maybe Text)
cfFormat = lens _cfFormat (\c v -> c { _cfFormat = v })
-- | The prefix for caption filenames, in the form /description/-'{language}', where:
--
-- /description/ is a description of the video. '{language}' is a literal value
-- that Elastic Transcoder replaces with the two- or three-letter code for the
-- language of the caption in the output file names. If you don't include '{language}' in the file name pattern, Elastic Transcoder automatically appends "'{language}'" to the value that you specify for the description. In addition, Elastic
-- Transcoder automatically appends the count to the end of the segment files.
--
-- For example, suppose you're transcoding into srt format. When you enter
-- "Sydney-{language}-sunrise", and the language of the captions is English
-- (en), the name of the first caption file will be Sydney-en-sunrise00000.srt.
cfPattern :: Lens' CaptionFormat (Maybe Text)
cfPattern = lens _cfPattern (\c v -> c { _cfPattern = v })
-- Decode a 'CaptionFormat'; absent keys become 'Nothing'.  The applicative
-- chain is positional and must match the record's field declaration order.
instance FromJSON CaptionFormat where
    parseJSON = withObject "CaptionFormat" $ \o -> CaptionFormat
        <$> o .:? "Encryption"
        <*> o .:? "Format"
        <*> o .:? "Pattern"
-- Encode a 'CaptionFormat' using the Elastic Transcoder wire-format key names.
instance ToJSON CaptionFormat where
    toJSON CaptionFormat{..} = object
        [ "Format" .= _cfFormat
        , "Pattern" .= _cfPattern
        , "Encryption" .= _cfEncryption
        ]
-- | HLS content-protection (encryption) settings for an output playlist.
-- All fields optional; declaration order must match the 'FromJSON' chain.
data HlsContentProtection = HlsContentProtection
    { _hcpInitializationVector :: Maybe Text
    , _hcpKey :: Maybe Text
    , _hcpKeyMd5 :: Maybe Text
    , _hcpKeyStoragePolicy :: Maybe Text
    , _hcpLicenseAcquisitionUrl :: Maybe Text
    , _hcpMethod :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'HlsContentProtection' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'hcpInitializationVector' @::@ 'Maybe' 'Text'
--
-- * 'hcpKey' @::@ 'Maybe' 'Text'
--
-- * 'hcpKeyMd5' @::@ 'Maybe' 'Text'
--
-- * 'hcpKeyStoragePolicy' @::@ 'Maybe' 'Text'
--
-- * 'hcpLicenseAcquisitionUrl' @::@ 'Maybe' 'Text'
--
-- * 'hcpMethod' @::@ 'Maybe' 'Text'
--
hlsContentProtection :: HlsContentProtection
hlsContentProtection = HlsContentProtection
    { _hcpInitializationVector  = Nothing
    , _hcpKey                   = Nothing
    , _hcpKeyMd5                = Nothing
    , _hcpKeyStoragePolicy      = Nothing
    , _hcpLicenseAcquisitionUrl = Nothing
    , _hcpMethod                = Nothing
    }
-- | If Elastic Transcoder is generating your key for you, you must leave this
-- field blank.
--
-- The series of random bits created by a random bit generator, unique for
-- every encryption operation, that you want Elastic Transcoder to use to
-- encrypt your output files. The initialization vector must be base64-encoded,
-- and it must be exactly 16 bytes before being base64-encoded.
hcpInitializationVector :: Lens' HlsContentProtection (Maybe Text)
hcpInitializationVector =
    lens _hcpInitializationVector (\h v -> h { _hcpInitializationVector = v })
-- | If you want Elastic Transcoder to generate a key for you, leave this field
-- blank.
--
-- If you choose to supply your own key, you must encrypt the key by using AWS
-- KMS. The key must be base64-encoded, and it must be one of the following bit
-- lengths before being base64-encoded:
--
-- '128', '192', or '256'.
hcpKey :: Lens' HlsContentProtection (Maybe Text)
hcpKey = lens _hcpKey (\h v -> h { _hcpKey = v })
-- | If Elastic Transcoder is generating your key for you, you must leave this
-- field blank.
--
-- The MD5 digest of the key that you want Elastic Transcoder to use to encrypt
-- your output file, and that you want Elastic Transcoder to use as a checksum
-- to make sure your key was not corrupted in transit. The key MD5 must be
-- base64-encoded, and it must be exactly 16 bytes before being base64- encoded.
hcpKeyMd5 :: Lens' HlsContentProtection (Maybe Text)
hcpKeyMd5 = lens _hcpKeyMd5 (\h v -> h { _hcpKeyMd5 = v })
-- | Specify whether you want Elastic Transcoder to write your HLS license key to
-- an Amazon S3 bucket. If you choose 'WithVariantPlaylists', 'LicenseAcquisitionUrl'
-- must be left blank and Elastic Transcoder writes your data key into the same
-- bucket as the associated playlist.
hcpKeyStoragePolicy :: Lens' HlsContentProtection (Maybe Text)
hcpKeyStoragePolicy =
    lens _hcpKeyStoragePolicy (\h v -> h { _hcpKeyStoragePolicy = v })
-- | The location of the license key required to decrypt your HLS playlist. The
-- URL must be an absolute path, and is referenced in the URI attribute of the
-- EXT-X-KEY metadata tag in the playlist file.
hcpLicenseAcquisitionUrl :: Lens' HlsContentProtection (Maybe Text)
hcpLicenseAcquisitionUrl =
    lens _hcpLicenseAcquisitionUrl
        (\h v -> h { _hcpLicenseAcquisitionUrl = v })
-- | The content protection method for your output. The only valid value is: 'aes-128'.
--
-- This value will be written into the method attribute of the 'EXT-X-KEY'
-- metadata tag in the output playlist.
hcpMethod :: Lens' HlsContentProtection (Maybe Text)
hcpMethod = lens _hcpMethod (\h v -> h { _hcpMethod = v })
-- Decode 'HlsContentProtection'; absent keys become 'Nothing'.  The chain is
-- positional and must match the record's field declaration order.
instance FromJSON HlsContentProtection where
    parseJSON = withObject "HlsContentProtection" $ \o -> HlsContentProtection
        <$> o .:? "InitializationVector"
        <*> o .:? "Key"
        <*> o .:? "KeyMd5"
        <*> o .:? "KeyStoragePolicy"
        <*> o .:? "LicenseAcquisitionUrl"
        <*> o .:? "Method"
-- Encode 'HlsContentProtection' using the wire-format key names.
instance ToJSON HlsContentProtection where
    toJSON HlsContentProtection{..} = object
        [ "Method" .= _hcpMethod
        , "Key" .= _hcpKey
        , "KeyMd5" .= _hcpKeyMd5
        , "InitializationVector" .= _hcpInitializationVector
        , "LicenseAcquisitionUrl" .= _hcpLicenseAcquisitionUrl
        , "KeyStoragePolicy" .= _hcpKeyStoragePolicy
        ]
-- | Settings for one watermark applied by a preset (placement, size, opacity).
-- All fields optional; declaration order must match the 'FromJSON' chain.
data PresetWatermark = PresetWatermark
    { _pwHorizontalAlign :: Maybe Text
    , _pwHorizontalOffset :: Maybe Text
    , _pwId :: Maybe Text
    , _pwMaxHeight :: Maybe Text
    , _pwMaxWidth :: Maybe Text
    , _pwOpacity :: Maybe Text
    , _pwSizingPolicy :: Maybe Text
    , _pwTarget :: Maybe Text
    , _pwVerticalAlign :: Maybe Text
    , _pwVerticalOffset :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'PresetWatermark' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'pwHorizontalAlign' @::@ 'Maybe' 'Text'
--
-- * 'pwHorizontalOffset' @::@ 'Maybe' 'Text'
--
-- * 'pwId' @::@ 'Maybe' 'Text'
--
-- * 'pwMaxHeight' @::@ 'Maybe' 'Text'
--
-- * 'pwMaxWidth' @::@ 'Maybe' 'Text'
--
-- * 'pwOpacity' @::@ 'Maybe' 'Text'
--
-- * 'pwSizingPolicy' @::@ 'Maybe' 'Text'
--
-- * 'pwTarget' @::@ 'Maybe' 'Text'
--
-- * 'pwVerticalAlign' @::@ 'Maybe' 'Text'
--
-- * 'pwVerticalOffset' @::@ 'Maybe' 'Text'
--
presetWatermark :: PresetWatermark
presetWatermark = PresetWatermark
    { _pwHorizontalAlign  = Nothing
    , _pwHorizontalOffset = Nothing
    , _pwId               = Nothing
    , _pwMaxHeight        = Nothing
    , _pwMaxWidth         = Nothing
    , _pwOpacity          = Nothing
    , _pwSizingPolicy     = Nothing
    , _pwTarget           = Nothing
    , _pwVerticalAlign    = Nothing
    , _pwVerticalOffset   = Nothing
    }
-- | The horizontal position of the watermark unless you specify a non-zero value
-- for 'HorizontalOffset': Left: The left edge of the watermark is aligned with
-- the left border of the video. Right: The right edge of the watermark is
-- aligned with the right border of the video. Center: The watermark is
-- centered between the left and right borders.
pwHorizontalAlign :: Lens' PresetWatermark (Maybe Text)
pwHorizontalAlign =
    lens _pwHorizontalAlign (\w v -> w { _pwHorizontalAlign = v })
-- | The amount by which you want the horizontal position of the watermark to be
-- offset from the position specified by HorizontalAlign: number of pixels
-- (px): The minimum value is 0 pixels, and the maximum value is the value of
-- MaxWidth. integer percentage (%): The range of valid values is 0 to 100. For
-- example, if you specify Left for 'HorizontalAlign' and 5px for 'HorizontalOffset', the left side of the watermark appears 5 pixels from the left border of the output video.
--
--
-- 'HorizontalOffset' is only valid when the value of 'HorizontalAlign' is 'Left' or 'Right'. If you specify an offset that causes the watermark to extend beyond the
-- left or right border and Elastic Transcoder has not added black bars, the
-- watermark is cropped. If Elastic Transcoder has added black bars, the
-- watermark extends into the black bars. If the watermark extends beyond the
-- black bars, it is cropped.
--
-- Use the value of 'Target' to specify whether you want to include the black
-- bars that are added by Elastic Transcoder, if any, in the offset calculation.
pwHorizontalOffset :: Lens' PresetWatermark (Maybe Text)
pwHorizontalOffset =
    lens _pwHorizontalOffset (\w v -> w { _pwHorizontalOffset = v })
-- | A unique identifier for the settings for one watermark. The value of 'Id' can
-- be up to 40 characters long.
pwId :: Lens' PresetWatermark (Maybe Text)
pwId = lens _pwId (\w v -> w { _pwId = v })
-- | The maximum height of the watermark in one of the following formats: number
-- of pixels (px): The minimum value is 16 pixels, and the maximum value is the
-- value of 'MaxHeight'. integer percentage (%): The range of valid values is 0 to
-- 100. Use the value of 'Target' to specify whether you want Elastic Transcoder
-- to include the black bars that are added by Elastic Transcoder, if any, in
-- the calculation. If you specify the value in pixels, it must be less than or
-- equal to the value of 'MaxHeight'.
pwMaxHeight :: Lens' PresetWatermark (Maybe Text)
pwMaxHeight = lens _pwMaxHeight (\w v -> w { _pwMaxHeight = v })
-- | The maximum width of the watermark in one of the following formats: number
-- of pixels (px): The minimum value is 16 pixels, and the maximum value is the
-- value of 'MaxWidth'. integer percentage (%): The range of valid values is 0 to
-- 100. Use the value of 'Target' to specify whether you want Elastic Transcoder
-- to include the black bars that are added by Elastic Transcoder, if any, in
-- the calculation. If you specify the value in pixels, it must be less than or
-- equal to the value of 'MaxWidth'.
pwMaxWidth :: Lens' PresetWatermark (Maybe Text)
pwMaxWidth = lens _pwMaxWidth (\w v -> w { _pwMaxWidth = v })
-- | A percentage that indicates how much you want a watermark to obscure the
-- video in the location where it appears. Valid values are 0 (the watermark is
-- invisible) to 100 (the watermark completely obscures the video in the
-- specified location). The datatype of 'Opacity' is float.
--
-- Elastic Transcoder supports transparent .png graphics. If you use a
-- transparent .png, the transparent portion of the video appears as if you had
-- specified a value of 0 for 'Opacity'. The .jpg file format doesn't support
-- transparency.
pwOpacity :: Lens' PresetWatermark (Maybe Text)
pwOpacity = lens _pwOpacity (\w v -> w { _pwOpacity = v })
-- | A value that controls scaling of the watermark: Fit: Elastic Transcoder
-- scales the watermark so it matches the value that you specified in either 'MaxWidth' or 'MaxHeight' without exceeding the other value. Stretch: Elastic Transcoder
-- stretches the watermark to match the values that you specified for 'MaxWidth'
-- and 'MaxHeight'. If the relative proportions of the watermark and the values of 'MaxWidth' and 'MaxHeight' are different, the watermark will be distorted. ShrinkToFit
-- : Elastic Transcoder scales the watermark down so that its dimensions match
-- the values that you specified for at least one of 'MaxWidth' and 'MaxHeight'
-- without exceeding either value. If you specify this option, Elastic
-- Transcoder does not scale the watermark up.
pwSizingPolicy :: Lens' PresetWatermark (Maybe Text)
pwSizingPolicy = lens _pwSizingPolicy (\w v -> w { _pwSizingPolicy = v })
-- | A value that determines how Elastic Transcoder interprets values that you
-- specified for 'HorizontalOffset', 'VerticalOffset', 'MaxWidth', and 'MaxHeight': Content
-- : 'HorizontalOffset' and 'VerticalOffset' values are calculated based on the
-- borders of the video excluding black bars added by Elastic Transcoder, if
-- any. In addition, 'MaxWidth' and 'MaxHeight', if specified as a percentage, are
-- calculated based on the borders of the video excluding black bars added by
-- Elastic Transcoder, if any. Frame: 'HorizontalOffset' and 'VerticalOffset'
-- values are calculated based on the borders of the video including black bars
-- added by Elastic Transcoder, if any. In addition, 'MaxWidth' and 'MaxHeight', if
-- specified as a percentage, are calculated based on the borders of the video
-- including black bars added by Elastic Transcoder, if any.
pwTarget :: Lens' PresetWatermark (Maybe Text)
pwTarget = lens _pwTarget (\w v -> w { _pwTarget = v })
-- | The vertical position of the watermark unless you specify a non-zero value
-- for 'VerticalOffset': Top: The top edge of the watermark is aligned with the
-- top border of the video. Bottom: The bottom edge of the watermark is aligned
-- with the bottom border of the video. Center: The watermark is centered
-- between the top and bottom borders.
pwVerticalAlign :: Lens' PresetWatermark (Maybe Text)
pwVerticalAlign = lens _pwVerticalAlign (\w v -> w { _pwVerticalAlign = v })
-- | 'VerticalOffset' The amount by which you want the vertical position of the
-- watermark to be offset from the position specified by VerticalAlign: number
-- of pixels (px): The minimum value is 0 pixels, and the maximum value is the
-- value of 'MaxHeight'. integer percentage (%): The range of valid values is 0 to
-- 100. For example, if you specify 'Top' for 'VerticalAlign' and '5px' for 'VerticalOffset', the top of the watermark appears 5 pixels from the top border of the output
-- video.
--
-- 'VerticalOffset' is only valid when the value of VerticalAlign is Top or
-- Bottom.
--
-- If you specify an offset that causes the watermark to extend beyond the top
-- or bottom border and Elastic Transcoder has not added black bars, the
-- watermark is cropped. If Elastic Transcoder has added black bars, the
-- watermark extends into the black bars. If the watermark extends beyond the
-- black bars, it is cropped.
--
-- Use the value of 'Target' to specify whether you want Elastic Transcoder to
-- include the black bars that are added by Elastic Transcoder, if any, in the
-- offset calculation.
pwVerticalOffset :: Lens' PresetWatermark (Maybe Text)
pwVerticalOffset = lens _pwVerticalOffset (\w v -> w { _pwVerticalOffset = v })
-- Decode a 'PresetWatermark'; absent keys become 'Nothing'.  The chain is
-- positional and must match the record's field declaration order.
instance FromJSON PresetWatermark where
    parseJSON = withObject "PresetWatermark" $ \o -> PresetWatermark
        <$> o .:? "HorizontalAlign"
        <*> o .:? "HorizontalOffset"
        <*> o .:? "Id"
        <*> o .:? "MaxHeight"
        <*> o .:? "MaxWidth"
        <*> o .:? "Opacity"
        <*> o .:? "SizingPolicy"
        <*> o .:? "Target"
        <*> o .:? "VerticalAlign"
        <*> o .:? "VerticalOffset"
-- Encode a 'PresetWatermark' using the wire-format key names.
instance ToJSON PresetWatermark where
    toJSON PresetWatermark{..} = object
        [ "Id" .= _pwId
        , "MaxWidth" .= _pwMaxWidth
        , "MaxHeight" .= _pwMaxHeight
        , "SizingPolicy" .= _pwSizingPolicy
        , "HorizontalAlign" .= _pwHorizontalAlign
        , "HorizontalOffset" .= _pwHorizontalOffset
        , "VerticalAlign" .= _pwVerticalAlign
        , "VerticalOffset" .= _pwVerticalOffset
        , "Opacity" .= _pwOpacity
        , "Target" .= _pwTarget
        ]
-- | An Amazon S3 grant: who ('Grantee' / 'GranteeType') gets which 'Access'
-- permissions.  'List' is the project's newtype wrapper carrying the JSON
-- member name; declaration order must match the 'FromJSON' chain.
data Permission = Permission
    { _pAccess :: List "Access" Text
    , _pGrantee :: Maybe Text
    , _pGranteeType :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'Permission' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'pAccess' @::@ ['Text']
--
-- * 'pGrantee' @::@ 'Maybe' 'Text'
--
-- * 'pGranteeType' @::@ 'Maybe' 'Text'
--
permission :: Permission
permission = Permission
    { _pAccess      = mempty
    , _pGrantee     = Nothing
    , _pGranteeType = Nothing
    }
-- | The permission that you want to give to the AWS user that is listed in
-- Grantee. Valid values include: 'READ': The grantee can read the thumbnails
-- and metadata for thumbnails that Elastic Transcoder adds to the Amazon S3
-- bucket. 'READ_ACP': The grantee can read the object ACL for thumbnails that
-- Elastic Transcoder adds to the Amazon S3 bucket. 'WRITE_ACP': The grantee can
-- write the ACL for the thumbnails that Elastic Transcoder adds to the Amazon
-- S3 bucket. 'FULL_CONTROL': The grantee has READ, READ_ACP, and WRITE_ACP
-- permissions for the thumbnails that Elastic Transcoder adds to the Amazon S3
-- bucket.
pAccess :: Lens' Permission [Text]
pAccess = lens _pAccess (\r v -> r { _pAccess = v }) . _List
-- | The AWS user or group that you want to have access to transcoded files and
-- playlists. To identify the user or group, you can specify the canonical user
-- ID for an AWS account, an origin access identity for a CloudFront
-- distribution, the registered email address of an AWS account, or a predefined
-- Amazon S3 group.
pGrantee :: Lens' Permission (Maybe Text)
pGrantee = lens _pGrantee (\r v -> r { _pGrantee = v })
-- | The type of value that appears in the Grantee object: 'Canonical': Either the
-- canonical user ID for an AWS account or an origin access identity for an
-- Amazon CloudFront distribution. A canonical user ID is not the same as an AWS
-- account number. 'Email': The registered email address of an AWS account. 'Group': One of the following predefined Amazon S3 groups: 'AllUsers', 'AuthenticatedUsers', or 'LogDelivery'.
pGranteeType :: Lens' Permission (Maybe Text)
pGranteeType = lens _pGranteeType (\r v -> r { _pGranteeType = v })
-- Decode a 'Permission'.  A missing "Access" key defaults to the empty list
-- via '.!= mempty'; the other keys become 'Nothing' when absent.
instance FromJSON Permission where
    parseJSON = withObject "Permission" $ \o -> Permission
        <$> o .:? "Access" .!= mempty
        <*> o .:? "Grantee"
        <*> o .:? "GranteeType"
-- Encode a 'Permission' using the wire-format key names.
instance ToJSON Permission where
    toJSON Permission{..} = object
        [ "GranteeType" .= _pGranteeType
        , "Grantee" .= _pGrantee
        , "Access" .= _pAccess
        ]
-- | Video-encoding settings for a preset (codec, bit rate, frame rate,
-- dimensions, watermarks).  'Map'/'List' are the project's wrappers around
-- the wire representation; declaration order must match the 'FromJSON' chain.
data VideoParameters = VideoParameters
    { _vpAspectRatio :: Maybe Text
    , _vpBitRate :: Maybe Text
    , _vpCodec :: Maybe Text
    , _vpCodecOptions :: Map Text Text
    , _vpDisplayAspectRatio :: Maybe Text
    , _vpFixedGOP :: Maybe Text
    , _vpFrameRate :: Maybe Text
    , _vpKeyframesMaxDist :: Maybe Text
    , _vpMaxFrameRate :: Maybe Text
    , _vpMaxHeight :: Maybe Text
    , _vpMaxWidth :: Maybe Text
    , _vpPaddingPolicy :: Maybe Text
    , _vpResolution :: Maybe Text
    , _vpSizingPolicy :: Maybe Text
    , _vpWatermarks :: List "Watermarks" PresetWatermark
    } deriving (Eq, Read, Show)
-- | 'VideoParameters' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'vpAspectRatio' @::@ 'Maybe' 'Text'
--
-- * 'vpBitRate' @::@ 'Maybe' 'Text'
--
-- * 'vpCodec' @::@ 'Maybe' 'Text'
--
-- * 'vpCodecOptions' @::@ 'HashMap' 'Text' 'Text'
--
-- * 'vpDisplayAspectRatio' @::@ 'Maybe' 'Text'
--
-- * 'vpFixedGOP' @::@ 'Maybe' 'Text'
--
-- * 'vpFrameRate' @::@ 'Maybe' 'Text'
--
-- * 'vpKeyframesMaxDist' @::@ 'Maybe' 'Text'
--
-- * 'vpMaxFrameRate' @::@ 'Maybe' 'Text'
--
-- * 'vpMaxHeight' @::@ 'Maybe' 'Text'
--
-- * 'vpMaxWidth' @::@ 'Maybe' 'Text'
--
-- * 'vpPaddingPolicy' @::@ 'Maybe' 'Text'
--
-- * 'vpResolution' @::@ 'Maybe' 'Text'
--
-- * 'vpSizingPolicy' @::@ 'Maybe' 'Text'
--
-- * 'vpWatermarks' @::@ ['PresetWatermark']
--
videoParameters :: VideoParameters
videoParameters = VideoParameters
    { _vpAspectRatio        = Nothing
    , _vpBitRate            = Nothing
    , _vpCodec              = Nothing
    , _vpCodecOptions       = mempty
    , _vpDisplayAspectRatio = Nothing
    , _vpFixedGOP           = Nothing
    , _vpFrameRate          = Nothing
    , _vpKeyframesMaxDist   = Nothing
    , _vpMaxFrameRate       = Nothing
    , _vpMaxHeight          = Nothing
    , _vpMaxWidth           = Nothing
    , _vpPaddingPolicy      = Nothing
    , _vpResolution         = Nothing
    , _vpSizingPolicy       = Nothing
    , _vpWatermarks         = mempty
    }
-- | To better control resolution and aspect ratio of output videos, we recommend
-- that you use the values 'MaxWidth', 'MaxHeight', 'SizingPolicy', 'PaddingPolicy', and 'DisplayAspectRatio' instead of 'Resolution' and 'AspectRatio'. The two groups of
-- settings are mutually exclusive. Do not use them together.
--
-- The display aspect ratio of the video in the output file. Valid values
-- include:
--
-- 'auto', '1:1', '4:3', '3:2', '16:9'
--
-- If you specify 'auto', Elastic Transcoder tries to preserve the aspect ratio
-- of the input file.
--
-- If you specify an aspect ratio for the output file that differs from aspect
-- ratio of the input file, Elastic Transcoder adds pillarboxing (black bars on
-- the sides) or letterboxing (black bars on the top and bottom) to maintain the
-- aspect ratio of the active region of the video.
vpAspectRatio :: Lens' VideoParameters (Maybe Text)
vpAspectRatio = lens _vpAspectRatio (\s a -> s { _vpAspectRatio = a })
-- | The bit rate of the video stream in the output file, in kilobits/second.
-- Valid values depend on the values of 'Level' and 'Profile'. If you specify 'auto',
-- Elastic Transcoder uses the detected bit rate of the input source. If you
-- specify a value other than 'auto', we recommend that you specify a value less
-- than or equal to the maximum H.264-compliant value listed for your level and
-- profile:
--
-- /Level - Maximum video bit rate in kilobits/second (baseline and mainProfile) : maximum video bit rate in kilobits/second (high Profile)/
--
-- 1 - 64 : 80 1b - 128 : 160 1.1 - 192 : 240 1.2 - 384 : 480 1.3 - 768 : 960 2 - 2000 : 2500
-- 3 - 10000 : 12500 3.1 - 14000 : 17500 3.2 - 20000 : 25000 4 - 20000 : 25000 4.1 - 50000 : 62500
--
vpBitRate :: Lens' VideoParameters (Maybe Text)
vpBitRate = lens _vpBitRate (\s a -> s { _vpBitRate = a })
-- | The video codec for the output file. Valid values include 'H.264' and 'vp8'. You
-- can only specify 'vp8' when the container type is 'webm'.
vpCodec :: Lens' VideoParameters (Maybe Text)
vpCodec = lens _vpCodec (\s a -> s { _vpCodec = a })
-- | Profile
--
-- The H.264 profile that you want to use for the output file. Elastic
-- Transcoder supports the following profiles:
--
-- 'baseline': The profile most commonly used for videoconferencing and for
-- mobile applications. 'main': The profile used for standard-definition digital
-- TV broadcasts. 'high': The profile used for high-definition digital TV
-- broadcasts and for Blu-ray discs. Level (H.264 Only)
--
-- The H.264 level that you want to use for the output file. Elastic Transcoder
-- supports the following levels:
--
-- '1', '1b', '1.1', '1.2', '1.3', '2', '2.1', '2.2', '3', '3.1', '3.2', '4', '4.1'
--
-- MaxReferenceFrames (H.264 Only)
--
-- Applicable only when the value of Video:Codec is H.264. The maximum number
-- of previously decoded frames to use as a reference for decoding future
-- frames. Valid values are integers 0 through 16, but we recommend that you not
-- use a value greater than the following:
--
-- 'Min(Floor(Maximum decoded picture buffer in macroblocks * 256 / (Width inpixels * Height in pixels)), 16)'
--
-- where /Width in pixels/ and /Height in pixels/ represent either MaxWidth and
-- MaxHeight, or Resolution. /Maximum decoded picture buffer in macroblocks/
-- depends on the value of the 'Level' object. See the list below. (A macroblock
-- is a block of pixels measuring 16x16.)
--
-- 1 - 396 1b - 396 1.1 - 900 1.2 - 2376 1.3 - 2376 2 - 2376 2.1 - 4752 2.2 -
-- 8100 3 - 8100 3.1 - 18000 3.2 - 20480 4 - 32768 4.1 - 32768 MaxBitRate
--
-- The maximum number of bits per second in a video buffer; the size of the
-- buffer is specified by 'BufferSize'. Specify a value between 16 and 62,500. You
-- can reduce the bandwidth required to stream a video by reducing the maximum
-- bit rate, but this also reduces the quality of the video.
--
-- BufferSize
--
-- The maximum number of bits in any x seconds of the output video. This window
-- is commonly 10 seconds, the standard segment duration when you're using FMP4
-- or MPEG-TS for the container type of the output video. Specify an integer
-- greater than 0. If you specify 'MaxBitRate' and omit 'BufferSize', Elastic
-- Transcoder sets 'BufferSize' to 10 times the value of 'MaxBitRate'.
vpCodecOptions :: Lens' VideoParameters (HashMap Text Text)
vpCodecOptions = lens _vpCodecOptions (\s a -> s { _vpCodecOptions = a }) . _Map
-- | The value that Elastic Transcoder adds to the metadata in the output file.
vpDisplayAspectRatio :: Lens' VideoParameters (Maybe Text)
vpDisplayAspectRatio =
lens _vpDisplayAspectRatio (\s a -> s { _vpDisplayAspectRatio = a })
-- | Whether to use a fixed value for 'FixedGOP'. Valid values are 'true' and 'false':
--
-- 'true': Elastic Transcoder uses the value of 'KeyframesMaxDist' for the
-- distance between key frames (the number of frames in a group of pictures, or
-- GOP). 'false': The distance between key frames can vary. 'FixedGOP' must be set
-- to 'true' for 'fmp4' containers.
--
vpFixedGOP :: Lens' VideoParameters (Maybe Text)
vpFixedGOP = lens _vpFixedGOP (\s a -> s { _vpFixedGOP = a })
-- | The frames per second for the video stream in the output file. Valid values
-- include:
--
-- 'auto', '10', '15', '23.97', '24', '25', '29.97', '30', '60'
--
-- If you specify 'auto', Elastic Transcoder uses the detected frame rate of the
-- input source. If you specify a frame rate, we recommend that you perform the
-- following calculation:
--
-- 'Frame rate = maximum recommended decoding speed in luma samples/second /(width in pixels * height in pixels)'
--
-- where:
--
-- /width in pixels/ and /height in pixels/ represent the Resolution of the
-- output video. /maximum recommended decoding speed in Luma samples/second/ is
-- less than or equal to the maximum value listed in the following table, based
-- on the value that you specified for Level. The maximum recommended decoding
-- speed in Luma samples/second for each level is described in the following
-- list (/Level - Decoding speed/):
--
-- 1 - 380160 1b - 380160 1.1 - 76800 1.2 - 1536000 1.3 - 3041280 2 - 3041280 2.1 - 5068800
-- 2.2 - 5184000 3 - 10368000 3.1 - 27648000 3.2 - 55296000 4 - 62914560 4.1 -
-- 62914560
vpFrameRate :: Lens' VideoParameters (Maybe Text)
vpFrameRate = lens _vpFrameRate setField
  where
    setField s v = s { _vpFrameRate = v }
-- | The maximum number of frames between key frames. Key frames are fully encoded
-- frames; the frames between key frames are encoded based, in part, on the
-- content of the key frames. The value is an integer formatted as a string;
-- valid values are between 1 (every frame is a key frame) and 100000,
-- inclusive. A higher value results in higher compression but may also
-- discernibly decrease video quality.
--
-- For 'Smooth' outputs, the 'FrameRate' must have a constant ratio to the 'KeyframesMaxDist'. This allows 'Smooth' playlists to switch between different quality levels
-- while the file is being played.
--
-- For example, an input file can have a 'FrameRate' of 30 with a 'KeyframesMaxDist'
-- of 90. The output file then needs to have a ratio of 1:3. Valid outputs
-- would have 'FrameRate' of 30, 25, and 10, and 'KeyframesMaxDist' of 90, 75, and
-- 30, respectively.
--
-- Alternately, this can be achieved by setting 'FrameRate' to auto and having
-- the same values for 'MaxFrameRate' and 'KeyframesMaxDist'.
vpKeyframesMaxDist :: Lens' VideoParameters (Maybe Text)
vpKeyframesMaxDist = lens _vpKeyframesMaxDist setField
  where
    setField s v = s { _vpKeyframesMaxDist = v }
-- | If you specify 'auto' for 'FrameRate', Elastic Transcoder uses the frame rate of
-- the input video for the frame rate of the output video. Specify the maximum
-- frame rate that you want Elastic Transcoder to use when the frame rate of the
-- input video is greater than the desired maximum frame rate of the output
-- video. Valid values include: '10', '15', '23.97', '24', '25', '29.97', '30', '60'.
vpMaxFrameRate :: Lens' VideoParameters (Maybe Text)
vpMaxFrameRate = lens _vpMaxFrameRate setField
  where
    setField s v = s { _vpMaxFrameRate = v }
-- | The maximum height of the output video in pixels. If you specify 'auto',
-- Elastic Transcoder uses 1080 (Full HD) as the default value. If you specify a
-- numeric value, enter an even integer between 96 and 3072.
vpMaxHeight :: Lens' VideoParameters (Maybe Text)
vpMaxHeight = lens _vpMaxHeight setField
  where
    setField s v = s { _vpMaxHeight = v }
-- | The maximum width of the output video in pixels. If you specify 'auto',
-- Elastic Transcoder uses 1920 (Full HD) as the default value. If you specify a
-- numeric value, enter an even integer between 128 and 4096.
vpMaxWidth :: Lens' VideoParameters (Maybe Text)
vpMaxWidth = lens _vpMaxWidth setField
  where
    setField s v = s { _vpMaxWidth = v }
-- | When you set 'PaddingPolicy' to 'Pad', Elastic Transcoder may add black bars to
-- the top and bottom and/or left and right sides of the output video to make
-- the total size of the output video match the values that you specified for 'MaxWidth' and 'MaxHeight'.
vpPaddingPolicy :: Lens' VideoParameters (Maybe Text)
vpPaddingPolicy = lens _vpPaddingPolicy setField
  where
    setField s v = s { _vpPaddingPolicy = v }
-- | To better control resolution and aspect ratio of output videos, we recommend
-- that you use the values 'MaxWidth', 'MaxHeight', 'SizingPolicy', 'PaddingPolicy', and 'DisplayAspectRatio' instead of 'Resolution' and 'AspectRatio'. The two groups of
-- settings are mutually exclusive. Do not use them together.
--
-- The width and height of the video in the output file, in pixels. Valid
-- values are 'auto' and /width/ x /height/:
--
-- 'auto': Elastic Transcoder attempts to preserve the width and height of the
-- input file, subject to the following rules. '/width/ x /height/: The width and
-- height of the output video in pixels. Note the following about specifying
-- the width and height:
--
-- The width must be an even integer between 128 and 4096, inclusive. The
-- height must be an even integer between 96 and 3072, inclusive. If you specify
-- a resolution that is less than the resolution of the input file, Elastic
-- Transcoder rescales the output file to the lower resolution. If you specify a
-- resolution that is greater than the resolution of the input file, Elastic
-- Transcoder rescales the output to the higher resolution. We recommend that
-- you specify a resolution for which the product of width and height is less
-- than or equal to the applicable value in the following list (/List - Max widthx height value/): 1 - 25344 1b - 25344 1.1 - 101376 1.2 - 101376 1.3 - 101376
-- 2 - 101376 2.1 - 202752 2.2 - 404720 3 - 404720 3.1 - 921600 3.2 - 1310720 4
-- - 2097152 4.1 - 2097152
vpResolution :: Lens' VideoParameters (Maybe Text)
vpResolution = lens _vpResolution setField
  where
    setField s v = s { _vpResolution = v }
-- | Specify one of the following values to control scaling of the output video:
--
-- 'Fit': Elastic Transcoder scales the output video so it matches the value
-- that you specified in either 'MaxWidth' or 'MaxHeight' without exceeding the
-- other value. 'Fill': Elastic Transcoder scales the output video so it matches
-- the value that you specified in either 'MaxWidth' or 'MaxHeight' and matches or
-- exceeds the other value. Elastic Transcoder centers the output video and then
-- crops it in the dimension (if any) that exceeds the maximum value. 'Stretch':
-- Elastic Transcoder stretches the output video to match the values that you
-- specified for 'MaxWidth' and 'MaxHeight'. If the relative proportions of the
-- input video and the output video are different, the output video will be
-- distorted. 'Keep': Elastic Transcoder does not scale the output video. If
-- either dimension of the input video exceeds the values that you specified for 'MaxWidth' and 'MaxHeight', Elastic Transcoder crops the output video. 'ShrinkToFit': Elastic Transcoder scales the output video down so that its dimensions
-- match the values that you specified for at least one of 'MaxWidth' and 'MaxHeight'
-- without exceeding either value. If you specify this option, Elastic
-- Transcoder does not scale the video up. 'ShrinkToFill': Elastic Transcoder
-- scales the output video down so that its dimensions match the values that you
-- specified for at least one of 'MaxWidth' and 'MaxHeight' without dropping below
-- either value. If you specify this option, Elastic Transcoder does not scale
-- the video up.
vpSizingPolicy :: Lens' VideoParameters (Maybe Text)
vpSizingPolicy = lens _vpSizingPolicy setField
  where
    setField s v = s { _vpSizingPolicy = v }
-- | Settings for the size, location, and opacity of graphics that you want
-- Elastic Transcoder to overlay over videos that are transcoded using this
-- preset. You can specify settings for up to four watermarks. Watermarks appear
-- in the specified size and location, and with the specified opacity for the
-- duration of the transcoded video.
--
-- Watermarks can be in .png or .jpg format. If you want to display a watermark
-- that is not rectangular, use the .png format, which supports transparency.
--
-- When you create a job that uses this preset, you specify the .png or .jpg
-- graphics that you want Elastic Transcoder to include in the transcoded
-- videos. You can specify fewer graphics in the job than you specify watermark
-- settings in the preset, which allows you to use the same preset for up to
-- four watermarks that have different dimensions.
-- NOTE: composed with the '_List' iso, so callers see an ordinary list while
-- the record stores the named-list wire type.
vpWatermarks :: Lens' VideoParameters [PresetWatermark]
vpWatermarks = lens _vpWatermarks (\s a -> s { _vpWatermarks = a }) . _List
instance FromJSON VideoParameters where
    -- The applicative chain must list keys in exactly the order of the
    -- 'VideoParameters' constructor's fields. '.!= mempty' defaults the
    -- map-valued "CodecOptions" and list-valued "Watermarks" when absent.
    parseJSON = withObject "VideoParameters" $ \o -> VideoParameters
        <$> o .:? "AspectRatio"
        <*> o .:? "BitRate"
        <*> o .:? "Codec"
        <*> o .:? "CodecOptions" .!= mempty
        <*> o .:? "DisplayAspectRatio"
        <*> o .:? "FixedGOP"
        <*> o .:? "FrameRate"
        <*> o .:? "KeyframesMaxDist"
        <*> o .:? "MaxFrameRate"
        <*> o .:? "MaxHeight"
        <*> o .:? "MaxWidth"
        <*> o .:? "PaddingPolicy"
        <*> o .:? "Resolution"
        <*> o .:? "SizingPolicy"
        <*> o .:? "Watermarks" .!= mempty
instance ToJSON VideoParameters where
    -- Pair order only affects source readability; aeson objects are unordered.
    -- The key set mirrors the FromJSON instance above.
    toJSON VideoParameters{..} = object
        [ "Codec"              .= _vpCodec
        , "CodecOptions"       .= _vpCodecOptions
        , "KeyframesMaxDist"   .= _vpKeyframesMaxDist
        , "FixedGOP"           .= _vpFixedGOP
        , "BitRate"            .= _vpBitRate
        , "FrameRate"          .= _vpFrameRate
        , "MaxFrameRate"       .= _vpMaxFrameRate
        , "Resolution"         .= _vpResolution
        , "AspectRatio"        .= _vpAspectRatio
        , "MaxWidth"           .= _vpMaxWidth
        , "MaxHeight"          .= _vpMaxHeight
        , "DisplayAspectRatio" .= _vpDisplayAspectRatio
        , "SizingPolicy"       .= _vpSizingPolicy
        , "PaddingPolicy"      .= _vpPaddingPolicy
        , "Watermarks"         .= _vpWatermarks
        ]
-- | Wire representation of a master playlist attached to a transcoding job.
data Playlist = Playlist
    { _p2Format               :: Maybe Text                 -- ^ 'HLSv3', 'HLSv4', or 'Smooth'
    , _p2HlsContentProtection :: Maybe HlsContentProtection -- ^ optional HLS DRM settings
    , _p2Name                 :: Maybe Text                 -- ^ master playlist name
    , _p2OutputKeys           :: List "OutputKeys" Text     -- ^ keys of the outputs included
    , _p2Status               :: Maybe Text                 -- ^ status of the owning job
    , _p2StatusDetail         :: Maybe Text                 -- ^ free-text status explanation
    } deriving (Eq, Read, Show)
-- | 'Playlist' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'p2Format' @::@ 'Maybe' 'Text'
--
-- * 'p2HlsContentProtection' @::@ 'Maybe' 'HlsContentProtection'
--
-- * 'p2Name' @::@ 'Maybe' 'Text'
--
-- * 'p2OutputKeys' @::@ ['Text']
--
-- * 'p2Status' @::@ 'Maybe' 'Text'
--
-- * 'p2StatusDetail' @::@ 'Maybe' 'Text'
--
-- All-empty default; populate through the lenses below.
playlist :: Playlist
playlist = Playlist
    { _p2Format               = Nothing
    , _p2HlsContentProtection = Nothing
    , _p2Name                 = Nothing
    , _p2OutputKeys           = mempty
    , _p2Status               = Nothing
    , _p2StatusDetail         = Nothing
    }
-- | The format of the output playlist. Valid formats include 'HLSv3', 'HLSv4', and 'Smooth'.
p2Format :: Lens' Playlist (Maybe Text)
p2Format = lens _p2Format setField
  where
    setField s v = s { _p2Format = v }
-- | The HLS content protection settings, if any, that you want Elastic Transcoder
-- to apply to the output files associated with this playlist.
p2HlsContentProtection :: Lens' Playlist (Maybe HlsContentProtection)
p2HlsContentProtection = lens _p2HlsContentProtection setField
  where
    setField s v = s { _p2HlsContentProtection = v }
-- | The name that you want Elastic Transcoder to assign to the master playlist,
-- for example, nyc-vacation.m3u8. If the name includes a '/' character, the
-- section of the name before the last '/' must be identical for all 'Name' objects.
-- If you create more than one master playlist, the values of all 'Name' objects
-- must be unique.
--
-- Note: Elastic Transcoder automatically appends the relevant file extension
-- to the file name ('.m3u8' for 'HLSv3' and 'HLSv4' playlists, and '.ism' and '.ismc' for 'Smooth' playlists). If you include a file extension in 'Name', the file name
-- will have two extensions.
p2Name :: Lens' Playlist (Maybe Text)
p2Name = lens _p2Name setField
  where
    setField s v = s { _p2Name = v }
-- | For each output in this job that you want to include in a master playlist,
-- the value of the Outputs:Key object.
--
-- If your output is not 'HLS' or does not have a segment duration set, the
-- name of the output file is a concatenation of 'OutputKeyPrefix' and 'Outputs:Key':
--
-- OutputKeyPrefix'Outputs:Key'
--
-- If your output is 'HLSv3' and has a segment duration set, or is not included
-- in a playlist, Elastic Transcoder creates an output playlist file with a file
-- extension of '.m3u8', and a series of '.ts' files that include a five-digit
-- sequential counter beginning with 00000:
--
-- OutputKeyPrefix'Outputs:Key'.m3u8
--
-- OutputKeyPrefix'Outputs:Key'00000.ts
--
-- If your output is 'HLSv4', has a segment duration set, and is included in an 'HLSv4' playlist, Elastic Transcoder creates an output playlist file with a
-- file extension of '_v4.m3u8'. If the output is video, Elastic Transcoder also
-- creates an output file with an extension of '_iframe.m3u8':
--
-- OutputKeyPrefix'Outputs:Key'_v4.m3u8
--
-- OutputKeyPrefix'Outputs:Key'_iframe.m3u8
--
-- OutputKeyPrefix'Outputs:Key'.ts
--
-- Elastic Transcoder automatically appends the relevant file extension to
-- the file name. If you include a file extension in Output Key, the file name
-- will have two extensions.
--
-- If you include more than one output in a playlist, any segment duration
-- settings, clip settings, or caption settings must be the same for all outputs
-- in the playlist. For 'Smooth' playlists, the 'Audio:Profile', 'Video:Profile', and 'Video:FrameRate' to 'Video:KeyframesMaxDist' ratio must be the same for all outputs.
-- NOTE: composed with the '_List' iso, so callers see a plain list while the
-- record stores the named-list wire type.
p2OutputKeys :: Lens' Playlist [Text]
p2OutputKeys = lens _p2OutputKeys (\s a -> s { _p2OutputKeys = a }) . _List
-- | The status of the job with which the playlist is associated.
p2Status :: Lens' Playlist (Maybe Text)
p2Status = lens _p2Status setField
  where
    setField s v = s { _p2Status = v }
-- | Information that further explains the status.
p2StatusDetail :: Lens' Playlist (Maybe Text)
p2StatusDetail = lens _p2StatusDetail setField
  where
    setField s v = s { _p2StatusDetail = v }
instance FromJSON Playlist where
    -- Keys must appear in the exact order of the 'Playlist' constructor fields.
    parseJSON = withObject "Playlist" $ \o -> Playlist
        <$> o .:? "Format"
        <*> o .:? "HlsContentProtection"
        <*> o .:? "Name"
        <*> o .:? "OutputKeys" .!= mempty
        <*> o .:? "Status"
        <*> o .:? "StatusDetail"
instance ToJSON Playlist where
    -- Key set mirrors the FromJSON instance; pair order is cosmetic.
    toJSON Playlist{..} = object
        [ "Name"                 .= _p2Name
        , "Format"               .= _p2Format
        , "OutputKeys"           .= _p2OutputKeys
        , "HlsContentProtection" .= _p2HlsContentProtection
        , "Status"               .= _p2Status
        , "StatusDetail"         .= _p2StatusDetail
        ]
-- | Amazon SNS topics to notify at each stage of a job's lifecycle.
data Notifications = Notifications
    { _nCompleted   :: Maybe Text -- ^ topic for successful completion
    , _nError       :: Maybe Text -- ^ topic for error conditions
    , _nProgressing :: Maybe Text -- ^ topic for start of processing
    , _nWarning     :: Maybe Text -- ^ topic for warning conditions
    } deriving (Eq, Ord, Read, Show)
-- | 'Notifications' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'nCompleted' @::@ 'Maybe' 'Text'
--
-- * 'nError' @::@ 'Maybe' 'Text'
--
-- * 'nProgressing' @::@ 'Maybe' 'Text'
--
-- * 'nWarning' @::@ 'Maybe' 'Text'
--
-- All-empty default; set individual topics through the lenses below.
notifications :: Notifications
notifications = Notifications
    { _nCompleted   = Nothing
    , _nError       = Nothing
    , _nProgressing = Nothing
    , _nWarning     = Nothing
    }
-- | The Amazon SNS topic that you want to notify when Elastic Transcoder has
-- finished processing the job.
nCompleted :: Lens' Notifications (Maybe Text)
nCompleted = lens _nCompleted setField
  where
    setField s v = s { _nCompleted = v }
-- | The Amazon SNS topic that you want to notify when Elastic Transcoder
-- encounters an error condition.
nError :: Lens' Notifications (Maybe Text)
nError = lens _nError setField
  where
    setField s v = s { _nError = v }
-- | The Amazon Simple Notification Service (Amazon SNS) topic that you want to
-- notify when Elastic Transcoder has started to process the job.
nProgressing :: Lens' Notifications (Maybe Text)
nProgressing = lens _nProgressing setField
  where
    setField s v = s { _nProgressing = v }
-- | The Amazon SNS topic that you want to notify when Elastic Transcoder
-- encounters a warning condition.
nWarning :: Lens' Notifications (Maybe Text)
nWarning = lens _nWarning setField
  where
    setField s v = s { _nWarning = v }
instance FromJSON Notifications where
    -- Keys must appear in the exact order of the 'Notifications' fields.
    parseJSON = withObject "Notifications" $ \o -> Notifications
        <$> o .:? "Completed"
        <*> o .:? "Error"
        <*> o .:? "Progressing"
        <*> o .:? "Warning"
instance ToJSON Notifications where
    -- Key set mirrors the FromJSON instance; pair order is cosmetic.
    toJSON Notifications{..} = object
        [ "Progressing" .= _nProgressing
        , "Completed"   .= _nCompleted
        , "Warning"     .= _nWarning
        , "Error"       .= _nError
        ]
-- | A clip of the input file to transcode: just an optional 'TimeSpan'.
newtype Clip = Clip
    { _cTimeSpan :: Maybe TimeSpan -- ^ when the clip begins and how long it lasts
    } deriving (Eq, Read, Show)
-- | 'Clip' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cTimeSpan' @::@ 'Maybe' 'TimeSpan'
--
-- Default 'Clip' with no time span configured.
clip :: Clip
clip = Clip Nothing
-- | Settings that determine when a clip begins and how long it lasts.
cTimeSpan :: Lens' Clip (Maybe TimeSpan)
cTimeSpan = lens _cTimeSpan setField
  where
    setField s v = s { _cTimeSpan = v }
instance FromJSON Clip where
    parseJSON = withObject "Clip" $ \o -> Clip
        <$> o .:? "TimeSpan"
instance ToJSON Clip where
    toJSON Clip{..} = object
        [ "TimeSpan" .= _cTimeSpan
        ]
-- | Description of the file a transcoding job reads from Amazon S3.
data JobInput = JobInput
    { _jiAspectRatio :: Maybe Text       -- ^ \'auto\' or an explicit ratio such as \'16:9\'
    , _jiContainer   :: Maybe Text       -- ^ \'auto\' or an explicit container type
    , _jiEncryption  :: Maybe Encryption -- ^ decryption settings for encrypted inputs
    , _jiFrameRate   :: Maybe Text       -- ^ \'auto\' or an explicit frame rate
    , _jiInterlaced  :: Maybe Text       -- ^ \'auto\', \'true\', or \'false\'
    , _jiKey         :: Maybe Text       -- ^ S3 object key (including any prefix)
    , _jiResolution  :: Maybe Text       -- ^ must be \'auto\'
    } deriving (Eq, Read, Show)
-- | 'JobInput' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'jiAspectRatio' @::@ 'Maybe' 'Text'
--
-- * 'jiContainer' @::@ 'Maybe' 'Text'
--
-- * 'jiEncryption' @::@ 'Maybe' 'Encryption'
--
-- * 'jiFrameRate' @::@ 'Maybe' 'Text'
--
-- * 'jiInterlaced' @::@ 'Maybe' 'Text'
--
-- * 'jiKey' @::@ 'Maybe' 'Text'
--
-- * 'jiResolution' @::@ 'Maybe' 'Text'
--
-- All-empty default; populate through the lenses below.
jobInput :: JobInput
jobInput = JobInput
    { _jiAspectRatio = Nothing
    , _jiContainer   = Nothing
    , _jiEncryption  = Nothing
    , _jiFrameRate   = Nothing
    , _jiInterlaced  = Nothing
    , _jiKey         = Nothing
    , _jiResolution  = Nothing
    }
-- | The aspect ratio of the input file. If you want Elastic Transcoder to
-- automatically detect the aspect ratio of the input file, specify 'auto'. If you
-- want to specify the aspect ratio for the output file, enter one of the
-- following values:
--
-- '1:1', '4:3', '3:2', '16:9'
--
-- If you specify a value other than 'auto', Elastic Transcoder disables
-- automatic detection of the aspect ratio.
jiAspectRatio :: Lens' JobInput (Maybe Text)
jiAspectRatio = lens _jiAspectRatio setField
  where
    setField s v = s { _jiAspectRatio = v }
-- | The container type for the input file. If you want Elastic Transcoder to
-- automatically detect the container type of the input file, specify 'auto'. If
-- you want to specify the container type for the input file, enter one of the
-- following values:
--
-- '3gp', 'aac', 'asf', 'avi', 'divx', 'flv', 'm4a', 'mkv', 'mov', 'mp3', 'mp4', 'mpeg', 'mpeg-ps', 'mpeg-ts', 'mxf', 'ogg', 'vob', 'wav', 'webm'
jiContainer :: Lens' JobInput (Maybe Text)
jiContainer = lens _jiContainer setField
  where
    setField s v = s { _jiContainer = v }
-- | The encryption settings, if any, that are used for decrypting your input
-- files. If your input file is encrypted, you must specify the mode that
-- Elastic Transcoder will use to decrypt your file.
jiEncryption :: Lens' JobInput (Maybe Encryption)
jiEncryption = lens _jiEncryption setField
  where
    setField s v = s { _jiEncryption = v }
-- | The frame rate of the input file. If you want Elastic Transcoder to
-- automatically detect the frame rate of the input file, specify 'auto'. If you
-- want to specify the frame rate for the input file, enter one of the following
-- values:
--
-- '10', '15', '23.97', '24', '25', '29.97', '30', '60'
--
-- If you specify a value other than 'auto', Elastic Transcoder disables
-- automatic detection of the frame rate.
jiFrameRate :: Lens' JobInput (Maybe Text)
jiFrameRate = lens _jiFrameRate setField
  where
    setField s v = s { _jiFrameRate = v }
-- | Whether the input file is interlaced. If you want Elastic Transcoder to
-- automatically detect whether the input file is interlaced, specify 'auto'. If
-- you want to specify whether the input file is interlaced, enter one of the
-- following values:
--
-- 'true', 'false'
--
-- If you specify a value other than 'auto', Elastic Transcoder disables
-- automatic detection of interlacing.
jiInterlaced :: Lens' JobInput (Maybe Text)
jiInterlaced = lens _jiInterlaced setField
  where
    setField s v = s { _jiInterlaced = v }
-- | The name of the file to transcode. Elsewhere in the body of the JSON block
-- is the the ID of the pipeline to use for processing the job. The 'InputBucket'
-- object in that pipeline tells Elastic Transcoder which Amazon S3 bucket to
-- get the file from.
--
-- If the file name includes a prefix, such as 'cooking/lasagna.mpg', include the
-- prefix in the key. If the file isn't in the specified bucket, Elastic
-- Transcoder returns an error.
jiKey :: Lens' JobInput (Maybe Text)
jiKey = lens _jiKey setField
  where
    setField s v = s { _jiKey = v }
-- | This value must be 'auto', which causes Elastic Transcoder to automatically
-- detect the resolution of the input file.
jiResolution :: Lens' JobInput (Maybe Text)
jiResolution = lens _jiResolution setField
  where
    setField s v = s { _jiResolution = v }
instance FromJSON JobInput where
    -- Keys must appear in the exact order of the 'JobInput' constructor fields.
    parseJSON = withObject "JobInput" $ \o -> JobInput
        <$> o .:? "AspectRatio"
        <*> o .:? "Container"
        <*> o .:? "Encryption"
        <*> o .:? "FrameRate"
        <*> o .:? "Interlaced"
        <*> o .:? "Key"
        <*> o .:? "Resolution"
instance ToJSON JobInput where
    -- Key set mirrors the FromJSON instance; pair order is cosmetic.
    toJSON JobInput{..} = object
        [ "Key"         .= _jiKey
        , "FrameRate"   .= _jiFrameRate
        , "Resolution"  .= _jiResolution
        , "AspectRatio" .= _jiAspectRatio
        , "Interlaced"  .= _jiInterlaced
        , "Container"   .= _jiContainer
        , "Encryption"  .= _jiEncryption
        ]
| dysinger/amazonka | amazonka-elastictranscoder/gen/Network/AWS/ElasticTranscoder/Types.hs | mpl-2.0 | 140,506 | 0 | 43 | 31,060 | 16,841 | 10,092 | 6,749 | -1 | -1 |
--
-- Copyright 2017-2018 Azad Bolour
-- Licensed under GNU Affero General Public License v3.0 -
-- https://github.com/azadbolour/boardgame/blob/master/LICENSE.md
--
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Bolour.Util.FrequencyDistribution (
FrequencyDistribution(..)
, mkFrequencyDistribution
, mkDistribution
, randomValue
, leastFrequentValue
, normalizedFrequencies
) where
import System.Random
import Data.List
import qualified Data.Map as Map
-- | A discrete frequency distribution over @value@, kept in several
-- precomputed forms so that sampling and lookup are both cheap.
data FrequencyDistribution value = FrequencyDistribution {
    frequencies :: [(value, Int)]      -- ^ the raw (value, weight) pairs as supplied
  , distribution :: [(value, Int)]     -- ^ running cumulative weights (see 'mkDistribution')
  , maxDistribution :: Int             -- ^ total weight = snd of the last cumulative entry
  , frequencyMap :: Map.Map value Int  -- ^ weight lookup keyed by value
}
-- | Build a 'FrequencyDistribution' from raw (value, weight) pairs.
mkFrequencyDistribution :: Ord value => [(value, Int)] -> FrequencyDistribution value
mkFrequencyDistribution freqs =
  FrequencyDistribution freqs cumulative total table
    where
      cumulative = mkDistribution freqs
      -- NOTE(review): 'last' makes this partial for an empty input list — confirm
      -- callers never pass [].
      total = snd (last cumulative)
      table = Map.fromList freqs
-- | Turn (value, weight) pairs into (value, cumulative weight) pairs, e.g.
-- [(a,2),(b,3)] becomes [(a,2),(b,5)]. An empty input yields an empty output.
mkDistribution :: [(value, Int)] -> [(value, Int)]
mkDistribution = scanl1 accumulate
  where
    accumulate (_, runningTotal) (v, freq) = (v, runningTotal + freq)
-- | Sample a value with probability proportional to its weight: draw a uniform
-- number below the total weight and return the first value whose cumulative
-- weight exceeds it.
randomValue :: FrequencyDistribution value -> IO value
randomValue FrequencyDistribution {distribution, maxDistribution} = do
  dist <- randomRIO (0, maxDistribution - 1) -- uses closed interval [0, maxDistribution - 1]
  -- Invariant dist < maxDistribution => there exists an index whose distribution value is > dist.
  -- NOTE(review): the irrefutable 'Just index' relies on that invariant (and on a
  -- non-empty distribution with a positive total); it crashes if violated.
  let Just index = findIndex ((<) dist . snd) distribution
      val = fst $ distribution !! index
  return val
-- | Pick the least frequent of the given values, or 'Nothing' for an empty
-- list. Ties go to the earliest candidate. Values absent from the
-- distribution compare as 'Nothing', which sorts below every 'Just' —
-- preserving the original behaviour that unknown values win.
leastFrequentValue :: Ord value => FrequencyDistribution value -> [value] -> Maybe value
leastFrequentValue FrequencyDistribution {frequencyMap} values
  | null values = Nothing
  | otherwise   = Just (minimumBy byFrequency values)
  where
    byFrequency v1 v2 =
      compare (Map.lookup v1 frequencyMap) (Map.lookup v2 frequencyMap)
-- | Rescale the stored frequencies so they sum to approximately @roughTotal@:
-- each weight is multiplied by @roughTotal / maxDistribution@ and rounded, but
-- never drops below 1 (so every value stays representable). Returns the
-- normalized map together with its actual total, which can differ from
-- @roughTotal@ because of rounding and the minimum of 1.
normalizedFrequencies :: Ord value => FrequencyDistribution value -> Int -> ((Map.Map value Int), Int)
normalizedFrequencies FrequencyDistribution {maxDistribution, frequencyMap} roughTotal =
  let factor :: Float = fromIntegral roughTotal / fromIntegral maxDistribution
      normalizer freq = max 1 (round $ fromIntegral freq * factor)
      normalized = normalizer <$> frequencyMap
      total = sum $ Map.elems normalized
  in (normalized, total)
| azadbolour/boardgame | haskell-server/src/Bolour/Util/FrequencyDistribution.hs | agpl-3.0 | 2,500 | 0 | 13 | 425 | 639 | 341 | 298 | 47 | 3 |
{-# LANGUAGE UnicodeSyntax #-}
-- |
-- Module : Math.Dozenal
-- Copyright : (C) 2014 Siddhanathan Shanmugam
-- License : LGPL (see LICENSE)
-- Maintainer : siddhanathan@gmail.com
-- Portability : very
--
-- Dozenal (Duodecimal) number system, promoting the use of base 12.
--
module Dozenal ( decimalToDozenal
, dozenalToDecimal
, Dozenal(..)
) where
-- | The dozenal number system is superior to the decimal number system
-- which is widely in use today.
--
-- For information on why you should bother, see
-- <http://io9.com/5977095/why-we-should-switch-to-a-base-12-counting-system>
import Prelude.Unicode ( (∘) -- function composition
, (∈) -- `elem`
)
import Numeric (showIntAtBase, readInt)
import Data.Char (intToDigit, digitToInt)
import Data.Maybe (listToMaybe)
-- | A number rendered in base twelve; ten is displayed as @X@ and eleven as @E@.
newtype Dozenal = Dozenal { number ∷ String } deriving (Show, Eq)
-- | The base of the dozenal (duodecimal) system.
dozenalBase ∷ Int
dozenalBase = 12

-- | Valid internal digits; @a@ and @b@ stand for ten and eleven.
dozenalCharacters ∷ String
dozenalCharacters = "0123456789ab"
-- | Swap between the internal digit convention (@a@ = ten, @b@ = eleven) and
-- the display convention (@X@ = ten, @E@ = eleven). The mapping is its own
-- inverse, so the same function converts in both directions.
changeConvention :: String -> String
changeConvention = map swapDigit
  where
    swapDigit c = maybe c id (lookup c pairs)
    pairs = [('X', 'a'), ('E', 'b'), ('a', 'X'), ('b', 'E')]
-- | Render a non-negative 'Int' in base twelve using the X\/E display
-- convention.
decimalToDozenal :: Int -> Dozenal
decimalToDozenal n =
    Dozenal (changeConvention (showIntAtBase dozenalBase intToDigit n ""))
-- | Parse a dozenal string (X\/E display convention) back to an 'Int';
-- 'Nothing' on invalid input.
dozenalToDecimal :: String -> Maybe Int
dozenalToDecimal s =
    fmap fst (listToMaybe (readInt dozenalBase isDozenalDigit digitToInt (changeConvention s)))
  where
    isDozenalDigit c = c `elem` dozenalCharacters
-- | Extract the value from a 'Just'. Partial by design: internal callers only
-- apply it to strings this module itself produced, which always parse.
determineMaybe :: Maybe a -> a
determineMaybe (Just x) = x
determineMaybe Nothing  =
    error "Dozenal.determineMaybe: Nothing (not a valid dozenal string)"
-- | Interpret a dozenal digit string as an 'Int'; errors on invalid input.
-- (Added the previously missing top-level type signature.)
operand :: String -> Int
operand = determineMaybe . dozenalToDecimal
-- | Arithmetic on dozenal numbers, performed by round-tripping through 'Int'.
-- Fixes: the multiplication equation previously used a garbled '★' symbol
-- (which is not a 'Num' method and does not compile); 'abs' failed to apply
-- 'abs' to the operand; 'fromInteger' was left as 'error' although it is
-- trivially definable (so numeric literals now work).
-- NOTE(review): negative results remain unrepresentable — 'showIntAtBase'
-- rejects negative inputs — so subtraction below zero still errors downstream.
instance Num Dozenal where
  Dozenal a + Dozenal b = decimalToDozenal $ operand a + operand b
  Dozenal a * Dozenal b = decimalToDozenal $ operand a * operand b
  Dozenal a - Dozenal b = decimalToDozenal $ operand a - operand b
  abs (Dozenal a)       = decimalToDozenal $ abs $ operand a
  signum (Dozenal a)    = decimalToDozenal $ signum $ operand a
  fromInteger           = decimalToDozenal . fromInteger
| siddhanathan/dozenal | Dozenal.hs | lgpl-3.0 | 2,424 | 4 | 8 | 684 | 454 | 248 | 206 | 41 | 5 |
{-#LANGUAGE DeriveDataTypeable#-}
module Data.P440.Domain.TRG where
import Data.P440.Domain.SimpleTypes
import Data.P440.Domain.ComplexTypes
import Data.P440.Domain.ComplexTypesZS hiding (СвПл)
import Data.Typeable (Typeable)
import Data.Text (Text)
-- 2.8 Demand for payment under a bank guarantee (form TRG).
-- | Top-level message envelope ("Файл" = file). Field-name translations are
-- best-effort readings of the Russian abbreviations — confirm against the
-- P440 specification.
data Файл = Файл {
    идЭС :: GUID               -- ^ electronic message identifier (GUID)
   ,версПрог :: Text           -- ^ producing-software version
   ,телОтпр :: Text            -- ^ sender telephone
   ,должнОтпр :: Text          -- ^ sender job title
   ,фамОтпр :: Text            -- ^ sender surname
   ,требованиеБГ :: ТребованиеБГ -- ^ the demand payload
} deriving (Eq, Show, Typeable)
-- | The bank-guarantee payment demand itself. Translations are best-effort —
-- confirm against the P440 specification.
data ТребованиеБГ = ТребованиеБГ {
    номТреб :: Text      -- ^ demand number
   ,датаПодп :: Date     -- ^ date signed
   ,номБГ :: Text        -- ^ bank-guarantee number
   ,датаБГ :: Date       -- ^ bank-guarantee date
   ,номТребУпл :: Text   -- ^ payment-demand number
   ,датаТребУпл :: Date  -- ^ payment-demand date
   ,суммаПлат :: Text    -- ^ payment amount (kept as text, as on the wire)
   ,срокПлат :: Date     -- ^ payment due date
   ,кбк :: Text          -- ^ KBK budget classification code
   ,октмо :: Text        -- ^ OKTMO territorial code
   ,фамИсп :: Text       -- ^ executor surname
   ,телИсп :: Text       -- ^ executor telephone
   ,свНО :: СвНО         -- ^ tax-authority details
   ,свПл :: СвПл         -- ^ payer details
   ,руководитель :: РукНО -- ^ head (signatory) of the tax authority
} deriving (Eq, Show, Typeable)
-- | Payer details: the payer itself plus its address in the Russian
-- Federation. Shadows the 'СвПл' hidden from ComplexTypesZS in the imports.
data СвПл = СвПл {
    плательщик :: ПлЮЛИлиПлИП -- ^ the payer (legal entity or individual entrepreneur)
   ,адрПлат :: АдрРФ          -- ^ payer address (RF)
} deriving (Eq, Show, Typeable)
-- | A payer is either a legal entity ('ПлЮЛ') or — presumably — an individual
-- entrepreneur represented by the person type 'ПлФЛ' (confirm against spec).
data ПлЮЛИлиПлИП = ПлЮЛ' ПлЮЛ
                 | ПлИП' ПлФЛ
                 deriving (Eq, Show, Typeable)
| Macil-dev/440P-old | src/Data/P440/Domain/TRG.hs | unlicense | 1,481 | 71 | 8 | 362 | 637 | 346 | 291 | 39 | 0 |
-- | True iff the list contains an odd number of 'True' values.
--
-- xor [False, True, False] == True
-- xor [False, True, False, False, True] == False
xor :: [Bool] -> Bool
xor = odd . length . filter id
-- | Reimplementation of 'map' as a right fold.
--
-- map' odd [1, 2, 3, 4] == [True, False, True, False]
map' :: (a -> b) -> [a] -> [b]
map' f = foldr ((:) . f) []
| beloglazov/haskell-course | hw4.hs | apache-2.0 | 307 | 0 | 10 | 74 | 116 | 65 | 51 | 4 | 2 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QWhatsThis.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:15
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QWhatsThis (
QqWhatsThisCreateAction(..)
,qWhatsThisEnterWhatsThisMode
,qWhatsThisHideText
,qWhatsThisInWhatsThisMode
,qWhatsThisLeaveWhatsThisMode
,QqWhatsThisShowText(..), QqqWhatsThisShowText(..)
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- Machine-generated bindings to the static members of Qt's QWhatsThis class.
-- Each Haskell wrapper marshals its arguments and delegates to a C shim
-- (the qtc_QWhatsThis_* foreign imports).

-- | QWhatsThis::createAction(): overloaded on whether a parent 'QObject'
-- is supplied.
class QqWhatsThisCreateAction x1 where
 qWhatsThisCreateAction :: x1 -> IO (QAction ())

instance QqWhatsThisCreateAction (()) where
 qWhatsThisCreateAction ()
  = withQActionResult $
    qtc_QWhatsThis_createAction

foreign import ccall "qtc_QWhatsThis_createAction" qtc_QWhatsThis_createAction :: IO (Ptr (TQAction ()))

instance QqWhatsThisCreateAction ((QObject t1)) where
 qWhatsThisCreateAction (x1)
  = withQActionResult $
    withObjectPtr x1 $ \cobj_x1 ->
    qtc_QWhatsThis_createAction1 cobj_x1

foreign import ccall "qtc_QWhatsThis_createAction1" qtc_QWhatsThis_createAction1 :: Ptr (TQObject t1) -> IO (Ptr (TQAction ()))

-- | QWhatsThis::enterWhatsThisMode().
qWhatsThisEnterWhatsThisMode :: (()) -> IO ()
qWhatsThisEnterWhatsThisMode ()
  = qtc_QWhatsThis_enterWhatsThisMode

foreign import ccall "qtc_QWhatsThis_enterWhatsThisMode" qtc_QWhatsThis_enterWhatsThisMode :: IO ()

-- | QWhatsThis::hideText().
qWhatsThisHideText :: (()) -> IO ()
qWhatsThisHideText ()
  = qtc_QWhatsThis_hideText

foreign import ccall "qtc_QWhatsThis_hideText" qtc_QWhatsThis_hideText :: IO ()

-- | QWhatsThis::inWhatsThisMode().
qWhatsThisInWhatsThisMode :: (()) -> IO (Bool)
qWhatsThisInWhatsThisMode ()
  = withBoolResult $
    qtc_QWhatsThis_inWhatsThisMode

foreign import ccall "qtc_QWhatsThis_inWhatsThisMode" qtc_QWhatsThis_inWhatsThisMode :: IO CBool

-- | QWhatsThis::leaveWhatsThisMode().
qWhatsThisLeaveWhatsThisMode :: (()) -> IO ()
qWhatsThisLeaveWhatsThisMode ()
  = qtc_QWhatsThis_leaveWhatsThisMode

foreign import ccall "qtc_QWhatsThis_leaveWhatsThisMode" qtc_QWhatsThis_leaveWhatsThisMode :: IO ()

-- | QWhatsThis::showText() taking a Haskell-side 'Point'.
class QqWhatsThisShowText x1 where
 qWhatsThisShowText :: x1 -> IO ()

-- | QWhatsThis::showText() taking a Qt-side 'QPoint'.
class QqqWhatsThisShowText x1 where
 qqWhatsThisShowText :: x1 -> IO ()

instance QqWhatsThisShowText ((Point, String)) where
 qWhatsThisShowText (x1, x2)
  = withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
    withCWString x2 $ \cstr_x2 ->
    qtc_QWhatsThis_showText_qth cpoint_x1_x cpoint_x1_y cstr_x2

foreign import ccall "qtc_QWhatsThis_showText_qth" qtc_QWhatsThis_showText_qth :: CInt -> CInt -> CWString -> IO ()

instance QqWhatsThisShowText ((Point, String, QWidget t3)) where
 qWhatsThisShowText (x1, x2, x3)
  = withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
    withCWString x2 $ \cstr_x2 ->
    withObjectPtr x3 $ \cobj_x3 ->
    qtc_QWhatsThis_showText1_qth cpoint_x1_x cpoint_x1_y cstr_x2 cobj_x3

foreign import ccall "qtc_QWhatsThis_showText1_qth" qtc_QWhatsThis_showText1_qth :: CInt -> CInt -> CWString -> Ptr (TQWidget t3) -> IO ()

instance QqqWhatsThisShowText ((QPoint t1, String)) where
 qqWhatsThisShowText (x1, x2)
  = withObjectPtr x1 $ \cobj_x1 ->
    withCWString x2 $ \cstr_x2 ->
    qtc_QWhatsThis_showText cobj_x1 cstr_x2

foreign import ccall "qtc_QWhatsThis_showText" qtc_QWhatsThis_showText :: Ptr (TQPoint t1) -> CWString -> IO ()

instance QqqWhatsThisShowText ((QPoint t1, String, QWidget t3)) where
 qqWhatsThisShowText (x1, x2, x3)
  = withObjectPtr x1 $ \cobj_x1 ->
    withCWString x2 $ \cstr_x2 ->
    withObjectPtr x3 $ \cobj_x3 ->
    qtc_QWhatsThis_showText1 cobj_x1 cstr_x2 cobj_x3

foreign import ccall "qtc_QWhatsThis_showText1" qtc_QWhatsThis_showText1 :: Ptr (TQPoint t1) -> CWString -> Ptr (TQWidget t3) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QWhatsThis.hs | bsd-2-clause | 4,006 | 0 | 12 | 567 | 965 | 515 | 450 | 78 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Parse.Comment where
import Text.Parsec (Stream, ParsecT, many, try, (<|>))
import Text.Parsec.Char (char, string)
import Text.Parsec.Combinator (count, manyTill, between, notFollowedBy, many1, lookAhead)
import Parse.SourceCharacter (sourceCharacter)
import Parse.Whitespace (lineTerminator)
import Parse.Language (symbol)
-- | Skip a single comment, trying the (backtracking) single-line form first.
comment :: Stream s m Char => ParsecT s u m ()
comment = singleLineComment <|> multiLineComment
-- | Skip a @//@ comment. The whole parser is wrapped in 'try', so a failure
-- (e.g. a lone '/') consumes no input and 'comment' can fall through to the
-- multi-line branch.
-- NOTE(review): 'manyTill ... lineTerminator' requires a terminator, so a
-- single-line comment at end of input (no trailing newline) fails to parse —
-- confirm whether that is intended.
singleLineComment :: Stream s m Char => ParsecT s u m ()
singleLineComment = try $ do
  count 2 (char '/')
  manyTill sourceCharacter lineTerminator
  return ()
-- | Skip a @/* ... */@ comment. The closing delimiter uses 'try (string "*/")'
-- so a lone '*' inside the comment body is consumed as content.
-- NOTE(review): 'string "/*"' is not wrapped in 'try'; on input starting with
-- '/' not followed by '*', this fails after consuming the '/', so callers
-- composing 'comment' with '<|>' may need an outer 'try' — confirm.
multiLineComment :: Stream s m Char => ParsecT s u m ()
multiLineComment = do
  string "/*"
  manyTill sourceCharacter $ try (string "*/")
  return ()
| michaelbjames/ecma6-parser | Parse/Comment.hs | bsd-2-clause | 790 | 0 | 10 | 133 | 268 | 142 | 126 | 20 | 1 |
import Control.Monad(foldM_)
import qualified System.Random as R
import qualified Bingo as B
import qualified Lottery as Lot
-- | Entry point for the console bingo demo. (Added the missing signature.)
main :: IO ()
main = test
-- | Prepare three 25-cell cards, then play 20 rounds, threading the lottery
-- state through 'doBingo' with 'foldM_'. (Added the missing signature.)
test :: IO ()
test = do
  (card1, card2, card3, ss) <- prepareBingo 25
  foldM_ (\st i -> doBingo i card1 card2 card3 st) ss [1 .. 20]
-- | Build the number pool and three fresh cards of size @n@, printing each.
-- Returns the cards plus the initial lottery state (full pool, nothing drawn).
-- Fixes the repeated shadowing of @g@ and deduplicates the card-creation code;
-- the sequence of 'R.newStdGen' calls is unchanged.
prepareBingo :: Int -> IO (B.Card, B.Card, B.Card, B.State)
prepareBingo n = do
  g <- R.newStdGen
  let cs = B.newCandidate g n
  card1 <- newCard cs
  card2 <- newCard cs
  card3 <- newCard cs
  putStrLn $ "cs: " ++ show cs
  putStrLn $ "card1: " ++ show card1
  putStrLn $ "card2: " ++ show card2
  putStrLn $ "card3: " ++ show card3
  return (card1, card2, card3, (cs, []))
  where
    -- Fresh generator per card, as in the original.
    newCard cs = do
      g <- R.newStdGen
      return (B.newCard g n cs)
-- | Play one round @n@: draw a number, apply the drawn history to all
-- three cards, print the results and return the updated lottery state.
doBingo :: Int -> B.Card -> B.Card -> B.Card -> B.State -> IO B.State
doBingo n card1 card2 card3 st = do
  g <- R.newStdGen
  let (x, st'@(_, ss)) = Lot.draw g st
      r1 = B.processCard card1 ss
      r2 = B.processCard card2 ss
      r3 = B.processCard card3 ss
      -- Uniform "round: tag: value" output line.
      say tag v = putStrLn (show n ++ ": " ++ tag ++ ": " ++ v)
  say "x" (show x)
  say "ss" (show ss)
  say "r1" (show (sbl r1))
  say "r2" (show (sbl r2))
  say "r3" (show (sbl r3))
  say "e1" (show (B.evalCard r1))
  say "e2" (show (B.evalCard r2))
  say "e3" (show (B.evalCard r3))
  return st'
  where
    -- Render booleans as 1/0 for compact printing.
    sbl = map (\b -> if b then 1 else 0)
| mitsuji/bingo | app/Console.hs | bsd-3-clause | 1,564 | 0 | 12 | 425 | 768 | 382 | 386 | 42 | 2 |
module ExternChecks (exitPermissions) where
import System.Exit (ExitCode (ExitFailure), exitWith)
-- | Report a file whose permissions are unacceptable, then terminate
-- the whole program with exit code 1.
exitPermissions :: FilePath -> IO ()
exitPermissions path = do
  putStrLn ("Bad permissions on file: " ++ path)
  exitWith (ExitFailure 1)
| qfjp/csce_dfa_project_test | src/ExternChecks.hs | bsd-3-clause | 259 | 0 | 9 | 57 | 71 | 38 | 33 | 7 | 1 |
module Syntax where
import Control.Monad.Reader
import Control.Monad.State.Lazy
import Data.Char
import qualified Data.Set as S
import Data.List hiding (partition)
import Debug.Trace
import Text.Parsec.Pos
type Name = String
-- Variable convention: word begins with upper-case represents constant and
-- lower-case represents variable, lower-case constant represent eigenvariable
-- | Mixed syntax of types, kinds and terms.  See the naming convention
-- above: upper-case heads are constants, lower-case are variables, and
-- lower-case constants are eigenvariables.
-- NOTE(review): per-constructor roles below are inferred from their use
-- in 'erase', 'apply' and 'norm' — confirm against the type checker.
data Exp = Var Name SourcePos
         | Star                    -- the kind of types
         | Const Name SourcePos
         | App Exp Exp             -- application
         | TApp Exp Exp            -- term applied to a type ('erase' drops the argument)
         | Lambda Exp Exp          -- binder is a pattern expression
         | Imply Exp Exp           -- implication / function space
         | Forall Name Exp
         | Abs Name Exp            -- type abstraction in a term ('erase' drops the binder)
         | Case Exp [(Exp, Exp)]   -- scrutinee with (pattern, body) alternatives
         | Let [(Exp, Exp)] Exp
         | Ann Exp Exp             -- expression annotated with a type
         deriving (Show, Eq, Ord)
-- | Top-level declarations of the surface language.
-- NOTE(review): field meanings are inferred from the constructor names;
-- confirm against the parser before relying on them.
data Decl = DataDecl Exp Exp [(Exp, Exp)]      -- data type declaration
          | FunDecl Exp Exp [([Exp], Exp)]     -- function with clauses
          | Prim Exp Exp                       -- primitive declaration
          | Syn Exp Exp Exp                    -- synonym declaration
          | TypeOperatorDecl String Int String -- fixity of a type operator
          | ProgOperatorDecl String Int String -- fixity of a program operator
          deriving (Show)
dummyPos = initialPos "dummy"
-- free variable of a type/kind exp
-- | Free variables of a type/kind expression, as a list.
freeVars = S.toList . freeVar

-- | Free-variable set.  NOTE(review): only the type-level constructors
-- are covered; applying this to a term form such as 'TApp', 'Abs',
-- 'Case', 'Let' or 'Ann' is a pattern-match failure.
freeVar (Var x _) = S.insert x S.empty
freeVar (Const x _) = S.empty
freeVar Star = S.empty
freeVar (App f1 f2) = (freeVar f1) `S.union` (freeVar f2)
freeVar (Forall x f) = S.delete x (freeVar f)               -- Forall binds x
freeVar (Lambda p f) = freeVar f `S.difference` freeVar p   -- pattern variables are binders
freeVar (Imply b h) = freeVar b `S.union` freeVar h
-- eigen variable of a type exp
-- | Eigenvariables of a type expression, as a list.
eigenVar = S.toList . eigen

-- | Eigenvariable set: lower-case constants (see the naming convention).
-- NOTE(review): like 'freeVar', this only covers the type-level
-- constructors; other forms fail with a pattern-match error.
eigen Star = S.empty
eigen (Var x _) = S.empty
eigen (Const x _) = if isLower (head x) then S.insert x S.empty else S.empty
eigen (App f1 f2) = (eigen f1) `S.union` (eigen f2)
eigen (Forall x f) = S.delete x (eigen f)
eigen (Imply b h) = eigen b `S.union` eigen h
eigen (Lambda p f) = eigen f   -- pattern's own eigenvariables are ignored
-- | Spine of left-nested applications: @App (App f a) b@ becomes
-- @[f, a, b]@.  Written with an accumulator to avoid repeated '++'.
flatten :: Exp -> [Exp]
flatten e = go e []
  where
    go (App f a) acc = go f (a : acc)
    go a acc = a : acc

-- | Same as 'flatten', but for type applications ('TApp').
flattenT :: Exp -> [Exp]
flattenT e = go e []
  where
    go (TApp f a) acc = go f (a : acc)
    go a acc = a : acc
-- | Split a chain of implications into its premises and final head.
getHB :: Exp -> ([Exp],Exp)
getHB (Imply x y) = case getHB y of
  (premises, h) -> (x : premises, h)
getHB t = ([], t)

-- | Strip a prefix of 'Forall' binders, returning the bound names
-- together with the body.
getVars :: Exp -> ([Name],Exp)
getVars (Forall x t) = case getVars t of
  (names, body) -> (x : names, body)
getVars t = ([], t)

-- | Decompose a formula into (quantified names, head, premises).
separate f = (names, h, premises)
  where
    (names, body) = getVars f
    (premises, h) = getHB body
-- | Atoms are bare constants and variables.
isAtom :: Exp -> Bool
isAtom e = case e of
  Const _ _ -> True
  Var _ _ -> True
  _ -> False

-- | Is the expression a bare variable?
isVar :: Exp -> Bool
isVar e = case e of
  Var _ _ -> True
  _ -> False
-- | Erase type information from a term: 'Abs' binders and 'TApp' type
-- arguments are dropped, applications are erased recursively.
-- (Partial: only the term forms listed here are handled.)
erase :: Exp -> Exp
erase e = case e of
  Const x p -> Const x p
  Var x p -> Var x p
  Abs _ body -> erase body
  TApp body _ -> erase body
  App a1 a2 -> App (erase a1) (erase a2)
-- | Name of a constant or variable; 'error' on anything else.
getName :: Exp -> Name
getName e = case e of
  Const x _ -> x
  Var x _ -> x
  _ -> error ("from getName: " ++ show e)

-- | Source position of a constant or variable; 'error' on anything else.
getPos :: Exp -> SourcePos
getPos e = case e of
  Const _ x -> x
  Var _ x -> x
  _ -> error ("from getPos: " ++ show e)
newtype Subst = Subst [(String, Exp)] deriving (Show, Eq)
-- applying a substitution to a type or mixed type/term expression
-- the substitution is blind, i.e. no renaming of bound variables
-- (capture-avoiding substitution lives in 'subst' / 'runSubst').
apply :: Subst -> Exp -> Exp
apply (Subst s) (Var x p) =
  case lookup x s of
    Nothing -> Var x p
    Just t -> t
apply s a@(Const _ _) = a
apply s (App f1 f2) = App (apply s f1) (apply s f2)
apply s (TApp f1 f2) = TApp (apply s f1) (apply s f2)
apply s (Imply f1 f2) = Imply (apply s f1) (apply s f2)
-- Binders shadow: drop the bound name from the substitution first.
apply s (Forall x f2) = Forall x (apply (minus s [x]) f2)
apply s (Abs x f2) = Abs x (apply (minus s [x]) f2)
-- Annotated lambda pattern: both the pattern and its type are substituted.
apply s (Lambda (Ann p t) f2) =
  Lambda (Ann (apply s p) (apply s t)) (apply s f2)
-- type level lambda
apply s (Lambda (Var x p) f2) =
  Lambda (Var x p) (apply (minus s [x]) f2)
-- apply s (Lambda x f2) = Lambda x (apply (minus s $ freeVars x) f2)
apply s Star = Star
-- Case/Let patterns are binding positions and are left untouched
-- (only the right-hand sides / annotations are substituted).
apply s (Case e cons) = Case (apply s e) cons'
  where cons' = map (\(p,exp) -> (p, apply s exp)) cons
apply s (Let defs e) = Let def' (apply s e)
  where def' = map (\(Ann p t, exp) -> (Ann p (apply s t), apply s exp)) defs
apply s (Ann x e) = Ann (apply s x) (apply s e)
-- apply s e = error $ show e ++ "from apply"
-- | Remove from a substitution all bindings for the given names.
minus (Subst sub) fv = Subst (filter keep sub)
  where keep (x, _) = not (x `elem` fv)

-- | Compose substitutions: push @s1@ through the range of @s2@
-- (normalizing each result), then append @s1@ itself.
extend :: Subst -> Subst -> Subst
extend (Subst s1) (Subst s2) = Subst (mapped ++ s1)
  where mapped = [ (x, normalize (apply (Subst s1) e)) | (x, e) <- s2 ]
-- | Eta-reduction: rewrite @\x -> f x@ to @f@ whenever @x@ does not
-- occur free in @f@.  The body is reduced first so nested redexes
-- surface before the check.
eta (Lambda (Var x p) t) =
  case eta t of
    App t' e' | isVar e' && getName e' == x ->
      -- The argument is exactly the bound variable; drop the lambda
      -- only if x is not also free in the function part.
      let fv = freeVars t' in
      if not (x `elem` fv) then t'
      else Lambda (Var x p) (App t' e')
    c -> Lambda (Var x p) c
eta (App e1 e2) = App (eta e1) (eta e2)
-- All other forms are left alone (no reduction under Forall/Imply here).
eta a = a
-- normalize a type/mixed term expression without type definitions
-- | Beta-normalize with an empty definition environment, then eta-reduce.
normalize :: Exp -> Exp
normalize = eta . norm []
-- | Normalization under an environment @g@ of named definitions:
-- constants are unfolded via @g@, beta-redexes are contracted with the
-- capture-avoiding 'runSubst', and all other forms are reduced
-- structurally.
norm g Star = Star
norm g (Var a p) = Var a p
norm g (Const a p) =
  case lookup a g of
    Nothing -> Const a p
    Just b -> norm g b        -- unfold the definition and keep reducing
norm g (Ann a t) = Ann (norm g a) (norm g t)
norm g (Lambda x t) = Lambda (norm g x) (norm g t)
norm g (Abs x t) = Abs x (norm g t)
norm g (TApp t1 t2) = TApp (norm g t1) (norm g t2)
-- Beta-redex: substitute (capture-avoidingly) and continue.
norm g (App (Lambda (Var x p) t') t) = norm g $ runSubst t (Var x p) t'
norm g (App t' t) =
  -- Normalizing the head may expose a fresh redex; retry in that case.
  case (App (norm g t') (norm g t)) of
    a@(App (Lambda x t') t) -> norm g a
    b -> b
norm g (Imply t t') = Imply (norm g t) (norm g t')
norm g (Forall x t) = Forall x (norm g t)
norm g (Case e alts) = Case (norm g e) alts'
  where alts' = map (\(p, exp) -> (norm g p, norm g exp)) alts
norm g (Let alts e) = Let alts' (norm g e)
  where alts' = map (\(p, exp) -> (norm g p, norm g exp)) alts
-- normalizeTy t g | trace ("normalizeTy " ++show ("hi") ++"\n") False = undefined
-- | Like 'normalize', but unfolding definitions from the environment @g@.
normalizeTy t g = eta (norm g t)
-- | Fresh-name supply: a state monad over an integer counter.
type GVar a = State Int a

-- | Run the capture-avoiding substitution @[t/x]t1@ with the fresh-name
-- counter starting at 0, discarding the final counter.
runSubst :: Exp -> Exp -> Exp -> Exp
runSubst t x t1 = evalState (subst t x t1) 0
-- | Capture-avoiding substitution: @subst s (Var x p) e@ replaces free
-- occurrences of @x@ in @e@ by @s@.  When a binder would capture a free
-- variable of @s@, the binder is renamed to @name ++ counter ++ "#"@
-- using the 'GVar' counter.
-- NOTE(review): only the forms below are covered (no Star/TApp/Case/…);
-- other inputs are a pattern-match failure.
subst :: Exp -> Exp -> Exp -> GVar Exp
subst s (Var x _) (Const y p) = return $ Const y p
subst s (Var x p1) (Var y p2) =
  if x == y then return s else return $ Var y p2
subst s (Var x p) (Imply f1 f2) = do
  c1 <- subst s (Var x p) f1
  c2 <- subst s (Var x p) f2
  return $ Imply c1 c2
subst s (Var x p) (App f1 f2) = do
  c1 <- subst s (Var x p) f1
  c2 <- subst s (Var x p) f2
  return $ App c1 c2
subst s (Var x p) (Forall a f) =
  -- x is shadowed or absent: nothing to do.
  if x == a || not (x `elem` freeVars f) then return $ Forall a f
  else if not (a `elem` freeVars s)
       -- No capture possible: substitute under the binder.
       then do
         c <- subst s (Var x p) f
         return $ Forall a c
       -- Capture: rename the binder to a fresh name first, then retry.
       else do
         n <- get
         modify (+1)
         c1 <- subst (Var (a++ show n++"#") p) (Var a p) f
         subst s (Var x p) (Forall (a ++ show n ++"#") c1)
subst s (Var x p1) (Lambda (Var a p2) f) =
  -- Same three-way split as the Forall case, for lambda binders.
  if x == a || not (x `elem` freeVars f) then return $ Lambda (Var a p2) f
  else if not (a `elem` freeVars s)
       then do
         c <- subst s (Var x p1) f
         return $ Lambda (Var a p2) c
       else do
         n <- get
         modify (+1)
         c1 <- subst (Var (a++ show n++ "#") p2) (Var a p2) f
         subst s (Var x p1) (Lambda (Var (a ++ show n++"#") p2) c1)
-- | De Bruijn (nameless) representation of type expressions, used only
-- for the alpha-equivalence check in 'alphaEq'.
data Nameless = V Int                  -- bound variable, by binder depth
              | C Name                 -- constant
              | ALL Nameless           -- Forall binder
              | AP Nameless Nameless   -- application
              | IMP Nameless Nameless  -- implication
              | LAM Nameless           -- Lambda binder
              | S                      -- Star
              deriving (Show, Eq)

-- | Naming context: each bound name mapped to its current binder depth.
type BindCxt a = Reader [(Name, Int)] a
-- debruijn representation of a type expression
-- | Convert a (closed) type expression to its nameless form.  Free
-- variables must all be in the context; 'alphaEq' ensures this by
-- quantifying over them first.  Unsupported forms hit the final 'error'.
debruijn :: Exp -> BindCxt Nameless
debruijn (Const x _) = return $ C x
debruijn (Star) = return $ S
debruijn (Var x _) =
  do n' <- asks (lookup x)
     case n' of
       Just n -> return $ V n
       Nothing -> error $ show x ++ "from debruijn"
debruijn (Forall x f) =
  -- Entering a binder: shift every existing depth, bind x at depth 0.
  do a <- local (((x,0):) . plus1) $ debruijn f
     return $ ALL a
debruijn (App b1 b2) =
  do a <- debruijn b1
     a1 <- debruijn b2
     return $ AP a a1
debruijn (Imply b1 b2) =
  do a <- debruijn b1
     a1 <- debruijn b2
     return $ IMP a a1
debruijn (Lambda (Var x _) f) =
  do a <- local (((x,0):) . plus1) $ debruijn f
     return $ LAM a
debruijn a = error $ show a
plus1 = map $ \(x, y) -> (x, y + 1)
-- alpha equivalence of two type expressions
-- | Alpha-equivalence: close each type over its free variables with
-- 'Forall', convert both to de Bruijn form, and compare structurally.
alphaEq :: Exp -> Exp -> Bool
alphaEq t1 t2 = toNameless t1 == toNameless t2
  where
    close t = foldl' (\body x -> Forall x body) t (freeVars t)
    toNameless t = runReader (debruijn (close t)) []
| Fermat/higher-rank | src/Syntax.hs | bsd-3-clause | 8,154 | 0 | 17 | 2,416 | 4,261 | 2,164 | 2,097 | 212 | 6 |
module Main where
import Options.Applicative
-- | Parse the command line, then echo the argument forever.
main :: IO ()
main = do
  s <- getOpts
  printArg s
-- | Print the argument on its own line forever; an empty argument is
-- replaced by the traditional default "y".  Never returns.
printArg :: String -> IO ()
printArg "" = printArg "y"
printArg arg = do
  putStrLn arg
  printArg arg
-- | Parse the command line with optparse-applicative: all positional
-- STRING arguments joined by spaces ("" when none are given, which
-- 'printArg' turns into "y").  '--help' is handled by 'helper'.
getOpts :: IO String
getOpts = execParser $ info (helper <*> args)
    (fullDesc
    <> header "yes - output a string repeatedly until killed"
    <> progDesc "Repeatedly output a line with all specified STRING(s), or 'y'"
    )

-- | Zero or more positional STRING arguments, space-joined.
args :: Parser String
args = unwords <$> many (argument str (metavar "STRING"))
| DavidTruby/haskell-coreutils | src/yes.hs | bsd-3-clause | 514 | 0 | 10 | 117 | 151 | 76 | 75 | 15 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.