code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fexcess-precision -XBangPatterns#-}
-----------------------------------------------------------------------------
{-| Module : Vec.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:42
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qt.Glome.Vec where
-- | The scalar type used throughout this module (points, distances, angles).
type Flt = Double
-- | Modulus intended to always yield a non-negative result.
-- NOTE(review): Haskell's 'mod' already returns a result with the sign of
-- the divisor, so for b > 0 the correction branch below never fires; for
-- b < 0 it yields modres + b, which is even more negative. This looks
-- ported from a language with C-style '%' — confirm callers only pass
-- positive divisors before simplifying.
sane_mod :: Int -> Int -> Int
sane_mod a b =
let modres = mod a b in
if modres < 0
then modres + b
else modres
-- | Stand-in for an "effectively infinite" distance.
-- NOTE(review): this is 1.0e6, not IEEE infinity; scenes larger than this
-- scale will misbehave. Left finite since callers may rely on arithmetic
-- with it staying finite.
infinity :: Flt
infinity = 1000000.0
-- | Convert an angle in degrees to radians.
-- Fix: the hand-typed constant 3.1415926535897 was short of full Double
-- precision; Prelude's 'pi' (3.141592653589793) is exact to the last bit.
deg :: Flt -> Flt
deg !x = (x * pi) / 180

-- | Angles are natively radians here, so this is the identity conversion.
rad :: Flt -> Flt
rad !x = x

-- | Convert full turns (rotations) to radians.
rot :: Flt -> Flt
rot !x = x * pi * 2
{-abs_sub :: Flt -> Flt -> Flt
abs_sub a b =
if a > 0.0
then
if b < a
then a-b
else 0.0
else
if b < (-a)
then a+b
else 0.0
-}
-- | Clamp @x@ into the closed interval [@lo@, @hi@].
-- Fix: the original parameter names @min@/@max@ shadowed the Prelude
-- functions of the same names, which invites confusing errors when the
-- body is later edited; renamed to @lo@/@hi@ (positional interface is
-- unchanged).
clamp :: Flt -> Flt -> Flt -> Flt
clamp !lo !x !hi
  | x < lo    = lo
  | x > hi    = hi
  | otherwise = x
-- | Small epsilon used for approximate comparisons and offsets module-wide.
delta = 0.0001 :: Flt
-- | Strict minimum of two scalars.
fmin :: Flt -> Flt -> Flt
fmin !p !q
  | p > q     = q
  | otherwise = p

-- | Strict maximum of two scalars.
fmax :: Flt -> Flt -> Flt
fmax !p !q
  | p > q     = p
  | otherwise = q
-- | Strict minimum of three scalars.
fmin3 :: Flt -> Flt -> Flt -> Flt
fmin3 !a !b !c
  | a > b     = if b > c then c else b
  | a > c     = c
  | otherwise = a

-- | Strict maximum of three scalars.
fmax3 :: Flt -> Flt -> Flt -> Flt
fmax3 !a !b !c
  | a > b     = if a > c then a else c
  | b > c     = b
  | otherwise = c
-- | Strict minimum of four scalars, reduced pairwise.
fmin4 :: Flt -> Flt -> Flt -> Flt -> Flt
fmin4 !a !b !c !d =
  let ab = fmin a b
      cd = fmin c d
  in fmin ab cd

-- | Strict maximum of four scalars, reduced pairwise.
fmax4 :: Flt -> Flt -> Flt -> Flt -> Flt
fmax4 !a !b !c !d =
  let ab = fmax a b
      cd = fmax c d
  in fmax ab cd
-- | Absolute value for 'Flt' (this module shadows Prelude's 'abs').
fabs :: Flt -> Flt
fabs !v
  | v < 0     = negate v
  | otherwise = v

-- | Absolute value for 'Int'.
iabs :: Int -> Int
iabs !v
  | v < 0     = negate v
  | otherwise = v
-- | Deliberately shadows Prelude's 'abs' inside this module so that the
-- monomorphic 'fabs'/'iabs' get used instead; calling it is always an error.
abs a = error "use non-polymorphic version, fabs"
-- | Approximate equality: a relative test (|1 - a/b|) when a > 1, an
-- absolute-difference test otherwise, both against a tolerance of delta*10.
-- NOTE(review): asymmetric in its arguments (the branch inspects only 'a')
-- and the relative branch divides by 'b', so a > 1 with b == 0 produces
-- Infinity/NaN — confirm callers never hit that regime.
about_equal :: Flt -> Flt -> Bool
about_equal !a !b =
if a > 1
then
fabs (1 - (a/b)) < (delta*10)
else
(fabs $ a - b) < (delta*10)
-- | 3-component vector with strict 'Flt' fields; used for points,
-- directions, and normals alike.
data Vec = Vec {x, y, z :: !Flt} deriving Show
-- | Ray: an origin point plus a direction vector (strict fields).
data Ray = Ray {origin, dir :: !Vec} deriving Show
-- | Strict construction of a 'Vec'.
vec !x !y !z = (Vec x y z)
-- | The zero vector.
vzero = Vec 0.0 0.0 0.0
-- | Canonical unit vector (alias for 'vx').
vunit = vx
-- Unit vectors along each axis, and their negations.
vx = Vec 1 0 0
vy = Vec 0 1 0
vz = Vec 0 0 1
nvx = Vec (-1) 0 0
nvy = Vec 0 (-1) 0
nvz = Vec 0 0 (-1)
-- | Axis accessor: 0 -> x, 1 -> y, 2 -> z.
-- Fix: the case was non-exhaustive; an out-of-range index now raises a
-- descriptive error instead of an opaque pattern-match failure.
va :: Vec -> Int -> Flt
va !(Vec x y z) !n =
  case n of
    0 -> x
    1 -> y
    2 -> z
    _ -> error $ "va: axis index out of range (expected 0..2): " ++ show n
-- | Return a copy of the vector with axis @i@ (0 -> x, 1 -> y, 2 -> z)
-- replaced by @f@.
-- Fix: the case was non-exhaustive; an out-of-range index now raises a
-- descriptive error instead of an opaque pattern-match failure.
vset :: Vec -> Int -> Flt -> Vec
vset !(Vec x y z) !i !f =
  case i of
    0 -> Vec f y z
    1 -> Vec x f z
    2 -> Vec x y f
    _ -> error $ "vset: axis index out of range (expected 0..2): " ++ show i
-- | Dot (inner) product of two vectors.
vdot :: Vec -> Vec -> Flt
vdot !(Vec ax ay az) !(Vec bx by bz) =
  ax * bx + ay * by + az * bz
-- | Cross product; the result is perpendicular to both inputs,
-- following the right-hand rule.
vcross :: Vec -> Vec -> Vec
vcross !(Vec ax ay az) !(Vec bx by bz) =
  Vec (ay * bz - az * by)
      (az * bx - ax * bz)
      (ax * by - ay * bx)
-- | Apply a scalar function to every component of a vector.
vmap :: (Flt -> Flt) -> Vec -> Vec
vmap f !(Vec a b c) = Vec (f a) (f b) (f c)

-- | Combine two vectors component-wise with a binary scalar function.
vmap2 :: (Flt -> Flt -> Flt) -> Vec -> Vec -> Vec
vmap2 f !(Vec a1 b1 c1) !(Vec a2 b2 c2) =
  Vec (f a1 a2) (f b1 b2) (f c1 c2)
-- | Negate every component (flip the vector's direction).
vinvert :: Vec -> Vec
vinvert !(Vec a b c) = Vec (negate a) (negate b) (negate c)

-- | Squared Euclidean length (avoids the sqrt of 'vlen').
vlensqr :: Vec -> Flt
vlensqr !v = vdot v v

-- | Euclidean length.
vlen :: Vec -> Flt
vlen !v = sqrt (vlensqr v)
-- | Component-wise vector addition.
vadd :: Vec -> Vec -> Vec
vadd !(Vec ax ay az) !(Vec bx by bz) =
  Vec (ax + bx) (ay + by) (az + bz)

-- | Sum of three vectors in one pass.
vadd3 :: Vec -> Vec -> Vec -> Vec
vadd3 !(Vec ax ay az) !(Vec bx by bz) !(Vec cx cy cz) =
  Vec (ax + bx + cx) (ay + by + cy) (az + bz + cz)

-- | Component-wise vector subtraction.
vsub :: Vec -> Vec -> Vec
vsub !(Vec ax ay az) !(Vec bx by bz) =
  Vec (ax - bx) (ay - by) (az - bz)

-- | Component-wise (Hadamard) product.
vmul :: Vec -> Vec -> Vec
vmul !(Vec ax ay az) !(Vec bx by bz) =
  Vec (ax * bx) (ay * by) (az * bz)

-- | Add a scalar to every component.
vinc :: Vec -> Flt -> Vec
vinc !(Vec a b c) !k = Vec (a + k) (b + k) (c + k)

-- | Subtract a scalar from every component.
vdec :: Vec -> Flt -> Vec
vdec !(Vec a b c) !k = Vec (a - k) (b - k) (c - k)
-- | Component-wise maximum of two vectors.
vmax :: Vec -> Vec -> Vec
vmax !(Vec ax ay az) !(Vec bx by bz) =
  Vec (fmax ax bx) (fmax ay by) (fmax az bz)

-- | Component-wise minimum of two vectors.
vmin :: Vec -> Vec -> Vec
vmin !(Vec ax ay az) !(Vec bx by bz) =
  Vec (fmin ax bx) (fmin ay by) (fmin az bz)
-- | Index (0 = x, 1 = y, 2 = z) of the largest component; ties resolve
-- toward the later axis, matching the original comparison order.
vmaxaxis :: Vec -> Int
vmaxaxis !(Vec a b c)
  | a > b && a > c = 0
  | a > b          = 2
  | b > c          = 1
  | otherwise      = 2
-- | Multiply every component by a scalar.
vscale :: Vec -> Flt -> Vec
vscale !(Vec a b c) !k = Vec (a * k) (b * k) (c * k)
-- | Fused multiply-add: @v1 + v2 * fac@ in a single pass.
-- Fix: 'fac' now carries a bang pattern like every other argument in this
-- module, making the strictness convention consistent.
vscaleadd :: Vec -> Vec -> Flt -> Vec
vscaleadd !(Vec ax ay az) !(Vec bx by bz) !fac =
  Vec (ax + (bx * fac))
      (ay + (by * fac))
      (az + (bz * fac))
-- | Normalize to unit length by multiplying each component with the
-- reciprocal length (one division, three multiplies).
vnorm :: Vec -> Vec
vnorm !(Vec a b c) =
  let s = 1.0 / (sqrt ((a*a) + (b*b) + (c*c)))
  in Vec (a * s) (b * s) (c * s)
-- | Return the vector unchanged if its squared length is within 'delta'
-- of 1; otherwise abort with a diagnostic showing the offending vector.
-- Fix: the "too long" error message was missing the ": " separator that
-- its "too short" sibling has, so the shown vector ran into the text.
assert_norm :: Vec -> Vec
assert_norm v =
  let l = vdot v v  -- squared length; adequate for a 1 +/- delta tolerance
  in if l > (1 + delta)
       then error $ "vector too long: " ++ (show v)
       else if l < (1 - delta)
              then error $ "vector too short: " ++ (show v)
              else v
-- | Unit vector halfway between two directions (normalized sum).
bisect :: Vec -> Vec -> Vec
bisect !p !q = vnorm (vadd p q)

-- | Euclidean distance between two points.
vdist :: Vec -> Vec -> Flt
vdist p q = vlen (vsub q p)
-- | Reflect direction @v@ about the (unit) normal @n@:
-- v - 2 (v . n) n, computed with one fused scale-add.
reflect :: Vec -> Vec -> Vec
reflect !v !n = vscaleadd v n ((-2) * vdot v n)

-- | Component-wise reciprocal (1/x, 1/y, 1/z).
vrcp :: Vec -> Vec
vrcp !(Vec a b c) = Vec (recip a) (recip b) (recip c)
-- | Approximate equality, tested per component with 'about_equal'.
veq :: Vec -> Vec -> Bool
veq !(Vec px py pz) !(Vec qx qy qz) =
  about_equal px qx && about_equal py qy && about_equal pz qz

-- | True when every pair of corresponding components has the same
-- (strict, nonzero) sign.
veqsign :: Vec -> Vec -> Bool
veqsign !(Vec px py pz) !(Vec qx qy qz) =
  px * qx > 0 && py * qy > 0 && pz * qz > 0
-- | Advance a ray's origin distance @t@ along its direction;
-- the direction is unchanged.
ray_move :: Ray -> Flt -> Ray
ray_move !(Ray o d) !t = Ray (vscaleadd o d t) d
-- | Build an orthonormal pair (v2, v3) perpendicular to the given unit
-- vector. Crosses v1 with whichever of the x/y axes it is less parallel
-- to (|v1 . x| < 0.8) so the cross product stays well-conditioned, then
-- completes the basis with a second cross. Errors if the input is not
-- normalized (checked via 'about_equal' on the squared length).
orth :: Vec -> (Vec,Vec)
orth v1 =
if about_equal (vdot v1 v1) 1
then
let x = (Vec 1 0 0)
y = (Vec 0 1 0)
dvx = vdot v1 x
v2 = if dvx < 0.8 && dvx > (-0.8) -- don't want to cross with a
then vnorm $ vcross v1 x -- vector that's too similar
else vnorm $ vcross v1 y
v3 = vcross v1 v2
in (v2,v3)
else error $ "orth: unnormalized vector" ++ (show v1)
-- | Intersection point of a ray with the plane through point @p@ with
-- normal @norm@.
-- Fix: the distance computation duplicated 'plane_int_dist' verbatim;
-- it now delegates to it, so the two can never drift apart.
-- (As before, a ray parallel to the plane divides by zero downstream.)
plane_int :: Ray -> Vec -> Vec -> Vec
plane_int !ray !p !norm =
  let Ray orig dir = ray
  in vscaleadd orig dir (plane_int_dist ray p norm)
-- | Signed parametric distance along the ray to the plane through point
-- @p@ with normal @norm@.
-- NOTE(review): when the ray is parallel to the plane (norm . dir == 0)
-- this divides by zero and yields +/-Infinity or NaN — callers must
-- tolerate that.
plane_int_dist :: Ray -> Vec -> Vec -> Flt
plane_int_dist !(Ray orig dir) !p !norm =
let newo = vsub orig p
in -(vdot norm newo) / (vdot norm dir)
-- | Affine transform stored as the top three rows of a 4x4 matrix
-- (row-major, fourth column = translation); the bottom row [0,0,0,1]
-- is implicit.
data Matrix = Matrix !Flt !Flt !Flt !Flt
!Flt !Flt !Flt !Flt
!Flt !Flt !Flt !Flt deriving Show
-- | A transform paired with its inverse, so applying the inverse never
-- requires a runtime matrix inversion.
data Xfm = Xfm !Matrix !Matrix deriving Show
-- | Identity matrix / identity transform.
ident_matrix = (Matrix 1 0 0 0 0 1 0 0 0 0 1 0)
ident_xfm = Xfm ident_matrix ident_matrix
-- | Product of two affine matrices. Because the bottom row [0,0,0,1] is
-- implicit, the translation terms (a*3) only contribute to the fourth
-- column of the result.
mat_mult :: Matrix -> Matrix -> Matrix
mat_mult (Matrix a00 a01 a02 a03 a10 a11 a12 a13 a20 a21 a22 a23)
(Matrix b00 b01 b02 b03 b10 b11 b12 b13 b20 b21 b22 b23) =
Matrix
(a00*b00 + a01*b10 + a02*b20)
(a00*b01 + a01*b11 + a02*b21)
(a00*b02 + a01*b12 + a02*b22)
(a00*b03 + a01*b13 + a02*b23 + a03)
(a10*b00 + a11*b10 + a12*b20)
(a10*b01 + a11*b11 + a12*b21)
(a10*b02 + a11*b12 + a12*b22)
(a10*b03 + a11*b13 + a12*b23 + a13)
(a20*b00 + a21*b10 + a22*b20)
(a20*b01 + a21*b11 + a22*b21)
(a20*b02 + a21*b12 + a22*b22)
(a20*b03 + a21*b13 + a22*b23 + a23)
-- | Compose two transforms. The forward matrices multiply in order while
-- the inverses multiply reversed, since (A B)^-1 = B^-1 A^-1.
xfm_mult :: Xfm -> Xfm -> Xfm
xfm_mult (Xfm a inva) (Xfm b invb) =
Xfm (mat_mult a b) (mat_mult invb inva)
-- | Compose a list of transforms (applied in list order) into one,
-- verifying the result with 'check_xfm'. A left fold with the operands
-- flipped is equivalent to the right fold over the reversed list.
compose :: [Xfm] -> Xfm
compose = check_xfm . foldl (flip xfm_mult) ident_xfm
-- | Sanity-check a transform: multiply the forward matrix by its claimed
-- inverse and verify (via 'about_equal') that the product is the identity.
-- Returns the transform unchanged on success, aborts with both the
-- transform and the offending product otherwise.
check_xfm :: Xfm -> Xfm
check_xfm (Xfm m i) =
let (Matrix m00 m01 m02 m03
m10 m11 m12 m13
m20 m21 m22 m23) = mat_mult m i
ae = about_equal
in
if ae m00 1 && ae m01 0 && ae m02 0 && ae m03 0 &&
ae m10 0 && ae m11 1 && ae m12 0 && ae m13 0 &&
ae m20 0 && ae m21 0 && ae m22 1 && ae m23 0
then (Xfm m i)
else error $ "corrupt matrix " ++ (show (Xfm m i)) ++ "\n" ++ (show (mat_mult m i))
-- | Rotate a point about an arbitrary axis, given as a ray (origin plus
-- normalized direction), by @angle@ radians. Implemented as
-- translate-to-origin / rotate / translate-back, with a runtime
-- self-check that the distance from the rotation origin is preserved.
vrotate :: Vec -> Ray -> Flt -> Vec
vrotate pt (Ray orig axis_) angle =
let axis = assert_norm axis_
transform = compose [ translate (vinvert orig)
, rotate axis angle
, translate orig
]
new_pt = xfm_point transform pt
in if about_equal (vlen (vsub orig pt)) (vlen (vsub orig new_pt))
then new_pt
else error $ "something is wrong with vrotate" ++
(show $ vlen (vsub orig pt)) ++ " " ++
(show $ vlen (vsub orig new_pt))
-- | Apply the forward matrix to a point (translation column included).
xfm_point :: Xfm -> Vec -> Vec
xfm_point !(Xfm (Matrix m00 m01 m02 m03
m10 m11 m12 m13
m20 m21 m22 m23) inv)
!(Vec x y z) =
Vec (m00*x + m01*y + m02*z + m03)
(m10*x + m11*y + m12*z + m13)
(m20*x + m21*y + m22*z + m23)
-- | Apply the stored inverse matrix to a point.
invxfm_point :: Xfm -> Vec -> Vec
invxfm_point !(Xfm fwd (Matrix i00 i01 i02 i03
i10 i11 i12 i13
i20 i21 i22 i23))
!(Vec x y z) =
Vec (i00*x + i01*y + i02*z + i03)
(i10*x + i11*y + i12*z + i13)
(i20*x + i21*y + i22*z + i23)
-- | Transform a direction vector: rotation/scale only, no translation
-- (the fourth column is ignored).
xfm_vec :: Xfm -> Vec -> Vec
xfm_vec !(Xfm (Matrix m00 m01 m02 m03
m10 m11 m12 m13
m20 m21 m22 m23) inv)
!(Vec x y z) =
Vec (m00*x + m01*y + m02*z)
(m10*x + m11*y + m12*z)
(m20*x + m21*y + m22*z)
-- | Inverse-transform a direction vector (no translation).
invxfm_vec :: Xfm -> Vec -> Vec
invxfm_vec !(Xfm fwd (Matrix i00 i01 i02 i03
i10 i11 i12 i13
i20 i21 i22 i23))
!(Vec x y z) =
Vec (i00*x + i01*y + i02*z)
(i10*x + i11*y + i12*z)
(i20*x + i21*y + i22*z)
-- | Transform a surface normal: multiplies by the TRANSPOSE of the
-- inverse matrix (note the swapped row/column indexing versus
-- 'invxfm_vec'), which keeps normals perpendicular to surfaces under
-- non-uniform scaling.
invxfm_norm :: Xfm -> Vec -> Vec
invxfm_norm !(Xfm fwd (Matrix i00 i01 i02 i03
i10 i11 i12 i13
i20 i21 i22 i23))
!(Vec x y z) =
Vec (i00*x + i10*y + i20*z)
(i01*x + i11*y + i21*z)
(i02*x + i12*y + i22*z)
-- | Transform a ray: the origin as a point, the direction as a vector,
-- re-normalizing the direction afterwards (scaling may change its length).
xfm_ray :: Xfm -> Ray -> Ray
xfm_ray !xfm !(Ray orig dir) =
  Ray (xfm_point xfm orig) (vnorm (xfm_vec xfm dir))

-- | Inverse counterpart of 'xfm_ray'.
-- Fix: added the missing top-level type signature (every other top-level
-- binding in this module carries one).
invxfm_ray :: Xfm -> Ray -> Ray
invxfm_ray !xfm !(Ray orig dir) =
  Ray (invxfm_point xfm orig) (vnorm (invxfm_vec xfm dir))
-- | Translation transform; the inverse translates by the negated offset.
translate :: Vec -> Xfm
translate (Vec x y z) =
check_xfm $ Xfm (Matrix 1 0 0 x 0 1 0 y 0 0 1 z)
(Matrix 1 0 0 (-x) 0 1 0 (-y) 0 0 1 (-z))
-- | Axis-aligned scale; the inverse uses component reciprocals.
-- NOTE(review): a zero component makes the inverse Infinity, which
-- 'check_xfm' will then reject with an error.
scale :: Vec -> Xfm
scale (Vec x y z) =
check_xfm $ Xfm (Matrix x 0 0 0 0 y 0 0 0 0 z 0)
(Matrix (1/x) 0 0 0 0 (1/y) 0 0 0 0 (1/z) 0)
-- | Rotation about a normalized axis by @angle@ radians, built from the
-- axis-angle (Rodrigues) rotation matrix. The inverse is the transpose,
-- valid because rotation matrices are orthogonal. Errors if the axis is
-- not unit length.
rotate :: Vec -> Flt -> Xfm
rotate (Vec x y z) angle =
if not $ (vlen (Vec x y z)) `about_equal` 1
then error $ "please use a normalized vector for rotation: " ++ (show (vlen (Vec x y z)))
else
let s = sin angle
c = cos angle
m00 = ((x*x)+((1-(x*x))*c))
m01 = (((x*y)*(1-c))-(z*s))
m02 = ((x*z*(1-c))+(y*s))
m10 = (((x*y)*(1-c))+(z*s))
m11 = ((y*y)+((1-(y*y))*c))
m12 = ((y*z*(1-c))-(x*s))
m20 = ((x*z*(1-c))-(y*s))
m21 = ((y*z*(1-c))+(x*s))
m22 = ((z*z)+((1-(z*z))*c))
in
check_xfm $ Xfm (Matrix m00 m01 m02 0 m10 m11 m12 0 m20 m21 m22 0)
(Matrix m00 m10 m20 0 m01 m11 m21 0 m02 m12 m22 0)
-- | Change-of-basis transform from world (xyz) coordinates into the
-- orthonormal basis (u,v,w). Validates that all three vectors are unit
-- length and pairwise orthogonal before building the matrix; the inverse
-- is the transpose (orthogonal matrix).
xyz_to_uvw :: Vec -> Vec -> Vec -> Xfm
xyz_to_uvw u v w =
let Vec ux uy uz = u
Vec vx vy vz = v
Vec wx wy wz = w
in if (vdot u u) `about_equal` 1
then
if (vdot v v) `about_equal` 1
then
if (vdot w w) `about_equal` 1
then
if ((vdot u v) `about_equal` 0) &&
((vdot u w) `about_equal` 0) &&
((vdot v w) `about_equal` 0)
then
check_xfm $ Xfm (Matrix ux vx wx 0 uy vy wy 0 uz vz wz 0)
(Matrix ux uy uz 0 vx vy vz 0 wx wy wz 0)
else error "vectors aren't orthogonal"
else error $ "unnormalized w " ++ (show w)
else error $ "unnormalized v " ++ (show v)
else error $ "unnormalized u " ++ (show u)
-- | Basis-to-world transform: the matrices of 'xyz_to_uvw' with forward
-- and inverse swapped.
-- NOTE(review): unlike 'xyz_to_uvw' this performs no normalization or
-- orthogonality validation beyond 'check_xfm's identity test — confirm
-- whether that asymmetry is intentional.
uvw_to_xyz :: Vec -> Vec -> Vec -> Xfm
uvw_to_xyz (Vec ux uy uz) (Vec vx vy vz) (Vec wx wy wz) =
check_xfm $ Xfm (Matrix ux uy uz 0 vx vy vz 0 wx wy wz 0)
(Matrix ux vx wx 0 uy vy wy 0 uz vz wz 0)
| keera-studios/hsQt | extra-pkgs/Glome/Qt/Glome/Vec.hs | bsd-2-clause | 12,073 | 0 | 19 | 4,124 | 6,367 | 3,247 | 3,120 | -1 | -1 |
module Tests.Network where
import Hans.Network.Types
import Test.QuickCheck (Gen,arbitraryBoundedRandom)
-- | Generator for a uniformly random 'NetworkProtocol', drawn from the
-- type's Bounded range via 'arbitraryBoundedRandom'.
arbitraryProtocol :: Gen NetworkProtocol
arbitraryProtocol = arbitraryBoundedRandom
| GaloisInc/HaNS | tests/Tests/Network.hs | bsd-3-clause | 194 | 0 | 5 | 20 | 38 | 23 | 15 | 5 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UndecidableInstances #-}
-------------------------------------------------------------------------------
-- |
-- Module : Database.Bloodhound.Types
-- Copyright : (C) 2014 Chris Allen
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Chris Allen <cma@bitemyapp.com>
-- Stability : provisional
-- Portability : DeriveGeneric, RecordWildCards
--
-- Data types for describing actions and data structures performed to interact
-- with Elasticsearch. The two main buckets your queries against Elasticsearch
-- will fall into are 'Query's and 'Filter's. 'Filter's are more like
-- traditional database constraints and often have preferable performance
-- properties. 'Query's support human-written textual queries, such as fuzzy
-- queries.
-------------------------------------------------------------------------------
module Database.Bloodhound.Types
( defaultCache
, defaultIndexSettings
, defaultIndexDocumentSettings
, mkSort
, showText
, unpackId
, mkMatchQuery
, mkMultiMatchQuery
, mkBoolQuery
, mkRangeQuery
, mkQueryStringQuery
, mkAggregations
, mkTermsAggregation
, mkTermsScriptAggregation
, mkDateHistogram
, mkDocVersion
, docVersionNumber
, toTerms
, toDateHistogram
, omitNulls
, BH(..)
, runBH
, BHEnv(..)
, MonadBH(..)
, Version(..)
, Status(..)
, Existence(..)
, NullValue(..)
, IndexSettings(..)
, IndexTemplate(..)
, Server(..)
, Reply
, EsResult(..)
, EsResultFound(..)
, DocVersion
, ExternalDocVersion(..)
, VersionControl(..)
, DocumentParent(..)
, IndexDocumentSettings(..)
, Query(..)
, Search(..)
, SearchType(..)
, SearchResult(..)
, ScrollId
, SearchHits(..)
, TrackSortScores
, From(..)
, Size(..)
, Source(..)
, PatternOrPatterns(..)
, Include(..)
, Exclude(..)
, Pattern(..)
, ShardResult(..)
, Hit(..)
, Filter(..)
, Seminearring(..)
, BoolMatch(..)
, Term(..)
, GeoPoint(..)
, GeoBoundingBoxConstraint(..)
, GeoBoundingBox(..)
, GeoFilterType(..)
, Distance(..)
, DistanceUnit(..)
, DistanceType(..)
, DistanceRange(..)
, OptimizeBbox(..)
, LatLon(..)
, RangeValue(..)
, RangeExecution(..)
, LessThan(..)
, LessThanEq(..)
, GreaterThan(..)
, GreaterThanEq(..)
, LessThanD(..)
, LessThanEqD(..)
, GreaterThanD(..)
, GreaterThanEqD(..)
, Regexp(..)
, RegexpFlags(..)
, RegexpFlag(..)
, FieldName(..)
, Script(..)
, IndexName(..)
, TemplateName(..)
, TemplatePattern(..)
, MappingName(..)
, DocId(..)
, CacheName(..)
, CacheKey(..)
, BulkOperation(..)
, ReplicaCount(..)
, ShardCount(..)
, Sort
, SortMode(..)
, SortOrder(..)
, SortSpec(..)
, DefaultSort(..)
, Missing(..)
, OpenCloseIndex(..)
, Method
, Boost(..)
, MatchQuery(..)
, MultiMatchQuery(..)
, BoolQuery(..)
, BoostingQuery(..)
, CommonTermsQuery(..)
, DisMaxQuery(..)
, FilteredQuery(..)
, FuzzyLikeThisQuery(..)
, FuzzyLikeFieldQuery(..)
, FuzzyQuery(..)
, HasChildQuery(..)
, HasParentQuery(..)
, IndicesQuery(..)
, MoreLikeThisQuery(..)
, MoreLikeThisFieldQuery(..)
, NestedQuery(..)
, PrefixQuery(..)
, QueryStringQuery(..)
, SimpleQueryStringQuery(..)
, RangeQuery(..)
, RegexpQuery(..)
, QueryString(..)
, BooleanOperator(..)
, ZeroTermsQuery(..)
, CutoffFrequency(..)
, Analyzer(..)
, MaxExpansions(..)
, Lenient(..)
, MatchQueryType(..)
, MultiMatchQueryType(..)
, Tiebreaker(..)
, MinimumMatch(..)
, DisableCoord(..)
, CommonMinimumMatch(..)
, MinimumMatchHighLow(..)
, PrefixLength(..)
, Fuzziness(..)
, IgnoreTermFrequency(..)
, MaxQueryTerms(..)
, ScoreType(..)
, Score
, Cache
, TypeName(..)
, BoostTerms(..)
, MaxWordLength(..)
, MinWordLength(..)
, MaxDocFrequency(..)
, MinDocFrequency(..)
, PhraseSlop(..)
, StopWord(..)
, QueryPath(..)
, MinimumTermFrequency(..)
, PercentMatch(..)
, FieldDefinition(..)
, MappingField(..)
, Mapping(..)
, AllowLeadingWildcard(..)
, LowercaseExpanded(..)
, GeneratePhraseQueries(..)
, Locale(..)
, AnalyzeWildcard(..)
, EnablePositionIncrements(..)
, SimpleQueryFlag(..)
, FieldOrFields(..)
, Monoid(..)
, ToJSON(..)
, Interval(..)
, TimeInterval(..)
, ExecutionHint(..)
, CollectionMode(..)
, TermOrder(..)
, TermInclusion(..)
, Aggregation(..)
, Aggregations
, AggregationResults
, Bucket(..)
, BucketAggregation(..)
, TermsAggregation(..)
, ValueCountAggregation(..)
, FilterAggregation(..)
, DateHistogramAggregation(..)
, Highlights(..)
, FieldHighlight(..)
, HighlightSettings(..)
, PlainHighlight(..)
, PostingsHighlight(..)
, FastVectorHighlight(..)
, CommonHighlight(..)
, NonPostings(..)
, HighlightEncoder(..)
, HighlightTag(..)
, HitHighlight
, TermsResult(..)
, DateHistogramResult(..)
) where
import Control.Applicative
import Control.Monad.Catch
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer
import Data.Aeson
import Data.Aeson.Types (Pair, emptyObject, parseMaybe)
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.HashMap.Strict as HM (union)
import Data.List (foldl', nub)
import Data.List.NonEmpty (NonEmpty (..), toList)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time.Clock (UTCTime)
import qualified Data.Vector as V
import GHC.Enum
import GHC.Generics (Generic)
import Network.HTTP.Client
import qualified Network.HTTP.Types.Method as NHTM
import Database.Bloodhound.Types.Class
-- $setup
-- >>> :set -XOverloadedStrings
-- >>> import Database.Bloodhound
-- >>> let testServer = (Server "http://localhost:9200")
-- >>> let testIndex = IndexName "twitter"
-- >>> let testMapping = MappingName "tweet"
-- >>> let defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2)
-- defaultIndexSettings is exported by Database.Bloodhound as well
-- no trailing slashes in servers, library handles building the path.
{-| Common environment for Elasticsearch calls. Connections will be
pipelined according to the provided HTTP connection manager.
-}
-- | The environment threaded through every call: the Elasticsearch server
-- to target plus the shared HTTP 'Manager' whose connection pool enables
-- the pipelining described above.
data BHEnv = BHEnv { bhServer :: Server
, bhManager :: Manager
}
{-| All API calls to Elasticsearch operate within
MonadBH. The idea is that it can be easily embedded in your
own monad transformer stack. A default instance for a ReaderT and
alias 'BH' is provided for the simple case.
-}
class (Functor m, Applicative m, MonadIO m) => MonadBH m where
getBHEnv :: m BHEnv
newtype BH m a = BH {
unBH :: ReaderT BHEnv m a
} deriving ( Functor
, Applicative
, Monad
, MonadIO
, MonadState s
, MonadWriter w
, MonadError e
, Alternative
, MonadPlus
, MonadFix
, MonadThrow
, MonadCatch
, MonadMask)
instance MonadTrans BH where
lift = BH . lift
instance (MonadReader r m) => MonadReader r (BH m) where
ask = lift ask
local f (BH (ReaderT m)) = BH $ ReaderT $ \r ->
local f (m r)
instance (Functor m, Applicative m, MonadIO m) => MonadBH (BH m) where
getBHEnv = BH getBHEnv
instance (Functor m, Applicative m, MonadIO m) => MonadBH (ReaderT BHEnv m) where
getBHEnv = ask
-- | Run a 'BH' action against the given environment by unwrapping the
-- underlying 'ReaderT'.
runBH :: BHEnv -> BH m a -> m a
runBH e f = runReaderT (unBH f) e
{-| 'Version' is embedded in 'Status' -}
data Version = Version { number :: Text
, build_hash :: Text
, build_timestamp :: UTCTime
, build_snapshot :: Bool
, lucene_version :: Text } deriving (Eq, Show, Generic)
{-| 'Status' is a data type for describing the JSON body returned by
Elasticsearch when you query its status. This was deprecated in 1.2.0.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-status.html#indices-status>
-}
data Status = Status { ok :: Maybe Bool
, status :: Int
, name :: Text
, version :: Version
, tagline :: Text } deriving (Eq, Show)
{-| 'IndexSettings' is used to configure the shards and replicas when you create
an Elasticsearch Index.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-create-index.html>
-}
data IndexSettings =
IndexSettings { indexShards :: ShardCount
, indexReplicas :: ReplicaCount } deriving (Eq, Show)
{-| 'defaultIndexSettings' is an 'IndexSettings' with 3 shards and 2 replicas. -}
defaultIndexSettings :: IndexSettings
defaultIndexSettings = IndexSettings (ShardCount 3) (ReplicaCount 2)
{-| 'Reply' and 'Method' are type synonyms from 'Network.HTTP.Types.Method.Method' -}
type Reply = Network.HTTP.Client.Response L.ByteString
type Method = NHTM.Method
{-| 'OpenCloseIndex' is a sum type for opening and closing indices.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-open-close.html>
-}
data OpenCloseIndex = OpenIndex | CloseIndex deriving (Eq, Show)
data FieldType = GeoPointType
| GeoShapeType
| FloatType
| IntegerType
| LongType
| ShortType
| ByteType deriving (Eq, Show)
data FieldDefinition =
FieldDefinition { fieldType :: FieldType } deriving (Eq, Show)
{-| An 'IndexTemplate' defines a template that will automatically be
applied to new indices created. The templates include both
'IndexSettings' and mappings, and a simple 'TemplatePattern' that
controls if the template will be applied to the index created.
Specify mappings as follows: @[toJSON TweetMapping, ...]@
https://www.elastic.co/guide/en/elasticsearch/reference/1.7/indices-templates.html
-}
data IndexTemplate =
IndexTemplate { templatePattern :: TemplatePattern
, templateSettings :: Maybe IndexSettings
, templateMappings :: [Value]
}
data MappingField =
MappingField { mappingFieldName :: FieldName
, fieldDefinition :: FieldDefinition } deriving (Eq, Show)
{-| Support for type reification of 'Mapping's is currently incomplete, for
now the mapping API verbiage expects a 'ToJSON'able blob.
Indexes have mappings, mappings are schemas for the documents contained in the
index. I'd recommend having only one mapping per index, always having a mapping,
and keeping different kinds of documents separated if possible.
-}
data Mapping = Mapping { typeName :: TypeName
, mappingFields :: [MappingField] } deriving (Eq, Show)
{-| 'BulkOperation' is a sum type for expressing the four kinds of bulk
operation index, create, delete, and update. 'BulkIndex' behaves like an
"upsert", 'BulkCreate' will fail if a document already exists at the DocId.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/docs-bulk.html#docs-bulk>
-}
data BulkOperation =
BulkIndex IndexName MappingName DocId Value
| BulkCreate IndexName MappingName DocId Value
| BulkDelete IndexName MappingName DocId
| BulkUpdate IndexName MappingName DocId Value deriving (Eq, Show)
{-| 'EsResult' describes the standard wrapper JSON document that you see in
successful Elasticsearch lookups or lookups that couldn't find the document.
-}
data EsResult a = EsResult { _index :: Text
, _type :: Text
, _id :: Text
, foundResult :: Maybe (EsResultFound a)} deriving (Eq, Show)
{-| 'EsResultFound' contains the document and its metadata inside of an
'EsResult' when the document was successfully found.
-}
data EsResultFound a = EsResultFound { _version :: DocVersion
, _source :: a } deriving (Eq, Show)
{-| 'DocVersion' is an integer version number for a document between 1
and 9.2e+18 used for <<https://www.elastic.co/guide/en/elasticsearch/guide/current/optimistic-concurrency-control.html optimistic concurrency control>>.
-}
newtype DocVersion = DocVersion {
docVersionNumber :: Int
} deriving (Eq, Show, Ord, ToJSON)
-- | Smart constructor for in-range doc version
-- Returns 'Nothing' when the 'Int' falls outside DocVersion's Bounded
-- range.
-- NOTE(review): relies on 'minBound'/'maxBound' for 'DocVersion'; the
-- Bounded instance is not visible in this part of the file — confirm it
-- encodes the 1 .. 9.2e18 range documented on the type.
mkDocVersion :: Int -> Maybe DocVersion
mkDocVersion i
| i >= (docVersionNumber minBound) && i <= (docVersionNumber maxBound) =
Just $ DocVersion i
| otherwise = Nothing
{-| 'ExternalDocVersion' is a convenience wrapper if your code uses its
own version numbers instead of ones from ES.
-}
newtype ExternalDocVersion = ExternalDocVersion DocVersion
deriving (Eq, Show, Ord, Bounded, Enum, ToJSON)
{-| 'VersionControl' is specified when indexing documents as a
optimistic concurrency control.
-}
data VersionControl = NoVersionControl
-- ^ Don't send a version. This is a pure overwrite.
| InternalVersion DocVersion
-- ^ Use the default ES versioning scheme. Only
-- index the document if the version is the same
-- as the one specified. Only applicable to
-- updates, as you should be getting Version from
-- a search result.
| ExternalGT ExternalDocVersion
-- ^ Use your own version numbering. Only index
-- the document if the version is strictly higher
-- OR the document doesn't exist. The given
-- version will be used as the new version number
-- for the stored document. N.B. All updates must
-- increment this number, meaning there is some
-- global, external ordering of updates.
| ExternalGTE ExternalDocVersion
-- ^ Use your own version numbering. Only index
-- the document if the version is equal or higher
-- than the stored version. Will succeed if there
-- is no existing document. The given version will
-- be used as the new version number for the
-- stored document. Use with care, as this could
-- result in data loss.
| ForceVersion ExternalDocVersion
-- ^ The document will always be indexed and the
-- given version will be the new version. This is
-- typically used for correcting errors. Use with
-- care, as this could result in data loss.
deriving (Show, Eq, Ord)
{-| 'DocumentParent' is used to specify a parent document.
-}
newtype DocumentParent = DocumentParent DocId
deriving (Eq, Show)
{-| 'IndexDocumentSettings' are special settings supplied when indexing
a document. For the best backwards compatiblity when new fields are
added, you should probably prefer to start with 'defaultIndexDocumentSettings'
-}
data IndexDocumentSettings =
IndexDocumentSettings { idsVersionControl :: VersionControl
, idsParent :: Maybe DocumentParent
} deriving (Eq, Show)
{-| Reasonable default settings. Chooses no version control and no parent.
-}
defaultIndexDocumentSettings :: IndexDocumentSettings
defaultIndexDocumentSettings = IndexDocumentSettings NoVersionControl Nothing
{-| 'Sort' is a synonym for a list of 'SortSpec's. Sort behavior is order
dependent with later sorts acting as tie-breakers for earlier sorts.
-}
type Sort = [SortSpec]
{-| The two main kinds of 'SortSpec' are 'DefaultSortSpec' and
'GeoDistanceSortSpec'. The latter takes a 'SortOrder', 'GeoPoint', and
'DistanceUnit' to express "nearness" to a single geographical point as a
sort specification.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortSpec = DefaultSortSpec DefaultSort
| GeoDistanceSortSpec SortOrder GeoPoint DistanceUnit deriving (Eq, Show)
{-| 'DefaultSort' is usually the kind of 'SortSpec' you'll want. There's a
'mkSort' convenience function for when you want to specify only the most
common parameters.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data DefaultSort =
DefaultSort { sortFieldName :: FieldName
, sortOrder :: SortOrder
-- default False
, ignoreUnmapped :: Bool
, sortMode :: Maybe SortMode
, missingSort :: Maybe Missing
, nestedFilter :: Maybe Filter } deriving (Eq, Show)
{-| 'SortOrder' is 'Ascending' or 'Descending', as you might expect. These get
encoded into "asc" or "desc" when turned into JSON.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort>
-}
data SortOrder = Ascending
| Descending deriving (Eq, Show)
{-| 'Missing' prescribes how to handle missing fields. A missing field can be
sorted last, first, or using a custom value as a substitute.
<http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_missing_values>
-}
data Missing = LastMissing
| FirstMissing
| CustomMissing Text deriving (Eq, Show)
{-| 'SortMode' prescribes how to handle sorting array/multi-valued fields.
http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#_sort_mode_option
-}
data SortMode = SortMin
| SortMax
| SortSum
| SortAvg deriving (Eq, Show)
{-| 'mkSort' defaults everything but the 'FieldName' and the 'SortOrder' so
that you can concisely describe the usual kind of 'SortSpec's you want.
-}
mkSort :: FieldName -> SortOrder -> DefaultSort
mkSort fieldName sOrder = DefaultSort fieldName sOrder False Nothing Nothing Nothing
{-| 'Cache' is for telling ES whether it should cache a 'Filter' not.
'Query's cannot be cached.
-}
type Cache = Bool -- caching on/off
defaultCache :: Cache
defaultCache = False
{-| 'PrefixValue' is used in 'PrefixQuery' as the main query component.
-}
type PrefixValue = Text
{-| 'BooleanOperator' is the usual And/Or operators with an ES compatible
JSON encoding baked in. Used all over the place.
-}
data BooleanOperator = And | Or deriving (Eq, Show)
{-| 'ShardCount' is part of 'IndexSettings'
-}
newtype ShardCount = ShardCount Int deriving (Eq, Show, Generic)
{-| 'ReplicaCount' is part of 'IndexSettings'
-}
newtype ReplicaCount = ReplicaCount Int deriving (Eq, Show, Generic)
{-| 'Server' is used with the client functions to point at the ES instance
-}
newtype Server = Server Text deriving (Eq, Show)
{-| 'IndexName' is used to describe which index to query/create/delete
-}
newtype IndexName = IndexName Text deriving (Eq, Generic, Show)
{-| 'TemplateName' is used to describe which template to query/create/delete
-}
newtype TemplateName = TemplateName Text deriving (Eq, Show, Generic)
{-| 'TemplatePattern' represents a pattern which is matched against index names
-}
newtype TemplatePattern = TemplatePattern Text deriving (Eq, Show, Generic)
{-| 'MappingName' is part of mappings which are how ES describes and schematizes
the data in the indices.
-}
newtype MappingName = MappingName Text deriving (Eq, Generic, Show)
{-| 'DocId' is a generic wrapper value for expressing unique Document IDs.
Can be set by the user or created by ES itself. Often used in client
functions for poking at specific documents.
-}
newtype DocId = DocId Text deriving (Eq, Generic, Show)
{-| 'QueryString' is used to wrap query text bodies, be they human written or not.
-}
newtype QueryString = QueryString Text deriving (Eq, Generic, Show)
{-| 'FieldName' is used all over the place wherever a specific field within
a document needs to be specified, usually in 'Query's or 'Filter's.
-}
newtype FieldName = FieldName Text deriving (Eq, Show)
{-| 'Script' is often used in place of 'FieldName' to specify more
complex ways of extracting a value from a document.
-}
newtype Script = Script { scriptText :: Text } deriving (Eq, Show)
{-| 'CacheName' is used in 'RegexpFilter' for describing the
'CacheKey' keyed caching behavior.
-}
newtype CacheName = CacheName Text deriving (Eq, Show)
{-| 'CacheKey' is used in 'RegexpFilter' to key regex caching.
-}
newtype CacheKey =
CacheKey Text deriving (Eq, Show)
newtype Existence =
Existence Bool deriving (Eq, Show)
newtype NullValue =
NullValue Bool deriving (Eq, Show)
newtype CutoffFrequency =
CutoffFrequency Double deriving (Eq, Show, Generic)
newtype Analyzer =
Analyzer Text deriving (Eq, Show, Generic)
newtype MaxExpansions =
MaxExpansions Int deriving (Eq, Show, Generic)
{-| 'Lenient', if set to true, will cause format based failures to be
ignored. I don't know what the bloody default is, Elasticsearch
documentation didn't say what it was. Let me know if you figure it out.
-}
newtype Lenient =
Lenient Bool deriving (Eq, Show, Generic)
newtype Tiebreaker =
Tiebreaker Double deriving (Eq, Show, Generic)
newtype Boost =
Boost Double deriving (Eq, Show, Generic)
newtype BoostTerms =
BoostTerms Double deriving (Eq, Show, Generic)
{-| 'MinimumMatch' controls how many should clauses in the bool query should
match. Can be an absolute value (2) or a percentage (30%) or a
combination of both.
-}
newtype MinimumMatch =
MinimumMatch Int deriving (Eq, Show, Generic)
newtype MinimumMatchText =
MinimumMatchText Text deriving (Eq, Show)
newtype DisableCoord =
DisableCoord Bool deriving (Eq, Show, Generic)
newtype IgnoreTermFrequency =
IgnoreTermFrequency Bool deriving (Eq, Show, Generic)
newtype MinimumTermFrequency =
MinimumTermFrequency Int deriving (Eq, Show, Generic)
newtype MaxQueryTerms =
MaxQueryTerms Int deriving (Eq, Show, Generic)
newtype Fuzziness =
Fuzziness Double deriving (Eq, Show, Generic)
{-| 'PrefixLength' is the prefix length used in queries, defaults to 0. -}
newtype PrefixLength =
PrefixLength Int deriving (Eq, Show, Generic)
newtype TypeName =
TypeName Text deriving (Eq, Show, Generic)
newtype PercentMatch =
PercentMatch Double deriving (Eq, Show, Generic)
newtype StopWord =
StopWord Text deriving (Eq, Show, Generic)
newtype QueryPath =
QueryPath Text deriving (Eq, Show, Generic)
{-| Allowing a wildcard at the beginning of a word (eg "*ing") is particularly
heavy, because all terms in the index need to be examined, just in case
they match. Leading wildcards can be disabled by setting
'AllowLeadingWildcard' to false. -}
newtype AllowLeadingWildcard =
AllowLeadingWildcard Bool deriving (Eq, Show, Generic)
newtype LowercaseExpanded =
LowercaseExpanded Bool deriving (Eq, Show, Generic)
newtype EnablePositionIncrements =
EnablePositionIncrements Bool deriving (Eq, Show, Generic)
{-| By default, wildcard terms in a query are not analyzed.
Setting 'AnalyzeWildcard' to true enables best-effort analysis.
-}
newtype AnalyzeWildcard = AnalyzeWildcard Bool deriving (Eq, Show, Generic)
{-| 'GeneratePhraseQueries' defaults to false.
-}
newtype GeneratePhraseQueries =
GeneratePhraseQueries Bool deriving (Eq, Show, Generic)
{-| 'Locale' is used for string conversions - defaults to ROOT.
-}
newtype Locale = Locale Text deriving (Eq, Show, Generic)
newtype MaxWordLength = MaxWordLength Int deriving (Eq, Show, Generic)
newtype MinWordLength = MinWordLength Int deriving (Eq, Show, Generic)
{-| 'PhraseSlop' sets the default slop for phrases, 0 means exact
phrase matches. Default is 0.
-}
newtype PhraseSlop = PhraseSlop Int deriving (Eq, Show, Generic)
newtype MinDocFrequency = MinDocFrequency Int deriving (Eq, Show, Generic)
newtype MaxDocFrequency = MaxDocFrequency Int deriving (Eq, Show, Generic)
{-| 'unpackId' is a silly convenience function that gets used once.
-}
unpackId :: DocId -> Text
unpackId (DocId docId) = docId
-- | Whether to compute scores even when sorting on a field. Fixme: Newtype?
type TrackSortScores = Bool
-- | Zero-based offset of the first hit to return.
newtype From = From Int deriving (Eq, Show, ToJSON)
-- | Maximum number of hits to return.
newtype Size = Size Int deriving (Eq, Show, ToJSON)
-- | Top-level description of a search request: query, filter, sorting,
-- aggregations, highlighting, paging, search type and source filtering.
data Search = Search { queryBody :: Maybe Query
                     , filterBody :: Maybe Filter
                     , sortBody :: Maybe Sort
                     , aggBody :: Maybe Aggregations
                     , highlight :: Maybe Highlights
                       -- default False
                     , trackSortScores :: TrackSortScores
                     , from :: From
                     , size :: Size
                     , searchType :: SearchType
                     , fields :: Maybe [FieldName]
                     , source :: Maybe Source } deriving (Eq, Show)
-- | Execution mode of a search (query-then-fetch, scan, count, ...).
data SearchType = SearchTypeQueryThenFetch
                | SearchTypeDfsQueryThenFetch
                | SearchTypeCount
                | SearchTypeScan
                | SearchTypeQueryAndFetch
                | SearchTypeDfsQueryAndFetch
  deriving (Eq, Show)
-- | Controls which parts of the stored _source are returned with each hit.
data Source =
    NoSource
  | SourcePatterns PatternOrPatterns
  | SourceIncludeExclude Include Exclude
    deriving (Show, Eq)
data PatternOrPatterns = PopPattern Pattern
                       | PopPatterns [Pattern] deriving (Eq, Show)
data Include = Include [Pattern] deriving (Eq, Show)
data Exclude = Exclude [Pattern] deriving (Eq, Show)
-- | A source-filtering glob pattern, e.g. "obj.*".
newtype Pattern = Pattern Text deriving (Eq, Show)
-- | Highlighting request: optional global settings plus per-field settings.
data Highlights = Highlights { globalsettings :: Maybe HighlightSettings
                             , highlightFields :: [FieldHighlight]
                             } deriving (Show, Eq)
data FieldHighlight = FieldHighlight FieldName (Maybe HighlightSettings)
                      deriving (Show, Eq)
-- | Which highlighter implementation to use for a field.
data HighlightSettings = Plain PlainHighlight
                       | Postings PostingsHighlight
                       | FastVector FastVectorHighlight
                         deriving (Show, Eq)
data PlainHighlight =
  PlainHighlight { plainCommon :: Maybe CommonHighlight
                 , plainNonPost :: Maybe NonPostings } deriving (Show, Eq)
 -- This requires that index_options are set to 'offset' in the mapping.
data PostingsHighlight = PostingsHighlight (Maybe CommonHighlight) deriving (Show, Eq)
-- This requires that term_vector is set to 'with_positions_offsets' in the mapping.
data FastVectorHighlight =
  FastVectorHighlight { fvCommon :: Maybe CommonHighlight
                      , fvNonPostSettings :: Maybe NonPostings
                      , boundaryChars :: Maybe Text
                      , boundaryMaxScan :: Maybe Int
                      , fragmentOffset :: Maybe Int
                      , matchedFields :: [Text]
                      , phraseLimit :: Maybe Int
                      } deriving (Show, Eq)
-- | Settings shared by all three highlighter kinds.
data CommonHighlight =
  CommonHighlight { order :: Maybe Text
                  , forceSource :: Maybe Bool
                  , tag :: Maybe HighlightTag
                  , encoder :: Maybe HighlightEncoder
                  , noMatchSize :: Maybe Int
                  , highlightQuery :: Maybe Query
                  , requireFieldMatch :: Maybe Bool
                  } deriving (Show, Eq)
-- Settings that are only applicable to FastVector and Plain highlighters.
data NonPostings =
  NonPostings { fragmentSize :: Maybe Int
              , numberOfFragments :: Maybe Int} deriving (Show, Eq)
data HighlightEncoder = DefaultEncoder
                      | HTMLEncoder
                        deriving (Show, Eq)
-- NOTE: Should the tags use some kind of HTML type, rather than Text?
data HighlightTag = TagSchema Text
                  | CustomTags ([Text], [Text]) -- Only uses more than the first value in the lists if fvh
                    deriving (Show, Eq)
-- | The query DSL: one constructor per Elasticsearch query type. Each
-- constructor's serialization lives in the 'ToJSON' 'Query' instance below.
data Query =
  TermQuery Term (Maybe Boost)
  | TermsQuery (NonEmpty Term)
  | QueryMatchQuery MatchQuery
  | QueryMultiMatchQuery MultiMatchQuery
  | QueryBoolQuery BoolQuery
  | QueryBoostingQuery BoostingQuery
  | QueryCommonTermsQuery CommonTermsQuery
  | ConstantScoreFilter Filter Boost
  | ConstantScoreQuery Query Boost
  | QueryDisMaxQuery DisMaxQuery
  | QueryFilteredQuery FilteredQuery
  | QueryFuzzyLikeThisQuery FuzzyLikeThisQuery
  | QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery
  | QueryFuzzyQuery FuzzyQuery
  | QueryHasChildQuery HasChildQuery
  | QueryHasParentQuery HasParentQuery
  | IdsQuery MappingName [DocId]
  | QueryIndicesQuery IndicesQuery
  | MatchAllQuery (Maybe Boost)
  | QueryMoreLikeThisQuery MoreLikeThisQuery
  | QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery
  | QueryNestedQuery NestedQuery
  | QueryPrefixQuery PrefixQuery
  | QueryQueryStringQuery QueryStringQuery
  | QuerySimpleQueryStringQuery SimpleQueryStringQuery
  | QueryRangeQuery RangeQuery
  | QueryRegexpQuery RegexpQuery
  deriving (Eq, Show)
data RegexpQuery =
  RegexpQuery { regexpQueryField :: FieldName
              , regexpQuery :: Regexp
              , regexpQueryFlags :: RegexpFlags
              , regexpQueryBoost :: Maybe Boost
              } deriving (Eq, Show)
data RangeQuery =
  RangeQuery { rangeQueryField :: FieldName
             , rangeQueryRange :: RangeValue
             , rangeQueryBoost :: Boost } deriving (Eq, Show)
-- | Build a 'RangeQuery' with the default boost of 1.0.
mkRangeQuery :: FieldName -> RangeValue -> RangeQuery
mkRangeQuery fld rng =
  RangeQuery { rangeQueryField = fld
             , rangeQueryRange = rng
             , rangeQueryBoost = Boost 1.0 }
-- | The simple_query_string query: a lenient query-string syntax that never
-- raises parse errors.
data SimpleQueryStringQuery =
  SimpleQueryStringQuery
    { simpleQueryStringQuery :: QueryString
    , simpleQueryStringField :: Maybe FieldOrFields
    , simpleQueryStringOperator :: Maybe BooleanOperator
    , simpleQueryStringAnalyzer :: Maybe Analyzer
    , simpleQueryStringFlags :: Maybe [SimpleQueryFlag]
    , simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded
    , simpleQueryStringLocale :: Maybe Locale
    } deriving (Eq, Show)
-- | Feature flags for 'SimpleQueryStringQuery'; serialized as upper-case
-- strings in the 'ToJSON' instance below.
data SimpleQueryFlag =
  SimpleQueryAll
  | SimpleQueryNone
  | SimpleQueryAnd
  | SimpleQueryOr
  | SimpleQueryPrefix
  | SimpleQueryPhrase
  | SimpleQueryPrecedence
  | SimpleQueryEscape
  | SimpleQueryWhitespace
  | SimpleQueryFuzzy
  | SimpleQueryNear
  | SimpleQuerySlop deriving (Eq, Show)
-- use_dis_max and tie_breaker when fields are plural?
-- | The full query_string query with all of its optional knobs; build with
-- 'mkQueryStringQuery' to default everything but the query text.
data QueryStringQuery =
  QueryStringQuery
    { queryStringQuery :: QueryString
    , queryStringDefaultField :: Maybe FieldName
    , queryStringOperator :: Maybe BooleanOperator
    , queryStringAnalyzer :: Maybe Analyzer
    , queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard
    , queryStringLowercaseExpanded :: Maybe LowercaseExpanded
    , queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements
    , queryStringFuzzyMaxExpansions :: Maybe MaxExpansions
    , queryStringFuzziness :: Maybe Fuzziness
    , queryStringFuzzyPrefixLength :: Maybe PrefixLength
    , queryStringPhraseSlop :: Maybe PhraseSlop
    , queryStringBoost :: Maybe Boost
    , queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard
    , queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries
    , queryStringMinimumShouldMatch :: Maybe MinimumMatch
    , queryStringLenient :: Maybe Lenient
    , queryStringLocale :: Maybe Locale
    } deriving (Eq, Show)
-- | Build a 'QueryStringQuery' from just the query text, leaving every
-- optional knob unset ('Nothing') so the server-side defaults apply.
mkQueryStringQuery :: QueryString -> QueryStringQuery
mkQueryStringQuery qs =
  QueryStringQuery { queryStringQuery                    = qs
                   , queryStringDefaultField             = Nothing
                   , queryStringOperator                 = Nothing
                   , queryStringAnalyzer                 = Nothing
                   , queryStringAllowLeadingWildcard     = Nothing
                   , queryStringLowercaseExpanded        = Nothing
                   , queryStringEnablePositionIncrements = Nothing
                   , queryStringFuzzyMaxExpansions       = Nothing
                   , queryStringFuzziness                = Nothing
                   , queryStringFuzzyPrefixLength        = Nothing
                   , queryStringPhraseSlop               = Nothing
                   , queryStringBoost                    = Nothing
                   , queryStringAnalyzeWildcard          = Nothing
                   , queryStringGeneratePhraseQueries    = Nothing
                   , queryStringMinimumShouldMatch       = Nothing
                   , queryStringLenient                  = Nothing
                   , queryStringLocale                   = Nothing }
-- | One field or several; controls the shape of the "fields" JSON value.
data FieldOrFields = FofField FieldName
                   | FofFields [FieldName] deriving (Eq, Show)
data PrefixQuery =
  PrefixQuery
  { prefixQueryField :: FieldName
  , prefixQueryPrefixValue :: Text
  , prefixQueryBoost :: Maybe Boost } deriving (Eq, Show)
data NestedQuery =
  NestedQuery
  { nestedQueryPath :: QueryPath
  , nestedQueryScoreType :: ScoreType
  , nestedQuery :: Query } deriving (Eq, Show)
-- | more_like_this_field: like 'MoreLikeThisQuery' but for a single field.
data MoreLikeThisFieldQuery =
  MoreLikeThisFieldQuery
  { moreLikeThisFieldText :: Text
  , moreLikeThisFieldFields :: FieldName
    -- default 0.3 (30%)
  , moreLikeThisFieldPercentMatch :: Maybe PercentMatch
  , moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency
  , moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms
  , moreLikeThisFieldStopWords :: Maybe [StopWord]
  , moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency
  , moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency
  , moreLikeThisFieldMinWordLength :: Maybe MinWordLength
  , moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength
  , moreLikeThisFieldBoostTerms :: Maybe BoostTerms
  , moreLikeThisFieldBoost :: Maybe Boost
  , moreLikeThisFieldAnalyzer :: Maybe Analyzer
  } deriving (Eq, Show)
data MoreLikeThisQuery =
  MoreLikeThisQuery
  { moreLikeThisText :: Text
  , moreLikeThisFields :: Maybe [FieldName]
    -- default 0.3 (30%)
  , moreLikeThisPercentMatch :: Maybe PercentMatch
  , moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency
  , moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms
  , moreLikeThisStopWords :: Maybe [StopWord]
  , moreLikeThisMinDocFrequency :: Maybe MinDocFrequency
  , moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency
  , moreLikeThisMinWordLength :: Maybe MinWordLength
  , moreLikeThisMaxWordLength :: Maybe MaxWordLength
  , moreLikeThisBoostTerms :: Maybe BoostTerms
  , moreLikeThisBoost :: Maybe Boost
  , moreLikeThisAnalyzer :: Maybe Analyzer
  } deriving (Eq, Show)
data IndicesQuery =
  IndicesQuery
  { indicesQueryIndices :: [IndexName]
  , indicesQuery :: Query
    -- default "all"
  , indicesQueryNoMatch :: Maybe Query } deriving (Eq, Show)
data HasParentQuery =
  HasParentQuery
  { hasParentQueryType :: TypeName
  , hasParentQuery :: Query
  , hasParentQueryScoreType :: Maybe ScoreType } deriving (Eq, Show)
data HasChildQuery =
  HasChildQuery
  { hasChildQueryType :: TypeName
  , hasChildQuery :: Query
  , hasChildQueryScoreType :: Maybe ScoreType } deriving (Eq, Show)
-- | How child/nested hit scores are folded into the parent document score.
data ScoreType =
  ScoreTypeMax
  | ScoreTypeSum
  | ScoreTypeAvg
  | ScoreTypeNone deriving (Eq, Show)
data FuzzyQuery =
  FuzzyQuery { fuzzyQueryField :: FieldName
             , fuzzyQueryValue :: Text
             , fuzzyQueryPrefixLength :: PrefixLength
             , fuzzyQueryMaxExpansions :: MaxExpansions
             , fuzzyQueryFuzziness :: Fuzziness
             , fuzzyQueryBoost :: Maybe Boost
             } deriving (Eq, Show)
data FuzzyLikeFieldQuery =
  FuzzyLikeFieldQuery
  { fuzzyLikeField :: FieldName
    -- anaphora is good for the soul.
  , fuzzyLikeFieldText :: Text
  , fuzzyLikeFieldMaxQueryTerms :: MaxQueryTerms
  , fuzzyLikeFieldIgnoreTermFrequency :: IgnoreTermFrequency
  , fuzzyLikeFieldFuzziness :: Fuzziness
  , fuzzyLikeFieldPrefixLength :: PrefixLength
  , fuzzyLikeFieldBoost :: Boost
  , fuzzyLikeFieldAnalyzer :: Maybe Analyzer
  } deriving (Eq, Show)
data FuzzyLikeThisQuery =
  FuzzyLikeThisQuery
  { fuzzyLikeFields :: [FieldName]
  , fuzzyLikeText :: Text
  , fuzzyLikeMaxQueryTerms :: MaxQueryTerms
  , fuzzyLikeIgnoreTermFrequency :: IgnoreTermFrequency
  , fuzzyLikeFuzziness :: Fuzziness
  , fuzzyLikePrefixLength :: PrefixLength
  , fuzzyLikeBoost :: Boost
  , fuzzyLikeAnalyzer :: Maybe Analyzer
  } deriving (Eq, Show)
-- | A query whose results are narrowed by a filter.
data FilteredQuery =
  FilteredQuery
  { filteredQuery :: Query
  , filteredFilter :: Filter } deriving (Eq, Show)
data DisMaxQuery =
  DisMaxQuery { disMaxQueries :: [Query]
                -- default 0.0
              , disMaxTiebreaker :: Tiebreaker
              , disMaxBoost :: Maybe Boost
              } deriving (Eq, Show)
-- | The match query; build with 'mkMatchQuery' to default the rare knobs.
data MatchQuery =
  MatchQuery { matchQueryField :: FieldName
             , matchQueryQueryString :: QueryString
             , matchQueryOperator :: BooleanOperator
             , matchQueryZeroTerms :: ZeroTermsQuery
             , matchQueryCutoffFrequency :: Maybe CutoffFrequency
             , matchQueryMatchType :: Maybe MatchQueryType
             , matchQueryAnalyzer :: Maybe Analyzer
             , matchQueryMaxExpansions :: Maybe MaxExpansions
             , matchQueryLenient :: Maybe Lenient
             , matchQueryBoost :: Maybe Boost } deriving (Eq, Show)
{-| 'mkMatchQuery' is a convenience function that defaults the less common parameters,
    enabling you to provide only the 'FieldName' and 'QueryString' to make a 'MatchQuery'
-}
mkMatchQuery :: FieldName -> QueryString -> MatchQuery
mkMatchQuery fld qs =
  MatchQuery { matchQueryField           = fld
             , matchQueryQueryString     = qs
             , matchQueryOperator        = Or
             , matchQueryZeroTerms       = ZeroTermsNone
             , matchQueryCutoffFrequency = Nothing
             , matchQueryMatchType       = Nothing
             , matchQueryAnalyzer        = Nothing
             , matchQueryMaxExpansions   = Nothing
             , matchQueryLenient         = Nothing
             , matchQueryBoost           = Nothing }
-- | Phrase variants of the match query.
data MatchQueryType =
  MatchPhrase
  | MatchPhrasePrefix deriving (Eq, Show)
-- | The multi_match query: a match query run over several fields; build
-- with 'mkMultiMatchQuery' to default the rare knobs.
data MultiMatchQuery =
  MultiMatchQuery { multiMatchQueryFields :: [FieldName]
                  , multiMatchQueryString :: QueryString
                  , multiMatchQueryOperator :: BooleanOperator
                  , multiMatchQueryZeroTerms :: ZeroTermsQuery
                  , multiMatchQueryTiebreaker :: Maybe Tiebreaker
                  , multiMatchQueryType :: Maybe MultiMatchQueryType
                  , multiMatchQueryCutoffFrequency :: Maybe CutoffFrequency
                  , multiMatchQueryAnalyzer :: Maybe Analyzer
                  , multiMatchQueryMaxExpansions :: Maybe MaxExpansions
                  , multiMatchQueryLenient :: Maybe Lenient } deriving (Eq, Show)
{-| 'mkMultiMatchQuery' is a convenience function that defaults the less common parameters,
    enabling you to provide only the list of 'FieldName's and 'QueryString' to
    make a 'MultiMatchQuery'.
-}
mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery
mkMultiMatchQuery matchFields query =
  MultiMatchQuery { multiMatchQueryFields          = matchFields
                  , multiMatchQueryString          = query
                  , multiMatchQueryOperator        = Or
                  , multiMatchQueryZeroTerms       = ZeroTermsNone
                  , multiMatchQueryTiebreaker      = Nothing
                  , multiMatchQueryType            = Nothing
                  , multiMatchQueryCutoffFrequency = Nothing
                  , multiMatchQueryAnalyzer        = Nothing
                  , multiMatchQueryMaxExpansions   = Nothing
                  , multiMatchQueryLenient         = Nothing }
-- | Scoring strategies for multi_match.
data MultiMatchQueryType =
  MultiMatchBestFields
  | MultiMatchMostFields
  | MultiMatchCrossFields
  | MultiMatchPhrase
  | MultiMatchPhrasePrefix deriving (Eq, Show)
-- | The bool compound query; build with 'mkBoolQuery' to default the
-- optional fields.
data BoolQuery =
  BoolQuery { boolQueryMustMatch :: [Query]
            , boolQueryMustNotMatch :: [Query]
            , boolQueryShouldMatch :: [Query]
            , boolQueryMinimumShouldMatch :: Maybe MinimumMatch
            , boolQueryBoost :: Maybe Boost
            , boolQueryDisableCoord :: Maybe DisableCoord
            } deriving (Eq, Show)
-- | Build a 'BoolQuery' from its three clause lists, leaving the
-- optional tuning fields unset.
mkBoolQuery :: [Query] -> [Query] -> [Query] -> BoolQuery
mkBoolQuery must mustNot should =
  BoolQuery { boolQueryMustMatch          = must
            , boolQueryMustNotMatch       = mustNot
            , boolQueryShouldMatch        = should
            , boolQueryMinimumShouldMatch = Nothing
            , boolQueryBoost              = Nothing
            , boolQueryDisableCoord       = Nothing }
-- | The boosting query: demote (rather than exclude) documents matching
-- the negative query.
data BoostingQuery =
  BoostingQuery { positiveQuery :: Query
                , negativeQuery :: Query
                , negativeBoost :: Boost } deriving (Eq, Show)
data CommonTermsQuery =
  CommonTermsQuery { commonField :: FieldName
                   , commonQuery :: QueryString
                   , commonCutoffFrequency :: CutoffFrequency
                   , commonLowFreqOperator :: BooleanOperator
                   , commonHighFreqOperator :: BooleanOperator
                   , commonMinimumShouldMatch :: Maybe CommonMinimumMatch
                   , commonBoost :: Maybe Boost
                   , commonAnalyzer :: Maybe Analyzer
                   , commonDisableCoord :: Maybe DisableCoord
                   } deriving (Eq, Show)
-- | minimum_should_match for common terms: either a single value or
-- separate low/high frequency values.
data CommonMinimumMatch =
    CommonMinimumMatchHighLow MinimumMatchHighLow
  | CommonMinimumMatch MinimumMatch
  deriving (Eq, Show)
data MinimumMatchHighLow =
  MinimumMatchHighLow { lowFreq :: MinimumMatch
                      , highFreq :: MinimumMatch } deriving (Eq, Show)
-- | The filter DSL: one constructor per Elasticsearch filter type. Each
-- constructor's serialization lives in the 'ToJSON' 'Filter' instance below.
data Filter = AndFilter [Filter] Cache
            | OrFilter [Filter] Cache
            | NotFilter Filter Cache
            | IdentityFilter
            | BoolFilter BoolMatch
            | ExistsFilter FieldName -- always cached
            | GeoBoundingBoxFilter GeoBoundingBoxConstraint
            | GeoDistanceFilter GeoPoint Distance DistanceType OptimizeBbox Cache
            | GeoDistanceRangeFilter GeoPoint DistanceRange
            | GeoPolygonFilter FieldName [LatLon]
            | IdsFilter MappingName [DocId]
            | LimitFilter Int
            | MissingFilter FieldName Existence NullValue
            | PrefixFilter FieldName PrefixValue Cache
            | QueryFilter Query Cache
            | RangeFilter FieldName RangeValue RangeExecution Cache
            | RegexpFilter FieldName Regexp RegexpFlags CacheName Cache CacheKey
              deriving (Eq, Show)
data ZeroTermsQuery = ZeroTermsNone
                    | ZeroTermsAll deriving (Eq, Show)
data RangeExecution = RangeExecutionIndex
                    | RangeExecutionFielddata deriving (Eq, Show)
newtype Regexp = Regexp Text deriving (Eq, Show)
data RegexpFlags = AllRegexpFlags
                 | NoRegexpFlags
                 | SomeRegexpFlags (NonEmpty RegexpFlag) deriving (Eq, Show)
data RegexpFlag = AnyString
                | Automaton
                | Complement
                | Empty
                | Intersection
                | Interval deriving (Eq, Show)
-- Range bound newtypes: the *D variants carry dates, the others doubles.
newtype LessThan = LessThan Double deriving (Eq, Show)
newtype LessThanEq = LessThanEq Double deriving (Eq, Show)
newtype GreaterThan = GreaterThan Double deriving (Eq, Show)
newtype GreaterThanEq = GreaterThanEq Double deriving (Eq, Show)
newtype LessThanD = LessThanD UTCTime deriving (Eq, Show)
newtype LessThanEqD = LessThanEqD UTCTime deriving (Eq, Show)
newtype GreaterThanD = GreaterThanD UTCTime deriving (Eq, Show)
newtype GreaterThanEqD = GreaterThanEqD UTCTime deriving (Eq, Show)
-- | Every legal combination of range bounds, for both dates and doubles;
-- rendered to JSON pairs by 'rangeValueToPair'.
data RangeValue = RangeDateLte LessThanEqD
                | RangeDateLt LessThanD
                | RangeDateGte GreaterThanEqD
                | RangeDateGt GreaterThanD
                | RangeDateGtLt GreaterThanD LessThanD
                | RangeDateGteLte GreaterThanEqD LessThanEqD
                | RangeDateGteLt GreaterThanEqD LessThanD
                | RangeDateGtLte GreaterThanD LessThanEqD
                | RangeDoubleLte LessThanEq
                | RangeDoubleLt LessThan
                | RangeDoubleGte GreaterThanEq
                | RangeDoubleGt GreaterThan
                | RangeDoubleGtLt GreaterThan LessThan
                | RangeDoubleGteLte GreaterThanEq LessThanEq
                | RangeDoubleGteLt GreaterThanEq LessThan
                | RangeDoubleGtLte GreaterThan LessThanEq
                deriving (Eq, Show)
-- | Render a 'RangeValue' as the "gt"/"gte"/"lt"/"lte" key-value pairs that
-- go inside a range query or range filter body.
rangeValueToPair :: RangeValue -> [Pair]
rangeValueToPair (RangeDateLte (LessThanEqD t))    = ["lte" .= t]
rangeValueToPair (RangeDateGte (GreaterThanEqD t)) = ["gte" .= t]
rangeValueToPair (RangeDateLt (LessThanD t))       = ["lt" .= t]
rangeValueToPair (RangeDateGt (GreaterThanD t))    = ["gt" .= t]
rangeValueToPair (RangeDateGteLte (GreaterThanEqD lower) (LessThanEqD upper)) = ["gte" .= lower, "lte" .= upper]
rangeValueToPair (RangeDateGtLte (GreaterThanD lower) (LessThanEqD upper))    = ["gt" .= lower, "lte" .= upper]
rangeValueToPair (RangeDateGteLt (GreaterThanEqD lower) (LessThanD upper))    = ["gte" .= lower, "lt" .= upper]
rangeValueToPair (RangeDateGtLt (GreaterThanD lower) (LessThanD upper))       = ["gt" .= lower, "lt" .= upper]
rangeValueToPair (RangeDoubleLte (LessThanEq t))    = ["lte" .= t]
rangeValueToPair (RangeDoubleGte (GreaterThanEq t)) = ["gte" .= t]
rangeValueToPair (RangeDoubleLt (LessThan t))       = ["lt" .= t]
rangeValueToPair (RangeDoubleGt (GreaterThan t))    = ["gt" .= t]
rangeValueToPair (RangeDoubleGteLte (GreaterThanEq lower) (LessThanEq upper)) = ["gte" .= lower, "lte" .= upper]
rangeValueToPair (RangeDoubleGtLte (GreaterThan lower) (LessThanEq upper))    = ["gt" .= lower, "lte" .= upper]
rangeValueToPair (RangeDoubleGteLt (GreaterThanEq lower) (LessThan upper))    = ["gte" .= lower, "lt" .= upper]
rangeValueToPair (RangeDoubleGtLt (GreaterThan lower) (LessThan upper))       = ["gt" .= lower, "lt" .= upper]
-- | A single field/value pair for term-level queries and filters.
data Term = Term { termField :: Text
                 , termValue :: Text } deriving (Eq, Show)
data BoolMatch = MustMatch Term Cache
               | MustNotMatch Term Cache
               | ShouldMatch [Term] Cache deriving (Eq, Show)
-- "memory" or "indexed"
data GeoFilterType = GeoFilterMemory
                   | GeoFilterIndexed deriving (Eq, Show)
data LatLon = LatLon { lat :: Double
                     , lon :: Double } deriving (Eq, Show)
data GeoBoundingBox =
  GeoBoundingBox { topLeft :: LatLon
                 , bottomRight :: LatLon } deriving (Eq, Show)
data GeoBoundingBoxConstraint =
  GeoBoundingBoxConstraint { geoBBField :: FieldName
                           , constraintBox :: GeoBoundingBox
                           , bbConstraintcache :: Cache
                           , geoType :: GeoFilterType
                           } deriving (Eq, Show)
data GeoPoint =
  GeoPoint { geoField :: FieldName
           , latLon :: LatLon} deriving (Eq, Show)
data DistanceUnit = Miles
                  | Yards
                  | Feet
                  | Inches
                  | Kilometers
                  | Meters
                  | Centimeters
                  | Millimeters
                  | NauticalMiles deriving (Eq, Show)
data DistanceType = Arc
                  | SloppyArc -- doesn't exist <1.0
                  | Plane deriving (Eq, Show)
data OptimizeBbox = OptimizeGeoFilterType GeoFilterType
                  | NoOptimizeBbox deriving (Eq, Show)
data Distance =
  Distance { coefficient :: Double
           , unit :: DistanceUnit } deriving (Eq, Show)
data DistanceRange =
  DistanceRange { distanceFrom :: Distance
                , distanceTo :: Distance } deriving (Eq, Show)
-- | Top-level search response, parameterized over the document type.
data SearchResult a =
  SearchResult { took :: Int
               , timedOut :: Bool
               , shards :: ShardResult
               , searchHits :: SearchHits a
               , aggregations :: Maybe AggregationResults
               , scrollId :: Maybe ScrollId } deriving (Eq, Show)
type ScrollId = Text -- Fixme: Newtype
-- | 'Nothing' when scoring is disabled (e.g. sorted searches).
type Score = Maybe Double
-- | The "hits" object of a response: total count, best score, actual hits.
data SearchHits a =
  SearchHits { hitsTotal :: Int
             , maxScore :: Score
             , hits :: [Hit a] } deriving (Eq, Show)
-- | Result sets merge naturally: totals add, the best score wins, and the
-- hit lists are concatenated. 'mempty' is the empty result set.
instance Monoid (SearchHits a) where
  mempty = SearchHits 0 Nothing mempty
  mappend lhs rhs =
    SearchHits (hitsTotal lhs + hitsTotal rhs)
               (max (maxScore lhs) (maxScore rhs))
               (hits lhs <> hits rhs)
-- | A single search hit, carrying the decoded source document as @a@.
data Hit a =
  Hit { hitIndex :: IndexName
      , hitType :: MappingName
      , hitDocId :: DocId
      , hitScore :: Score
      , hitSource :: a
      , hitHighlight :: Maybe HitHighlight } deriving (Eq, Show)
-- | Shard bookkeeping reported with every response.
data ShardResult =
  ShardResult { shardTotal :: Int
              , shardsSuccessful :: Int
              , shardsFailed :: Int } deriving (Eq, Show, Generic)
-- | Per-field highlighted fragments for a hit.
type HitHighlight = M.Map Text [Text]
-- | Render any 'Show'able value as strict 'Text'.
showText :: Show a => a -> Text
showText x = T.pack (show x)
-- | Named aggregations to attach to a 'Search'.
type Aggregations = M.Map Text Aggregation
-- | An empty aggregation map.
emptyAggregations :: Aggregations
emptyAggregations = M.empty
-- | Build a one-element 'Aggregations' map from a name and an aggregation.
-- Uses 'M.singleton' directly instead of inserting into an empty map.
mkAggregations :: Text -> Aggregation -> Aggregations
mkAggregations name aggregation = M.singleton name aggregation
-- | Sort directive for the buckets of a terms aggregation.
data TermOrder = TermOrder{ termSortField :: Text
                          , termSortOrder :: SortOrder } deriving (Eq, Show)
-- | Either an exact term or a pattern-with-flags for include/exclude.
data TermInclusion = TermInclusion Text
                   | TermPattern Text Text deriving (Eq, Show)
data CollectionMode = BreadthFirst
                    | DepthFirst deriving (Eq, Show)
data ExecutionHint = Ordinals
                   | GlobalOrdinals
                   | GlobalOrdinalsHash
                   | GlobalOrdinalsLowCardinality
                   | Map deriving (Eq, Show)
-- | Units for 'FractionalInterval'; 'Show' renders the ES shorthand
-- ("w", "d", "h", "m", "s").
data TimeInterval = Weeks
                  | Days
                  | Hours
                  | Minutes
                  | Seconds deriving (Eq)
data Interval = Year
              | Quarter
              | Month
              | Week
              | Day
              | Hour
              | Minute
              | Second
              | FractionalInterval Float TimeInterval deriving (Eq, Show)
-- | The aggregation types supported here; serialized by the
-- 'ToJSON' 'Aggregation' instance below.
data Aggregation = TermsAgg TermsAggregation
                 | DateHistogramAgg DateHistogramAggregation
                 | ValueCountAgg ValueCountAggregation
                 | FilterAgg FilterAggregation deriving (Eq, Show)
-- | 'term' is Left for a field name, Right for a script.
data TermsAggregation = TermsAggregation { term :: Either Text Text
                                         , termInclude :: Maybe TermInclusion
                                         , termExclude :: Maybe TermInclusion
                                         , termOrder :: Maybe TermOrder
                                         , termMinDocCount :: Maybe Int
                                         , termSize :: Maybe Int
                                         , termShardSize :: Maybe Int
                                         , termCollectMode :: Maybe CollectionMode
                                         , termExecutionHint :: Maybe ExecutionHint
                                         , termAggs :: Maybe Aggregations
                                         } deriving (Eq, Show)
data DateHistogramAggregation = DateHistogramAggregation { dateField :: FieldName
                                                         , dateInterval :: Interval
                                                         , dateFormat :: Maybe Text
                                                           -- pre and post deprecated in 1.5
                                                         , datePreZone :: Maybe Text
                                                         , datePostZone :: Maybe Text
                                                         , datePreOffset :: Maybe Text
                                                         , datePostOffset :: Maybe Text
                                                         , dateAggs :: Maybe Aggregations
                                                         } deriving (Eq, Show)
-- | See <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-valuecount-aggregation.html> for more information.
data ValueCountAggregation = FieldValueCount FieldName
                           | ScriptValueCount Script deriving (Eq, Show)
-- | Single-bucket filter aggregations. See <https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-filter-aggregation.html#search-aggregations-bucket-filter-aggregation> for more information.
data FilterAggregation = FilterAggregation { faFilter :: Filter
                                           , faAggs :: Maybe Aggregations} deriving (Eq, Show)
-- | Terms aggregation over a field, with every optional knob unset.
mkTermsAggregation :: Text -> TermsAggregation
mkTermsAggregation t =
  TermsAggregation { term              = Left t
                   , termInclude       = Nothing
                   , termExclude       = Nothing
                   , termOrder         = Nothing
                   , termMinDocCount   = Nothing
                   , termSize          = Nothing
                   , termShardSize     = Nothing
                   , termCollectMode   = Nothing
                   , termExecutionHint = Nothing
                   , termAggs          = Nothing }
-- | Terms aggregation driven by a script, with every optional knob unset.
mkTermsScriptAggregation :: Text -> TermsAggregation
mkTermsScriptAggregation t =
  TermsAggregation { term              = Right t
                   , termInclude       = Nothing
                   , termExclude       = Nothing
                   , termOrder         = Nothing
                   , termMinDocCount   = Nothing
                   , termSize          = Nothing
                   , termShardSize     = Nothing
                   , termCollectMode   = Nothing
                   , termExecutionHint = Nothing
                   , termAggs          = Nothing }
-- | Date histogram over a field at a given interval, everything else unset.
mkDateHistogram :: FieldName -> Interval -> DateHistogramAggregation
mkDateHistogram t i =
  DateHistogramAggregation { dateField      = t
                           , dateInterval   = i
                           , dateFormat     = Nothing
                           , datePreZone    = Nothing
                           , datePostZone   = Nothing
                           , datePreOffset  = Nothing
                           , datePostOffset = Nothing
                           , dateAggs       = Nothing }
-- | Serializes as a one-pair object: {"<field>": <sort order>}.
instance ToJSON TermOrder where
  toJSON (TermOrder fld ord) = object [fld .= ord]
-- | A plain inclusion is just the term text; a pattern carries flags.
instance ToJSON TermInclusion where
  toJSON (TermInclusion t) = toJSON t
  toJSON (TermPattern pat flgs) =
    omitNulls [ "pattern" .= pat
              , "flags" .= flgs ]
-- | Snake-cased string encodings expected by the aggregation DSL.
instance ToJSON CollectionMode where
  toJSON mode = case mode of
    BreadthFirst -> "breadth_first"
    DepthFirst   -> "depth_first"
instance ToJSON ExecutionHint where
  toJSON hint = case hint of
    Ordinals                     -> "ordinals"
    GlobalOrdinals               -> "global_ordinals"
    GlobalOrdinalsHash           -> "global_ordinals_hash"
    GlobalOrdinalsLowCardinality -> "global_ordinals_low_cardinality"
    Map                          -> "map"
-- | Calendar intervals serialize as their lowercase names; fractional
-- intervals as e.g. "1.5h" (number followed by the unit shorthand).
instance ToJSON Interval where
  toJSON interval = case interval of
    Year    -> "year"
    Quarter -> "quarter"
    Month   -> "month"
    Week    -> "week"
    Day     -> "day"
    Hour    -> "hour"
    Minute  -> "minute"
    Second  -> "second"
    FractionalInterval fraction timeUnit ->
      toJSON (show fraction ++ show timeUnit)
-- | Renders the Elasticsearch unit shorthand, not a derived 'Show'.
instance Show TimeInterval where
  show t = case t of
    Weeks   -> "w"
    Days    -> "d"
    Hours   -> "h"
    Minutes -> "m"
    Seconds -> "s"
-- | Each aggregation serializes as {"<agg type>": {...settings...},
-- "aggs": <sub-aggregations>}, with nulls stripped by 'omitNulls'.
instance ToJSON Aggregation where
  toJSON (TermsAgg (TermsAggregation term include exclude order minDocCount size shardSize collectMode executionHint termAggs)) =
    omitNulls ["terms" .= omitNulls [ toJSON' term,
                                      "include" .= include,
                                      "exclude" .= exclude,
                                      "order" .= order,
                                      "min_doc_count" .= minDocCount,
                                      "size" .= size,
                                      "shard_size" .= shardSize,
                                      "collect_mode" .= collectMode,
                                      "execution_hint" .= executionHint
                                    ],
               "aggs" .= termAggs ]
    where
      -- 'Left' is a field-driven terms agg, 'Right' a script-driven one.
      toJSON' x = case x of { Left y -> "field" .= y;  Right y -> "script" .= y }
  toJSON (DateHistogramAgg (DateHistogramAggregation field interval format preZone postZone preOffset postOffset dateHistoAggs)) =
    omitNulls ["date_histogram" .= omitNulls [ "field" .= field,
                                               "interval" .= interval,
                                               "format" .= format,
                                               "pre_zone" .= preZone,
                                               "post_zone" .= postZone,
                                               "pre_offset" .= preOffset,
                                               "post_offset" .= postOffset
                                             ],
               "aggs" .= dateHistoAggs ]
  toJSON (ValueCountAgg a) = object ["value_count" .= v]
    where v = case a of
                (FieldValueCount (FieldName n)) -> object ["field" .= n]
                (ScriptValueCount (Script s)) -> object ["script" .= s]
  toJSON (FilterAgg (FilterAggregation filt ags)) =
    omitNulls [ "filter" .= filt
              , "aggs" .= ags]
-- | Raw aggregation section of a response; values are decoded on demand
-- by 'toTerms' / 'toDateHistogram'.
type AggregationResults = M.Map Text Value
-- | Common accessors shared by all bucket-producing aggregation results.
class BucketAggregation a where
  key :: a -> Text
  docCount :: a -> Int
  aggs :: a -> Maybe AggregationResults
-- | The list of buckets inside a bucket aggregation's result.
data Bucket a = Bucket { buckets :: [a]} deriving (Show)
data TermsResult = TermsResult { termKey :: Text
                               , termsDocCount :: Int
                               , termsAggs :: Maybe AggregationResults } deriving (Show)
data DateHistogramResult = DateHistogramResult { dateKey :: Int
                                               , dateKeyStr :: Maybe Text
                                               , dateDocCount :: Int
                                               , dateHistogramAggs :: Maybe AggregationResults } deriving (Show)
-- | Look up a named terms aggregation in a response and decode its buckets.
toTerms :: Text -> AggregationResults -> Maybe (Bucket TermsResult)
toTerms name results = parseMaybe parseJSON =<< M.lookup name results
-- | Look up a named date-histogram aggregation and decode its buckets.
toDateHistogram :: Text -> AggregationResults -> Maybe (Bucket DateHistogramResult)
toDateHistogram name results = parseMaybe parseJSON =<< M.lookup name results
instance BucketAggregation TermsResult where
  key = termKey
  docCount = termsDocCount
  aggs = termsAggs
instance BucketAggregation DateHistogramResult where
  -- Date keys are epoch millis (an Int here); render them as Text.
  key = showText . dateKey
  docCount = dateDocCount
  aggs = dateHistogramAggs
instance (FromJSON a, BucketAggregation a) => FromJSON (Bucket a) where
  parseJSON (Object v) = Bucket <$>
                         v .: "buckets"
  -- NOTE(review): 'mempty' here is presumably the failing parser from
  -- aeson's Monoid instance for Parser — confirm against the aeson version
  -- this builds with.
  parseJSON _ = mempty
instance FromJSON TermsResult where
  parseJSON (Object v) = TermsResult <$>
                         v .: "key" <*>
                         v .: "doc_count" <*>
                         v .:? "aggregations"
  parseJSON _ = mempty
instance FromJSON DateHistogramResult where
  parseJSON (Object v) = DateHistogramResult <$>
                         v .: "key" <*>
                         v .:? "key_as_string" <*>
                         v .: "doc_count" <*>
                         v .:? "aggregations"
  parseJSON _ = mempty
-- | Filters conjoin under 'mappend'. NOTE(review): the identity laws only
-- hold up to query semantics — @a <> mempty@ yields an 'AndFilter' wrapping
-- 'IdentityFilter' rather than @a@ itself.
instance Monoid Filter where
  mempty = IdentityFilter
  mappend a b = AndFilter [a, b] defaultCache
-- | Disjunction of filters ('Seminearring' is a project-defined class).
instance Seminearring Filter where
  a <||> b = OrFilter [a, b] defaultCache
-- | Serialization of the filter DSL; each constructor maps to the
-- corresponding Elasticsearch filter object keyed by its type name.
instance ToJSON Filter where
  toJSON (AndFilter filters cache) =
    object ["and" .=
            object [ "filters" .= fmap toJSON filters
                   , "_cache" .= cache]]
  toJSON (OrFilter filters cache) =
    object ["or" .=
            object [ "filters" .= fmap toJSON filters
                   , "_cache" .= cache]]
  toJSON (NotFilter notFilter cache) =
    object ["not" .=
            object ["filter" .= notFilter
                   , "_cache" .= cache]]
  -- Identity is expressed as match_all: it accepts every document.
  toJSON (IdentityFilter) =
    object ["match_all" .= object []]
  toJSON (TermFilter (Term termFilterField termFilterValue) cache) =
    object ["term" .= object base]
    where base = [termFilterField .= termFilterValue,
                  "_cache" .= cache]
  toJSON (ExistsFilter (FieldName fieldName)) =
    object ["exists" .= object
            ["field" .= fieldName]]
  toJSON (BoolFilter boolMatch) =
    object ["bool" .= boolMatch]
  toJSON (GeoBoundingBoxFilter bbConstraint) =
    object ["geo_bounding_box" .= bbConstraint]
  toJSON (GeoDistanceFilter (GeoPoint (FieldName distanceGeoField) geoDistLatLon)
          distance distanceType optimizeBbox cache) =
    object ["geo_distance" .=
            object ["distance" .= distance
                   , "distance_type" .= distanceType
                   , "optimize_bbox" .= optimizeBbox
                   , distanceGeoField .= geoDistLatLon
                   , "_cache" .= cache]]
  toJSON (GeoDistanceRangeFilter (GeoPoint (FieldName gddrField) drLatLon)
          (DistanceRange geoDistRangeDistFrom drDistanceTo)) =
    object ["geo_distance_range" .=
            object ["from" .= geoDistRangeDistFrom
                   , "to" .= drDistanceTo
                   , gddrField .= drLatLon]]
  toJSON (GeoPolygonFilter (FieldName geoPolygonFilterField) latLons) =
    object ["geo_polygon" .=
            object [geoPolygonFilterField .=
                    object ["points" .= fmap toJSON latLons]]]
  toJSON (IdsFilter (MappingName mappingName) values) =
    object ["ids" .=
            object ["type" .= mappingName
                   , "values" .= fmap unpackId values]]
  toJSON (LimitFilter limit) =
    object ["limit" .= object ["value" .= limit]]
  toJSON (MissingFilter (FieldName fieldName) (Existence existence) (NullValue nullValue)) =
    object ["missing" .=
            object ["field" .= fieldName
                   , "existence" .= existence
                   , "null_value" .= nullValue]]
  toJSON (PrefixFilter (FieldName fieldName) fieldValue cache) =
    object ["prefix" .=
            object [fieldName .= fieldValue
                   , "_cache" .= cache]]
  -- An uncached query filter is plain "query"; a cached one must use the
  -- "fquery" wrapper so the _cache flag has somewhere to live.
  toJSON (QueryFilter query False) =
    object ["query" .= toJSON query ]
  toJSON (QueryFilter query True) =
    object ["fquery" .=
            object [ "query" .= toJSON query
                   , "_cache" .= True ]]
  toJSON (RangeFilter (FieldName fieldName) rangeValue rangeExecution cache) =
    object ["range" .=
            object [ fieldName .= object (rangeValueToPair rangeValue)
                   , "execution" .= rangeExecution
                   , "_cache" .= cache]]
  toJSON (RegexpFilter (FieldName fieldName)
          (Regexp regexText) flags (CacheName cacheName) cache (CacheKey cacheKey)) =
    object ["regexp" .=
            object [fieldName .=
                    object ["value" .= regexText
                           , "flags" .= flags]
                   , "_name" .= cacheName
                   , "_cache" .= cache
                   , "_cache_key" .= cacheKey]]
-- | Serializes as a one-pair object: {"<field>": <lat/lon>}.
instance ToJSON GeoPoint where
  toJSON (GeoPoint (FieldName fld) loc) =
    object [ fld .= loc ]
-- | Serialization of the query DSL; each constructor is wrapped in an
-- object keyed by the Elasticsearch query type name.
instance ToJSON Query where
  toJSON (TermQuery (Term termQueryField termQueryValue) boost) =
    object [ "term" .=
             object [termQueryField .= object merged]]
    where
      base = [ "value" .= termQueryValue ]
      -- The boost pair is appended only when a boost was supplied.
      boosted = maybe [] (return . ("boost" .=)) boost
      merged = mappend base boosted
  toJSON (TermsQuery terms) =
    object [ "terms" .= object conjoined ]
    -- All terms are assumed to target the same field; the field name is
    -- taken from the first element of the non-empty list.
    where conjoined = [ getTermsField terms .=
                        fmap (toJSON . getTermValue) (toList terms)]
          getTermsField ((Term f _ ) :| _) = f
          getTermValue (Term _ v) = v
  toJSON (IdsQuery idsQueryMappingName docIds) =
    object [ "ids" .= object conjoined ]
    where conjoined = [ "type"   .= idsQueryMappingName
                      , "values" .= fmap toJSON docIds ]
  toJSON (QueryQueryStringQuery qQueryStringQuery) =
    object [ "query_string" .= qQueryStringQuery ]
  toJSON (QueryMatchQuery matchQuery) =
    object [ "match" .= matchQuery ]
  -- NOTE(review): multi_match delegates without a wrapping key here —
  -- presumably the MultiMatchQuery ToJSON instance supplies its own
  -- "multi_match" wrapper; confirm.
  toJSON (QueryMultiMatchQuery multiMatchQuery) =
      toJSON multiMatchQuery
  toJSON (QueryBoolQuery boolQuery) =
    object [ "bool" .= boolQuery ]
  toJSON (QueryBoostingQuery boostingQuery) =
    object [ "boosting" .= boostingQuery ]
  toJSON (QueryCommonTermsQuery commonTermsQuery) =
    object [ "common" .= commonTermsQuery ]
  toJSON (ConstantScoreFilter csFilter boost) =
    object [ "constant_score" .= csFilter
           , "boost" .= boost]
  toJSON (ConstantScoreQuery query boost) =
    object [ "constant_score" .= query
           , "boost" .= boost]
  toJSON (QueryDisMaxQuery disMaxQuery) =
    object [ "dis_max" .= disMaxQuery ]
  toJSON (QueryFilteredQuery qFilteredQuery) =
    object [ "filtered" .= qFilteredQuery ]
  toJSON (QueryFuzzyLikeThisQuery fuzzyQuery) =
    object [ "fuzzy_like_this" .= fuzzyQuery ]
  toJSON (QueryFuzzyLikeFieldQuery fuzzyFieldQuery) =
    object [ "fuzzy_like_this_field" .= fuzzyFieldQuery ]
  toJSON (QueryFuzzyQuery fuzzyQuery) =
    object [ "fuzzy" .= fuzzyQuery ]
  toJSON (QueryHasChildQuery childQuery) =
    object [ "has_child" .= childQuery ]
  toJSON (QueryHasParentQuery parentQuery) =
    object [ "has_parent" .= parentQuery ]
  toJSON (QueryIndicesQuery qIndicesQuery) =
    object [ "indices" .= qIndicesQuery ]
  toJSON (MatchAllQuery boost) =
    object [ "match_all" .= omitNulls [ "boost" .= boost ] ]
  toJSON (QueryMoreLikeThisQuery query) =
    object [ "more_like_this" .= query ]
  toJSON (QueryMoreLikeThisFieldQuery query) =
    object [ "more_like_this_field" .= query ]
  toJSON (QueryNestedQuery query) =
    object [ "nested" .= query ]
  toJSON (QueryPrefixQuery query) =
    object [ "prefix" .= query ]
  toJSON (QueryRangeQuery query) =
    object [ "range" .= query ]
  toJSON (QueryRegexpQuery query) =
    object [ "regexp" .= query ]
  toJSON (QuerySimpleQueryStringQuery query) =
    object [ "simple_query_string" .= query ]
-- | Like 'object', but drops pairs whose value is 'Null' or an empty
-- array, so optional fields disappear from the payload entirely.
omitNulls :: [(Text, Value)] -> Value
omitNulls = object . filter keep
  where
    keep (_, Null)    = False
    keep (_, Array a) = not (V.null a)
    keep _            = True
-- | "query" is always present; the optional settings are stripped by
-- 'omitNulls' when unset.
instance ToJSON SimpleQueryStringQuery where
  toJSON sqs =
    omitNulls ( ("query" .= simpleQueryStringQuery sqs)
              : [ "fields" .= simpleQueryStringField sqs
                , "default_operator" .= simpleQueryStringOperator sqs
                , "analyzer" .= simpleQueryStringAnalyzer sqs
                , "flags" .= simpleQueryStringFlags sqs
                , "lowercase_expanded_terms" .= simpleQueryStringLowercaseExpanded sqs
                , "locale" .= simpleQueryStringLocale sqs ] )
-- | One field serializes bare; several serialize as an array.
instance ToJSON FieldOrFields where
  toJSON fof = case fof of
    FofField fieldName   -> toJSON fieldName
    FofFields fieldNames -> toJSON fieldNames
-- | Flags serialize as the upper-case names the simple_query_string
-- "flags" parameter expects.
instance ToJSON SimpleQueryFlag where
  toJSON flag = case flag of
    SimpleQueryAll        -> "ALL"
    SimpleQueryNone       -> "NONE"
    SimpleQueryAnd        -> "AND"
    SimpleQueryOr         -> "OR"
    SimpleQueryPrefix     -> "PREFIX"
    SimpleQueryPhrase     -> "PHRASE"
    SimpleQueryPrecedence -> "PRECEDENCE"
    SimpleQueryEscape     -> "ESCAPE"
    SimpleQueryWhitespace -> "WHITESPACE"
    SimpleQueryFuzzy      -> "FUZZY"
    SimpleQueryNear       -> "NEAR"
    SimpleQuerySlop       -> "SLOP"
instance ToJSON RegexpQuery where
toJSON (RegexpQuery (FieldName rqQueryField)
(Regexp regexpQueryQuery) rqQueryFlags
rqQueryBoost) =
object [ rqQueryField .= omitNulls base ]
where base = [ "value" .= regexpQueryQuery
, "flags" .= rqQueryFlags
, "boost" .= rqQueryBoost ]
instance ToJSON QueryStringQuery where
toJSON (QueryStringQuery qsQueryString
qsDefaultField qsOperator
qsAnalyzer qsAllowWildcard
qsLowercaseExpanded qsEnablePositionIncrements
qsFuzzyMaxExpansions qsFuzziness
qsFuzzyPrefixLength qsPhraseSlop
qsBoost qsAnalyzeWildcard
qsGeneratePhraseQueries qsMinimumShouldMatch
qsLenient qsLocale) =
omitNulls base
where
base = [ "query" .= qsQueryString
, "default_field" .= qsDefaultField
, "default_operator" .= qsOperator
, "analyzer" .= qsAnalyzer
, "allow_leading_wildcard" .= qsAllowWildcard
, "lowercase_expanded_terms" .= qsLowercaseExpanded
, "enable_position_increments" .= qsEnablePositionIncrements
, "fuzzy_max_expansions" .= qsFuzzyMaxExpansions
, "fuzziness" .= qsFuzziness
, "fuzzy_prefix_length" .= qsFuzzyPrefixLength
, "phrase_slop" .= qsPhraseSlop
, "boost" .= qsBoost
, "analyze_wildcard" .= qsAnalyzeWildcard
, "auto_generate_phrase_queries" .= qsGeneratePhraseQueries
, "minimum_should_match" .= qsMinimumShouldMatch
, "lenient" .= qsLenient
, "locale" .= qsLocale ]
-- | @range@ queries nest their bounds under the field name.  Unlike
-- most serializers in this module, the boost is always emitted (it is
-- not filtered through 'omitNulls'); the bounds themselves come from
-- 'rangeValueToPair'.
instance ToJSON RangeQuery where
  toJSON (RangeQuery (FieldName fieldName) range boost) =
    object [ fieldName .= conjoined ]
    -- Cons rather than singleton-(++): same value, idiomatic form.
    where conjoined = ("boost" .= boost) : rangeValueToPair range
-- | @prefix@ queries nest value and boost under the field name.
instance ToJSON PrefixQuery where
  toJSON (PrefixQuery (FieldName fieldName) queryValue boost) =
    object [ fieldName .= omitNulls base ]
    where base = [ "value" .= queryValue
                 , "boost" .= boost ]
-- | @nested@ queries: path, score mode and the inner query are all
-- emitted unconditionally.
instance ToJSON NestedQuery where
  toJSON (NestedQuery nqPath nqScoreType nqQuery) =
    object [ "path" .= nqPath
           , "score_mode" .= nqScoreType
           , "query" .= nqQuery ]
-- | @more_like_this_field@: settings nested under the field name, with
-- unset options dropped.
instance ToJSON MoreLikeThisFieldQuery where
  toJSON (MoreLikeThisFieldQuery text (FieldName fieldName)
          percent mtf mqt stopwords mindf maxdf
          minwl maxwl boostTerms boost analyzer) =
    object [ fieldName .= omitNulls base ]
    where base = [ "like_text" .= text
                 , "percent_terms_to_match" .= percent
                 , "min_term_freq" .= mtf
                 , "max_query_terms" .= mqt
                 , "stop_words" .= stopwords
                 , "min_doc_freq" .= mindf
                 , "max_doc_freq" .= maxdf
                 , "min_word_length" .= minwl
                 , "max_word_length" .= maxwl
                 , "boost_terms" .= boostTerms
                 , "boost" .= boost
                 , "analyzer" .= analyzer ]
-- | @more_like_this@: same settings as the per-field variant, plus an
-- optional list of fields, emitted at the top level.
instance ToJSON MoreLikeThisQuery where
  toJSON (MoreLikeThisQuery text fields percent
          mtf mqt stopwords mindf maxdf
          minwl maxwl boostTerms boost analyzer) =
    omitNulls base
    where base = [ "like_text" .= text
                 , "fields" .= fields
                 , "percent_terms_to_match" .= percent
                 , "min_term_freq" .= mtf
                 , "max_query_terms" .= mqt
                 , "stop_words" .= stopwords
                 , "min_doc_freq" .= mindf
                 , "max_doc_freq" .= maxdf
                 , "min_word_length" .= minwl
                 , "max_word_length" .= maxwl
                 , "boost_terms" .= boostTerms
                 , "boost" .= boost
                 , "analyzer" .= analyzer ]
-- | @indices@ query: index list, fallback query and main query.
instance ToJSON IndicesQuery where
  toJSON (IndicesQuery indices query noMatch) =
    omitNulls [ "indices" .= indices
              , "no_match_query" .= noMatch
              , "query" .= query ]
-- | @has_parent@: keyed by the parent document type.
instance ToJSON HasParentQuery where
  toJSON (HasParentQuery queryType query scoreType) =
    omitNulls [ "parent_type" .= queryType
              , "score_type" .= scoreType
              , "query" .= query ]
-- | @has_child@: keyed by the child document type.
instance ToJSON HasChildQuery where
  toJSON (HasChildQuery queryType query scoreType) =
    omitNulls [ "query" .= query
              , "score_type" .= scoreType
              , "type" .= queryType ]
-- | @fuzzy@ queries nest their settings under the field name.
instance ToJSON FuzzyQuery where
  toJSON (FuzzyQuery (FieldName fieldName) queryText
          prefixLength maxEx fuzziness boost) =
    object [ fieldName .= omitNulls base ]
    where base = [ "value" .= queryText
                 , "fuzziness" .= fuzziness
                 , "prefix_length" .= prefixLength
                 , "boost" .= boost
                 , "max_expansions" .= maxEx ]
-- | @fuzzy_like_this_field@: settings nested under the field name.
instance ToJSON FuzzyLikeFieldQuery where
  toJSON (FuzzyLikeFieldQuery (FieldName fieldName)
          fieldText maxTerms ignoreFreq fuzziness prefixLength
          boost analyzer) =
    object [ fieldName .=
             omitNulls [ "like_text" .= fieldText
                       , "max_query_terms" .= maxTerms
                       , "ignore_tf" .= ignoreFreq
                       , "fuzziness" .= fuzziness
                       , "prefix_length" .= prefixLength
                       , "analyzer" .= analyzer
                       , "boost" .= boost ]]
-- | @fuzzy_like_this@: like the per-field variant but carrying an
-- explicit field list at the top level.
instance ToJSON FuzzyLikeThisQuery where
  toJSON (FuzzyLikeThisQuery fields text maxTerms
          ignoreFreq fuzziness prefixLength boost analyzer) =
    omitNulls base
    where base = [ "fields" .= fields
                 , "like_text" .= text
                 , "max_query_terms" .= maxTerms
                 , "ignore_tf" .= ignoreFreq
                 , "fuzziness" .= fuzziness
                 , "prefix_length" .= prefixLength
                 , "analyzer" .= analyzer
                 , "boost" .= boost ]
-- | @filtered@ query: both parts are mandatory.
instance ToJSON FilteredQuery where
  toJSON (FilteredQuery query fFilter) =
    object [ "query" .= query
           , "filter" .= fFilter ]
-- | @dis_max@: tiebreaker and boost are optional.
instance ToJSON DisMaxQuery where
  toJSON (DisMaxQuery queries tiebreaker boost) =
    omitNulls base
    where base = [ "queries" .= queries
                 , "boost" .= boost
                 , "tie_breaker" .= tiebreaker ]
-- | @common@ terms query, keyed by the field name; optional settings
-- are dropped.
instance ToJSON CommonTermsQuery where
  toJSON (CommonTermsQuery (FieldName fieldName)
          (QueryString query) cf lfo hfo msm
          boost analyzer disableCoord) =
    object [fieldName .= omitNulls base ]
    where base = [ "query" .= query
                 , "cutoff_frequency" .= cf
                 , "low_freq_operator" .= lfo
                 , "minimum_should_match" .= msm
                 , "boost" .= boost
                 , "analyzer" .= analyzer
                 , "disable_coord" .= disableCoord
                 , "high_freq_operator" .= hfo ]
-- | Minimum-match is either a single spec or a low/high frequency pair.
instance ToJSON CommonMinimumMatch where
  toJSON (CommonMinimumMatch mm) = toJSON mm
  toJSON (CommonMinimumMatchHighLow (MinimumMatchHighLow lowF highF)) =
    object [ "low_freq" .= lowF
           , "high_freq" .= highF ]
-- | @boosting@ query: all three components are mandatory.
instance ToJSON BoostingQuery where
  toJSON (BoostingQuery bqPositiveQuery bqNegativeQuery bqNegativeBoost) =
    object [ "positive" .= bqPositiveQuery
           , "negative" .= bqNegativeQuery
           , "negative_boost" .= bqNegativeBoost ]
-- | @bool@ query: every clause list/setting is optional.
instance ToJSON BoolQuery where
  toJSON (BoolQuery mustM notM shouldM bqMin boost disableCoord) =
    omitNulls base
    where base = [ "must" .= mustM
                 , "must_not" .= notM
                 , "should" .= shouldM
                 , "minimum_should_match" .= bqMin
                 , "boost" .= boost
                 , "disable_coord" .= disableCoord ]
-- | @match@ query, keyed by the field name; only the query text is
-- mandatory.
instance ToJSON MatchQuery where
  toJSON (MatchQuery (FieldName fieldName)
          (QueryString mqQueryString) booleanOperator
          zeroTermsQuery cutoffFrequency matchQueryType
          analyzer maxExpansions lenient boost) =
    object [ fieldName .= omitNulls base ]
    where base = [ "query" .= mqQueryString
                 , "operator" .= booleanOperator
                 , "zero_terms_query" .= zeroTermsQuery
                 , "cutoff_frequency" .= cutoffFrequency
                 , "type" .= matchQueryType
                 , "analyzer" .= analyzer
                 , "max_expansions" .= maxExpansions
                 , "lenient" .= lenient
                 , "boost" .= boost ]
-- | @multi_match@: note this serializer wraps itself in the
-- "multi_match" key, unlike its siblings which rely on the Query
-- wrapper.
instance ToJSON MultiMatchQuery where
  toJSON (MultiMatchQuery fields (QueryString query) boolOp
          ztQ tb mmqt cf analyzer maxEx lenient) =
    object ["multi_match" .= omitNulls base]
    -- NOTE(review): 'fmap toJSON' is redundant here — serializing the
    -- list directly produces the same Array.  Left as-is for safety.
    where base = [ "fields" .= fmap toJSON fields
                 , "query" .= query
                 , "operator" .= boolOp
                 , "zero_terms_query" .= ztQ
                 , "tiebreaker" .= tb
                 , "type" .= mmqt
                 , "cutoff_frequency" .= cf
                 , "analyzer" .= analyzer
                 , "max_expansions" .= maxEx
                 , "lenient" .= lenient ]
-- | Multi-match execution strategy, using ES's snake_case spellings.
instance ToJSON MultiMatchQueryType where
  toJSON MultiMatchBestFields = "best_fields"
  toJSON MultiMatchMostFields = "most_fields"
  toJSON MultiMatchCrossFields = "cross_fields"
  toJSON MultiMatchPhrase = "phrase"
  toJSON MultiMatchPhrasePrefix = "phrase_prefix"
-- | Boolean operator for match queries.
instance ToJSON BooleanOperator where
  toJSON And = String "and"
  toJSON Or = String "or"
-- | Behaviour when the analyzer strips all terms from the query.
instance ToJSON ZeroTermsQuery where
  toJSON ZeroTermsNone = String "none"
  toJSON ZeroTermsAll = String "all"
-- | Match query phrase modes.
instance ToJSON MatchQueryType where
  toJSON MatchPhrase = "phrase"
  toJSON MatchPhrasePrefix = "phrase_prefix"
-- | Field names serialize as their bare text.
instance ToJSON FieldName where
  toJSON (FieldName fieldName) = String fieldName
-- The following empty instances take their (de)serialization from the
-- classes' default method implementations, so each newtype wrapper
-- serializes exactly like its underlying type.
instance ToJSON ReplicaCount
instance ToJSON ShardCount
instance ToJSON CutoffFrequency
instance ToJSON Analyzer
instance ToJSON MaxExpansions
instance ToJSON Lenient
instance ToJSON Boost
instance ToJSON Version
instance ToJSON Tiebreaker
instance ToJSON MinimumMatch
instance ToJSON DisableCoord
instance ToJSON PrefixLength
instance ToJSON Fuzziness
instance ToJSON IgnoreTermFrequency
instance ToJSON MaxQueryTerms
instance ToJSON TypeName
instance ToJSON IndexName
instance ToJSON TemplateName
instance ToJSON TemplatePattern
instance ToJSON BoostTerms
instance ToJSON MaxWordLength
instance ToJSON MinWordLength
instance ToJSON MaxDocFrequency
instance ToJSON MinDocFrequency
instance ToJSON PhraseSlop
instance ToJSON StopWord
instance ToJSON QueryPath
instance ToJSON MinimumTermFrequency
instance ToJSON PercentMatch
instance ToJSON MappingName
instance ToJSON DocId
instance ToJSON QueryString
instance ToJSON AllowLeadingWildcard
instance ToJSON LowercaseExpanded
instance ToJSON AnalyzeWildcard
instance ToJSON GeneratePhraseQueries
instance ToJSON Locale
instance ToJSON EnablePositionIncrements
instance FromJSON Version
instance FromJSON IndexName
instance FromJSON MappingName
instance FromJSON DocId
-- | Parse the root-endpoint status document.  Only "ok" is optional.
instance FromJSON Status where
  parseJSON (Object v) = Status <$>
                         v .:? "ok" <*>
                         v .: "status" <*>
                         v .: "name" <*>
                         v .: "version" <*>
                         v .: "tagline"
  parseJSON _          = empty
-- | Index settings render as the nested
-- @{"settings": {"index": {...}}}@ structure ES expects on creation.
instance ToJSON IndexSettings where
  toJSON (IndexSettings s r) = object ["settings" .=
                                 object ["index" .=
                                   object ["number_of_shards" .= s, "number_of_replicas" .= r]
                                 ]
                               ]
-- | An index template is the template pattern plus the shallow merge of
-- all its mappings, with the settings object merged in at the top level.
instance ToJSON IndexTemplate where
  toJSON (IndexTemplate p s m) = merge
    (object [ "template" .= p
            , "mappings" .= foldl' merge (object []) m
            ])
    (toJSON s)
    where
      -- Shallow, left-biased union of two JSON objects; 'Null' on the
      -- right acts as an identity.  Anything else is a programming
      -- error, reported with context instead of a bare 'undefined'.
      merge (Object o1) (Object o2) = toJSON $ HM.union o1 o2
      merge o Null = o
      merge _ _ = error "IndexTemplate.toJSON: merge applied to non-object JSON values"
-- | Document-lookup responses: when "found" is false (or missing) the
-- payload is 'Nothing'; otherwise the same object is re-parsed as an
-- 'EsResultFound'.
instance (FromJSON a) => FromJSON (EsResult a) where
  parseJSON jsonVal@(Object v) = do
    found <- v .:? "found" .!= False
    fr <- if found
             then parseJSON jsonVal
             else return Nothing
    EsResult <$> v .: "_index" <*>
                 v .: "_type" <*>
                 v .: "_id" <*>
                 pure fr
  parseJSON _          = empty
-- | The "found" half of a lookup: version plus source document.
instance (FromJSON a) => FromJSON (EsResultFound a) where
  parseJSON (Object v) = EsResultFound <$>
                         v .: "_version" <*>
                         v .: "_source"
  parseJSON _          = empty
-- | Search request body; unset clauses are dropped.  The ignored
-- constructor field is not part of the body — presumably it travels in
-- the request path instead (TODO confirm against the caller).
instance ToJSON Search where
  toJSON (Search query sFilter sort searchAggs highlight sTrackSortScores sFrom sSize _ sFields sSource) =
    omitNulls [ "query" .= query
              , "filter" .= sFilter
              , "sort" .= sort
              , "aggregations" .= searchAggs
              , "highlight" .= highlight
              , "from" .= sFrom
              , "size" .= sSize
              , "track_scores" .= sTrackSortScores
              , "fields" .= sFields
              , "_source" .= sSource]
-- | @_source@ filtering: 'False' disables it, otherwise patterns or an
-- include/exclude pair.
instance ToJSON Source where
    toJSON NoSource                         = toJSON False
    toJSON (SourcePatterns patterns)        = toJSON patterns
    toJSON (SourceIncludeExclude incl excl) = object [ "include" .= incl, "exclude" .= excl ]
-- | A single pattern serializes bare, several as an array.
instance ToJSON PatternOrPatterns where
  toJSON (PopPattern pattern)   = toJSON pattern
  toJSON (PopPatterns patterns) = toJSON patterns
-- | Include/Exclude/Pattern are transparent wrappers over their
-- pattern lists.
instance ToJSON Include where
  toJSON (Include patterns) = toJSON patterns
instance ToJSON Exclude where
  toJSON (Exclude patterns) = toJSON patterns
instance ToJSON Pattern where
  toJSON (Pattern pattern) = toJSON pattern
-- | Per-field highlight settings, keyed by field; an absent settings
-- record becomes an empty object.
instance ToJSON FieldHighlight where
    toJSON (FieldHighlight (FieldName fName) (Just fSettings)) =
        object [ fName .= fSettings ]
    toJSON (FieldHighlight (FieldName fName) Nothing) =
        object [ fName .= emptyObject ]
-- | Top-level highlight block: the per-field map plus any global
-- highlighter settings flattened alongside it.
instance ToJSON Highlights where
    toJSON (Highlights global fields) =
        omitNulls (("fields" .= fields)
                  : highlightSettingsPairs global)
instance ToJSON HighlightSettings where
    toJSON hs = omitNulls (highlightSettingsPairs (Just hs))
-- | Dispatch on highlighter type, producing the key/value pairs for the
-- chosen backend ('Nothing' contributes no pairs).
highlightSettingsPairs :: Maybe HighlightSettings -> [Pair]
highlightSettingsPairs Nothing = []
highlightSettingsPairs (Just (Plain plh)) = plainHighPairs (Just plh)
highlightSettingsPairs (Just (Postings ph)) = postHighPairs (Just ph)
highlightSettingsPairs (Just (FastVector fvh)) = fastVectorHighPairs (Just fvh)
-- | Pairs for the plain highlighter: type tag plus common and
-- non-postings settings.
plainHighPairs :: Maybe PlainHighlight -> [Pair]
plainHighPairs Nothing = []
plainHighPairs (Just (PlainHighlight plCom plNonPost)) =
    [ "type" .= String "plain"]
    ++ commonHighlightPairs plCom
    ++ nonPostingsToPairs plNonPost
-- | Pairs for the postings highlighter: type tag plus common settings.
postHighPairs :: Maybe PostingsHighlight -> [Pair]
postHighPairs Nothing = []
postHighPairs (Just (PostingsHighlight pCom)) =
    ("type" .= String "postings")
    : commonHighlightPairs pCom
-- | Pairs for the fast-vector highlighter ("fvh"): its own options plus
-- the common and non-postings settings.
--
-- Fix: the phrase-limit option was previously emitted as "phraseLimit",
-- which Elasticsearch does not recognise; its documented name is
-- "phrase_limit".
fastVectorHighPairs :: Maybe FastVectorHighlight -> [Pair]
fastVectorHighPairs Nothing = []
fastVectorHighPairs (Just
                       (FastVectorHighlight fvCom fvNonPostSettings fvBoundChars
                                            fvBoundMaxScan fvFragOff fvMatchedFields
                                            fvPhraseLim)) =
    [ "type" .= String "fvh"
    , "boundary_chars" .= fvBoundChars
    , "boundary_max_scan" .= fvBoundMaxScan
    , "fragment_offset" .= fvFragOff
    , "matched_fields" .= fvMatchedFields
    , "phrase_limit" .= fvPhraseLim]
    ++ commonHighlightPairs fvCom
    ++ nonPostingsToPairs fvNonPostSettings
-- | Settings shared by all highlighter backends.
--
-- Fix: the field-match option was previously emitted as
-- "require_fieldMatch"; Elasticsearch's documented name is
-- "require_field_match".
commonHighlightPairs :: Maybe CommonHighlight -> [Pair]
commonHighlightPairs Nothing = []
commonHighlightPairs (Just (CommonHighlight chScore chForceSource chTag chEncoder
                                  chNoMatchSize chHighlightQuery
                                  chRequireFieldMatch)) =
    [ "order" .= chScore
    , "force_source" .= chForceSource
    , "encoder" .= chEncoder
    , "no_match_size" .= chNoMatchSize
    , "highlight_query" .= chHighlightQuery
    , "require_field_match" .= chRequireFieldMatch]
    ++ highlightTagToPairs chTag
-- | Fragment sizing options shared by the plain and fvh highlighters.
nonPostingsToPairs :: Maybe NonPostings -> [Pair]
nonPostingsToPairs Nothing = []
nonPostingsToPairs (Just (NonPostings npFragSize npNumOfFrags)) =
    [ "fragment_size" .= npFragSize
    , "number_of_fragments" .= npNumOfFrags]
-- | Highlight snippet encoder.
instance ToJSON HighlightEncoder where
    toJSON DefaultEncoder = String "default"
    toJSON HTMLEncoder    = String "html"
-- | Tagging scheme: either the built-in "default" schema or explicit
-- pre/post tag lists.
highlightTagToPairs :: Maybe HighlightTag -> [Pair]
highlightTagToPairs (Just (TagSchema _)) = [ "scheme" .= String "default"]
highlightTagToPairs (Just (CustomTags (pre, post))) = [ "pre_tags" .= pre
                                                      , "post_tags" .= post]
highlightTagToPairs Nothing = []
-- | Sort specs: a default sort is keyed by its field with optional
-- settings dropped; a geo-distance sort keys the lat/lon under the
-- field and always carries unit and order.
instance ToJSON SortSpec where
  toJSON (DefaultSortSpec
          (DefaultSort (FieldName dsSortFieldName) dsSortOrder dsIgnoreUnmapped
           dsSortMode dsMissingSort dsNestedFilter)) =
    object [dsSortFieldName .= omitNulls base] where
      base = [ "order" .= dsSortOrder
             , "ignore_unmapped" .= dsIgnoreUnmapped
             , "mode" .= dsSortMode
             , "missing" .= dsMissingSort
             , "nested_filter" .= dsNestedFilter ]
  toJSON (GeoDistanceSortSpec gdsSortOrder (GeoPoint (FieldName field) gdsLatLon) units) =
    object [ "unit" .= units
           , field .= gdsLatLon
           , "order" .= gdsSortOrder ]
-- | Sort direction.
instance ToJSON SortOrder where
  toJSON Ascending  = String "asc"
  toJSON Descending = String "desc"
-- | Aggregation mode for multi-valued sort fields.
instance ToJSON SortMode where
  toJSON SortMin = String "min"
  toJSON SortMax = String "max"
  toJSON SortSum = String "sum"
  toJSON SortAvg = String "avg"
-- | Placement of documents missing the sort field.
instance ToJSON Missing where
  toJSON LastMissing = String "_last"
  toJSON FirstMissing = String "_first"
  toJSON (CustomMissing txt) = String txt
-- | Score aggregation for parent/child queries.
instance ToJSON ScoreType where
  toJSON ScoreTypeMax  = "max"
  toJSON ScoreTypeAvg  = "avg"
  toJSON ScoreTypeSum  = "sum"
  toJSON ScoreTypeNone = "none"
-- | A distance renders as coefficient glued to its unit, e.g. "5km".
instance ToJSON Distance where
  toJSON (Distance dCoefficient dUnit) =
    String boltedTogether where
      coefText = showText dCoefficient
      (String unitText) = toJSON dUnit
      boltedTogether = mappend coefText unitText
-- | Distance unit abbreviations as ES expects them.
instance ToJSON DistanceUnit where
  toJSON Miles         = String "mi"
  toJSON Yards         = String "yd"
  toJSON Feet          = String "ft"
  toJSON Inches        = String "in"
  toJSON Kilometers    = String "km"
  toJSON Meters        = String "m"
  toJSON Centimeters   = String "cm"
  toJSON Millimeters   = String "mm"
  toJSON NauticalMiles = String "nmi"
-- | Geo-distance calculation algorithm.
instance ToJSON DistanceType where
  toJSON Arc       = String "arc"
  toJSON SloppyArc = String "sloppy_arc"
  toJSON Plane     = String "plane"
-- | Bounding-box optimization: disabled, or delegated to a filter type.
instance ToJSON OptimizeBbox where
  toJSON NoOptimizeBbox = String "none"
  toJSON (OptimizeGeoFilterType gft) = toJSON gft
-- | Bounding-box constraint keyed by its geo field.
instance ToJSON GeoBoundingBoxConstraint where
  toJSON (GeoBoundingBoxConstraint
          (FieldName gbbcGeoBBField) gbbcConstraintBox cache type') =
    object [gbbcGeoBBField .= gbbcConstraintBox
           , "_cache"  .= cache
           , "type" .= type']
-- | Geo filter execution strategy.
instance ToJSON GeoFilterType where
  toJSON GeoFilterMemory  = String "memory"
  toJSON GeoFilterIndexed = String "indexed"
-- | A box is its two opposing corners.
instance ToJSON GeoBoundingBox where
  toJSON (GeoBoundingBox gbbTopLeft gbbBottomRight) =
    object ["top_left" .= gbbTopLeft
           , "bottom_right" .= gbbBottomRight]
-- | Latitude/longitude pair.
instance ToJSON LatLon where
  toJSON (LatLon lLat lLon) =
    object ["lat"  .= lLat
           , "lon" .= lLon]
-- index for smaller ranges, fielddata for longer ranges
instance ToJSON RangeExecution where
  toJSON RangeExecutionIndex     = "index"
  toJSON RangeExecutionFielddata = "fielddata"
-- | Regexp flag sets: ALL, NONE, or a "|"-joined, de-duplicated list.
instance ToJSON RegexpFlags where
  toJSON AllRegexpFlags              = String "ALL"
  toJSON NoRegexpFlags               = String "NONE"
  toJSON (SomeRegexpFlags (h :| fs)) = String $ T.intercalate "|" flagStrs
    where flagStrs             = map flagStr . nub $ h:fs
          flagStr AnyString    = "ANYSTRING"
          flagStr Automaton    = "AUTOMATON"
          flagStr Complement   = "COMPLEMENT"
          flagStr Empty        = "EMPTY"
          flagStr Intersection = "INTERSECTION"
          flagStr Interval     = "INTERVAL"
-- | A term filter: @{"term": {field: value}}@.
instance ToJSON Term where
  toJSON (Term field value) = object ["term" .= object
                                      [field .= value]]
-- | Bool-filter clauses, each with its cache flag.
instance ToJSON BoolMatch where
  toJSON (MustMatch    term  cache) = object ["must"     .= term,
                                              "_cache" .= cache]
  toJSON (MustNotMatch term  cache) = object ["must_not" .= term,
                                              "_cache" .= cache]
  toJSON (ShouldMatch  terms cache) = object ["should"   .= fmap toJSON terms,
                                              "_cache" .= cache]
-- | Top-level search response; aggregations and scroll id are optional.
instance (FromJSON a) => FromJSON (SearchResult a) where
  parseJSON (Object v) = SearchResult <$>
                         v .: "took"         <*>
                         v .: "timed_out"    <*>
                         v .: "_shards"      <*>
                         v .: "hits"         <*>
                         v .:? "aggregations" <*>
                         v .:? "_scroll_id"
  parseJSON _          = empty
-- | The "hits" envelope: total, max score, and the hit list.
instance (FromJSON a) => FromJSON (SearchHits a) where
  parseJSON (Object v) = SearchHits <$>
                         v .: "total"     <*>
                         v .: "max_score" <*>
                         v .: "hits"
  parseJSON _          = empty
-- | An individual hit; only the highlight block is optional.
instance (FromJSON a) => FromJSON (Hit a) where
  parseJSON (Object v) = Hit <$>
                         v .: "_index" <*>
                         v .: "_type"  <*>
                         v .: "_id"    <*>
                         v .: "_score" <*>
                         v .: "_source" <*>
                         v .:? "highlight"
  parseJSON _          = empty
-- | Shard success/failure tallies from the response header.
instance FromJSON ShardResult where
  parseJSON (Object v) = ShardResult <$>
                         v .: "total"      <*>
                         v .: "successful" <*>
                         v .: "failed"
  parseJSON _          = empty
-- | Parse any JSON number, then range-check it through 'mkDocVersion'.
instance FromJSON DocVersion where
  parseJSON v = do
    i <- parseJSON v
    maybe (fail "DocVersion out of range") return $ mkDocVersion i
-- | Versions start at 1 and are capped at ES's 9.2e18 ceiling.
instance Bounded DocVersion where
  minBound = DocVersion 1
  maxBound = DocVersion 9200000000000000000 -- 9.2e+18
-- | Enum instance that refuses to step outside the 'Bounded' range,
-- reporting via the standard succError/predError helpers.
instance Enum DocVersion where
  succ x
    | x /= maxBound = DocVersion (succ $ docVersionNumber x)
    | otherwise     = succError "DocVersion"
  pred x
    | x /= minBound = DocVersion (pred $ docVersionNumber x)
    | otherwise     = predError "DocVersion"
  toEnum i =
    fromMaybe (error $ show i ++ " out of DocVersion range") $ mkDocVersion i
  fromEnum = docVersionNumber
  enumFrom = boundedEnumFrom
  enumFromThen = boundedEnumFromThen
| sjakobi/bloodhound | src/Database/Bloodhound/Types.hs | bsd-3-clause | 90,900 | 0 | 17 | 28,086 | 18,263 | 10,078 | 8,185 | 1,802 | 16 |
{- |
Module : $Header$
Description : Test case for CSMOF parsing, parses a file and shows the resulting CSMOF metamodel
Copyright : (c) Daniel Calegari Universidad de la Republica, Uruguay 2013
License : GPLv2 or higher, see LICENSE.txt
Maintainer : dcalegar@fing.edu.uy
Stability : provisional
Portability : portable
-}
-- From the CSMOF folder run: ghc -i.. -o main Test_Parser.hs
import CSMOF.As
import CSMOF.Parser
import CSMOF.Print
import Text.XML.Light
import System.IO
import Common.Doc
import Common.DocUtils
-- | Parse the sample XMI file and pretty-print the resulting CSMOF
-- metamodel.  'withFile' guarantees the handle is closed even if
-- parsing or printing throws, unlike the previous explicit
-- openFile/hClose pair.
main :: IO ()
main =
  withFile "RDBMSWMult_TechRep.xmi" ReadMode $ \handle -> do
    contents <- hGetContents handle
    case parseXMLDoc contents of
      Nothing -> putStr "VACIO"
      Just el -> print $ pretty $ parseCSMOF el
| keithodulaigh/Hets | CSMOF/tests/Test_Parser.hs | gpl-2.0 | 1,009 | 0 | 11 | 278 | 122 | 61 | 61 | 16 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Compat.ReadP
-- Copyright : (c) The University of Glasgow 2002
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Portability : portable
--
-- This code was originally in Distribution.Compat.ReadP. Please see that file
-- for provenance. The tests have been integrated into the test framework.
-- Some properties cannot be tested, as they hold over arbitrary ReadP values,
-- and we don't have a good Arbitrary instance (nor Show instance) for ReadP.
--
module UnitTests.Distribution.Compat.ReadP
( tests
-- * Properties
-- $properties
) where
import Data.List
import Distribution.Compat.ReadP
import Test.Framework
import Test.Framework.Providers.QuickCheck2
-- | All runnable ReadP properties.  The commented-out entries quantify
-- over arbitrary 'ReadP' values, for which no good Arbitrary/Show
-- instance exists (see the module header).
tests :: [Test]
tests =
 [ testProperty "Get Nil" prop_Get_Nil
 , testProperty "Get Cons" prop_Get_Cons
 , testProperty "Look" prop_Look
 , testProperty "Fail" prop_Fail
 , testProperty "Return" prop_Return
 --, testProperty "Bind" prop_Bind
 --, testProperty "Plus" prop_Plus
 --, testProperty "LeftPlus" prop_LeftPlus
 --, testProperty "Gather" prop_Gather
 , testProperty "String Yes" prop_String_Yes
 , testProperty "String Maybe" prop_String_Maybe
 , testProperty "Munch" (prop_Munch evenChar)
 , testProperty "Munch1" (prop_Munch1 evenChar)
 --, testProperty "Choice" prop_Choice
 --, testProperty "ReadS" prop_ReadS
 ]
-- ---------------------------------------------------------------------------
-- QuickCheck properties that hold for the combinators
{- $properties
The following are QuickCheck specifications of what the combinators do.
These can be seen as formal specifications of the behavior of the
combinators.
We use bags to give semantics to the combinators.
-}
-- | A multiset, represented as an unordered list.
type Bag a = [a]
-- Equality on bags does not care about the order of elements.
(=~) :: Ord a => Bag a -> Bag a -> Bool
xs =~ ys = sort xs == sort ys
-- A special equality operator to avoid unresolved overloading
-- when testing the properties.
(=~.) :: Bag (Int,String) -> Bag (Int,String) -> Bool
(=~.) = (=~)
-- Here follow the properties:
-- | 'get' fails on empty input.
prop_Get_Nil :: Bool
prop_Get_Nil =
  readP_to_S get [] =~ []
-- | 'get' consumes exactly one character.
prop_Get_Cons :: Char -> [Char] -> Bool
prop_Get_Cons c s =
  readP_to_S get (c:s) =~ [(c,s)]
-- | 'look' returns the input without consuming it.
prop_Look :: String -> Bool
prop_Look s =
  readP_to_S look s =~ [(s,s)]
-- | 'pfail' never produces a parse.
prop_Fail :: String -> Bool
prop_Fail s =
  readP_to_S pfail s =~. []
-- | 'return' produces its value without consuming input.
prop_Return :: Int -> String -> Bool
prop_Return x s =
  readP_to_S (return x) s =~. [(x,s)]
{-
prop_Bind p k s =
readP_to_S (p >>= k) s =~.
[ ys''
| (x,s') <- readP_to_S p s
, ys'' <- readP_to_S (k (x::Int)) s'
]
prop_Plus :: ReadP Int Int -> ReadP Int Int -> String -> Bool
prop_Plus p q s =
readP_to_S (p +++ q) s =~.
(readP_to_S p s ++ readP_to_S q s)
prop_LeftPlus :: ReadP Int Int -> ReadP Int Int -> String -> Bool
prop_LeftPlus p q s =
readP_to_S (p <++ q) s =~.
(readP_to_S p s +<+ readP_to_S q s)
where
[] +<+ ys = ys
xs +<+ _ = xs
prop_Gather s =
forAll readPWithoutReadS $ \p ->
readP_to_S (gather p) s =~
[ ((pre,x::Int),s')
| (x,s') <- readP_to_S p s
, let pre = take (length s - length s') s
]
-}
-- | 'string' succeeds when its target is a prefix of the input.
prop_String_Yes :: String -> [Char] -> Bool
prop_String_Yes this s =
  readP_to_S (string this) (this ++ s) =~
    [(this,s)]
-- | 'string' succeeds exactly when the target is a prefix.
prop_String_Maybe :: String -> String -> Bool
prop_String_Maybe this s =
  readP_to_S (string this) s =~
    [(this, drop (length this) s) | this `isPrefixOf` s]
-- | 'munch' behaves like 'span' on the predicate.
prop_Munch :: (Char -> Bool) -> String -> Bool
prop_Munch p s =
  readP_to_S (munch p) s =~
    [(takeWhile p s, dropWhile p s)]
-- | 'munch1' is 'munch' restricted to non-empty matches.
prop_Munch1 :: (Char -> Bool) -> String -> Bool
prop_Munch1 p s =
  readP_to_S (munch1 p) s =~
    [(res,s') | let (res,s') = (takeWhile p s, dropWhile p s), not (null res)]
{-
prop_Choice :: [ReadP Int Int] -> String -> Bool
prop_Choice ps s =
readP_to_S (choice ps) s =~.
readP_to_S (foldr (+++) pfail ps) s
prop_ReadS :: ReadS Int -> String -> Bool
prop_ReadS r s =
readP_to_S (readS_to_P r) s =~. r s
-}
-- | True iff the character's code point is even; a simple total
-- predicate used to exercise the munch properties.
evenChar :: Char -> Bool
evenChar c = fromEnum c `mod` 2 == 0
| jwiegley/ghc-release | libraries/Cabal/cabal/tests/UnitTests/Distribution/Compat/ReadP.hs | gpl-3.0 | 4,200 | 0 | 12 | 868 | 756 | 421 | 335 | 55 | 1 |
{-# LANGUAGE PatternSynonyms #-}
{-
Copyright 2017 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module CodeWorld (
-- * Entry points
drawingOf,
animationOf,
simulationOf,
interactionOf,
collaborationOf,
unsafeCollaborationOf,
-- * Pictures
Picture,
TextStyle(..),
Font(..),
blank,
path,
thickPath,
polygon,
thickPolygon,
solidPolygon,
curve,
thickCurve,
loop,
thickLoop,
solidLoop,
rectangle,
solidRectangle,
thickRectangle,
circle,
solidCircle,
thickCircle,
arc,
sector,
thickArc,
text,
styledText,
colored,
coloured,
translated,
scaled,
dilated,
rotated,
pictures,
(<>),
(&),
coordinatePlane,
codeWorldLogo,
Point,
Vector,
vectorLength,
vectorDirection,
vectorSum,
vectorDifference,
scaledVector,
rotatedVector,
dotProduct,
-- * Colors
Color(..),
Colour,
pattern RGB,
pattern HSL,
black,
white,
red,
green,
blue,
cyan,
magenta,
yellow,
aquamarine,
orange,
azure,
violet,
chartreuse,
rose,
brown,
pink,
purple,
gray,
grey,
mixed,
lighter,
light,
darker,
dark,
brighter,
bright,
duller,
dull,
translucent,
assortedColors,
hue,
saturation,
luminosity,
alpha,
fromHSL,
-- * Events
Event(..),
MouseButton(..),
pattern PointerPress,
pattern PointerRelease,
pattern PointerMovement,
-- * Debugging
trace
) where
import CodeWorld.Color
import CodeWorld.Picture
import CodeWorld.Event
import CodeWorld.Driver
import Data.Monoid
| venkat24/codeworld | codeworld-api/src/CodeWorld.hs | apache-2.0 | 2,283 | 0 | 5 | 666 | 360 | 242 | 118 | 103 | 0 |
module Main where
import Graphics.Vty
import Control.Applicative
import Control.Arrow
import Control.Monad.RWS
import Data.Sequence (Seq, (<|) )
import qualified Data.Sequence as Seq
import Data.Foldable
eventBufferSize = 1000
type App = RWST Vty () (Seq String) IO
-- Bring up the terminal, run the interaction loop starting from the
-- "don't exit" state, then restore the terminal on the way out.
main = do
    vty <- mkVty defaultConfig
    _ <- execRWST (vtyInteract False) vty Seq.empty
    shutdown vty
-- | Redraw the screen, then keep handling events until
-- 'handleNextEvent' reports the exit key was pressed.
vtyInteract :: Bool -> App ()
vtyInteract shouldExit = do
    updateDisplay
    unless shouldExit $ handleNextEvent >>= vtyInteract
-- | Render the help line above the accumulated event log.
updateDisplay :: App ()
updateDisplay = do
    let info = string defAttr "Press ESC to exit."
    -- One image line per logged event, newest first.
    eventLog <- foldMap (string defAttr) <$> get
    let pic = picForImage $ info <-> eventLog
    vty <- ask
    liftIO $ update vty pic
-- | Block for the next terminal event, prepend it to the (bounded)
-- event log, and return whether it was ESC — the exit signal.
handleNextEvent :: App Bool
handleNextEvent = ask >>= liftIO . nextEvent >>= handleEvent
    where
        handleEvent e = do
            modify $ (<|) (show e) >>> Seq.take eventBufferSize
            return $ e == EvKey KEsc []
| jtdaugherty/vty | test/EventEcho.hs | bsd-3-clause | 966 | 0 | 13 | 236 | 320 | 161 | 159 | 29 | 1 |
no = elem 1 [] : [] | bitemyapp/apply-refact | tests/examples/Default43.hs | bsd-3-clause | 20 | 0 | 7 | 7 | 19 | 9 | 10 | 1 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UnliftedNewtypes #-}
-- In tcConDecl, there is a place where a panic can happen if
-- a newtype has multiple fields. This test is here to make
-- sure that the appropriate validity checks happen before
-- we get to the panic. See Note [Kind-checking the field type].
module UnliftedNewtypesMultiFieldGadt where
import GHC.Exts
import Data.Kind
-- Deliberately ill-formed: a newtype constructor must have exactly one
-- field, but 'FooC' declares two.  The test checks GHC rejects this
-- cleanly instead of panicking (see the module comment above).
newtype Foo :: TYPE 'IntRep where
  FooC :: Bool -> Char -> Foo
| sdiehl/ghc | testsuite/tests/typecheck/should_fail/UnliftedNewtypesMultiFieldGadt.hs | bsd-3-clause | 559 | 0 | 7 | 96 | 49 | 33 | 16 | -1 | -1 |
module Infix3 where
-- Infix constructor: a 'T1 a b' pairs a @b@ (left) with an @a@ (right).
data T1 a b = b :#: a
-- | Difference of the two components: right operand minus left operand.
g :: (T1 Int Int) -> Int
g ((x :#: y)) = y - x
f x y
= g (error
"x :$: y no longer defined for T1 at line: 5")
| kmate/HaRe | old/testing/removeCon/Infix3AST.hs | bsd-3-clause | 185 | 0 | 8 | 75 | 75 | 41 | 34 | 7 | 1 |
{-# LANGUAGE TypeFamilies, FlexibleInstances, UndecidableInstances #-}
module TH_tf3 where
type family T a
$( [d| foo :: T [a] ~ Bool => a -> a
foo x = x |] )
$( [d| class C a
instance a ~ Int => C a |] ) | spacekitteh/smcghc | testsuite/tests/th/TH_tf3.hs | bsd-3-clause | 223 | 0 | 6 | 63 | 33 | 22 | 11 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module ASCII () where
import Tree
import PatternGenerator
-- Phantom-parameter alias fed to the splice below.
type EP g = Bool
-- Top-level Template Haskell splice (from "PatternGenerator") run over
-- the type name ''EP and the characters A..Z.
templateFoo ''EP ['A'..'Z']
| ezyang/ghc | testsuite/tests/th/should_compile/T13949/ASCII.hs | bsd-3-clause | 146 | 0 | 6 | 30 | 36 | 22 | 14 | 6 | 0 |
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE DeriveAnyClass #-}
-- Pretty-printer test fixtures: each declaration exercises a different
-- deriving-clause shape (bare, parenthesised, multi-class, and the
-- three explicit strategies).
data Foo a = F Int | A a
    deriving Show
data Foo1 a = F1 Int | A1 a
    deriving (Show)
data Foo2 a = F2 Int | A2 a
    deriving (Show, Eq)
data FooStock = FS Int
    deriving stock Show
data FooAnyClass = Fa Int
    deriving anyclass Show
newtype FooNewType = Fn Int
    deriving newtype (Show)
| ezyang/ghc | testsuite/tests/printer/Ppr018.hs | bsd-3-clause | 361 | 0 | 6 | 84 | 114 | 67 | 47 | -1 | -1 |
{-# LANGUAGE TypeFamilies, EmptyDataDecls #-}
module T2639 where
-- Empty marker type; its 'Dual' is itself.
data Eps
data family Work a v
data instance Work Eps v = Eps v
type family Dual a
type instance Dual Eps = Eps
-- | 'connect' requires the two session types to be mutual duals.
class Connect s where
  connect :: (Dual s ~ c, Dual c ~ s) => Work s a -> Work c b -> (a,b)
instance Connect Eps where
  connect (Eps a) (Eps b) = (a,b)
| urbanslug/ghc | testsuite/tests/indexed-types/should_compile/T2639.hs | bsd-3-clause | 340 | 0 | 10 | 82 | 145 | 79 | 66 | -1 | -1 |
-- | Wrappers around "Futhark.Compiler" to provide functionality
-- useful for Template Haskell.
module Futhark.Compiler.TH
(embedBasis)
where
import Control.Monad.Except (runExceptT)
import Language.Futhark.TH ()
import Language.Haskell.TH.Syntax (Exp, Q, runIO, lift, qAddDependentFile)
import System.FilePath
import Futhark.Compiler
import Futhark.Util (directoryContents)
-- | All files with a @.fut@ extension inside the given directory
-- (as reported by 'directoryContents').
futFiles :: FilePath -> IO [FilePath]
futFiles dir = do
  entries <- directoryContents dir
  return [f | f <- entries, takeExtension f == ".fut"]
-- | Compile-time: read a library rooted at the given path and package
-- it as a 'Basis' with a single entry point.  Compilation failures
-- abort TH elaboration via 'error'.
readBasis :: ImportPaths -> FilePath -> String -> Q Basis
readBasis search_path fpath entry = do
  files <- runIO $ futFiles fpath
  -- In many cases, 'fpath' is a single file that imports others.  We
  -- assume every .fut file in the containing directory may affect the
  -- result, and register them all as TH dependencies.  Over-approximating
  -- here only costs a few unnecessary recompiles.
  all_files <- runIO $ futFiles $ takeDirectory fpath
  mapM_ qAddDependentFile all_files
  res <- runIO $ runExceptT $ readLibrary False emptyBasis search_path files
  case res of
    Right (_, _, imps, src) ->
      return $ Basis imps src [entry]
    Left err -> error $ show err
-- | At compile-time, produce an 'Exp' corresponding to a 'Basis'.
-- The 'FilePath' must refer to a @.fut@ file; the resulting expression
-- is the lifted 'Basis' from 'readBasis'.
embedBasis :: ImportPaths -> FilePath -> String -> Q Exp
embedBasis search_path fpath entry =
  lift =<< readBasis search_path fpath entry
| ihc/futhark | src/Futhark/Compiler/TH.hs | isc | 1,474 | 0 | 12 | 262 | 330 | 177 | 153 | 24 | 2 |
-- A Context-Free Grammar Library
--
-- Fixed so the module actually compiles: the module header was missing
-- 'where'; 'BNF' had no data constructor; and the 'RuleDef' alternative
-- carried a datatype context referencing an undefined 'Terminal' class
-- (datatype contexts are deprecated and the class is not in scope), so
-- the constraint is dropped.
module ContextFreeGrammar where

-- | A grammar in BNF is a collection of named rules.
newtype BNF a = BNF [Rule a]

-- | A named rule together with the parts that define it.
data Rule a = Rule { ruleName :: String
                   , ruleDef :: [RuleDef a]
                   }

-- | The right-hand side of a rule: a terminal symbol, a reference to
-- another rule by name, or a sequence of parts.
data RuleDef a = Term a
               | RuleRef String
               | ConcatOf [RuleDef a]
| shouya/thinking-dumps | automata/ContextFreeGrammar.hs | mit | 300 | 2 | 10 | 115 | 85 | 48 | 37 | -1 | -1 |
{-# htermination (lcm :: MyInt -> MyInt -> MyInt) #-}
import qualified Prelude
-- Self-contained Peano-arithmetic implementation of 'lcm', generated as
-- input for a termination prover (see the htermination pragma above).
-- The unusual style (local Bool/Int/Ordering types, semicolon-terminated
-- equations, one tiny helper per case split) is deliberate: the prover
-- analyses exactly this shape, so the definitions must not be "cleaned up".
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
data Ordering = LT | EQ | GT ;
-- Negation just flips the sign constructor.
primNegInt :: MyInt -> MyInt;
primNegInt (Pos x) = Neg x;
primNegInt (Neg x) = Pos x;
negate :: MyInt -> MyInt
negate = primNegInt;
-- absReal*: absolute value, with each guard split into its own helper.
absReal0 x MyTrue = negate x;
otherwise :: MyBool;
otherwise = MyTrue;
absReal1 x MyTrue = x;
absReal1 x MyFalse = absReal0 x otherwise;
fromInt :: MyInt -> MyInt
fromInt x = x;
-- Structural comparison on naturals, lifted to signed integers.
primCmpNat :: Nat -> Nat -> Ordering;
primCmpNat Zero Zero = EQ;
primCmpNat Zero (Succ y) = LT;
primCmpNat (Succ x) Zero = GT;
primCmpNat (Succ x) (Succ y) = primCmpNat x y;
primCmpInt :: MyInt -> MyInt -> Ordering;
primCmpInt (Pos Zero) (Pos Zero) = EQ;
primCmpInt (Pos Zero) (Neg Zero) = EQ;
primCmpInt (Neg Zero) (Pos Zero) = EQ;
primCmpInt (Neg Zero) (Neg Zero) = EQ;
primCmpInt (Pos x) (Pos y) = primCmpNat x y;
primCmpInt (Pos x) (Neg y) = GT;
primCmpInt (Neg x) (Pos y) = LT;
primCmpInt (Neg x) (Neg y) = primCmpNat y x;
compare :: MyInt -> MyInt -> Ordering
compare = primCmpInt;
esEsOrdering :: Ordering -> Ordering -> MyBool
esEsOrdering LT LT = MyTrue;
esEsOrdering LT EQ = MyFalse;
esEsOrdering LT GT = MyFalse;
esEsOrdering EQ LT = MyFalse;
esEsOrdering EQ EQ = MyTrue;
esEsOrdering EQ GT = MyFalse;
esEsOrdering GT LT = MyFalse;
esEsOrdering GT EQ = MyFalse;
esEsOrdering GT GT = MyTrue;
not :: MyBool -> MyBool;
not MyTrue = MyFalse;
not MyFalse = MyTrue;
-- fsEs is (/=) on Ordering; gtEs is (>=) on MyInt.
fsEs :: Ordering -> Ordering -> MyBool
fsEs x y = not (esEsOrdering x y);
gtEs :: MyInt -> MyInt -> MyBool
gtEs x y = fsEs (compare x y) LT;
absReal2 x = absReal1 x (gtEs x (fromInt (Pos Zero)));
absReal x = absReal2 x;
abs :: MyInt -> MyInt
abs = absReal;
-- 'error' is deliberately modelled as a non-terminating computation.
stop :: MyBool -> a;
stop MyFalse = stop MyFalse;
error :: a;
error = stop MyTrue;
primGEqNatS :: Nat -> Nat -> MyBool;
primGEqNatS (Succ x) Zero = MyTrue;
primGEqNatS (Succ x) (Succ y) = primGEqNatS x y;
primGEqNatS Zero (Succ x) = MyFalse;
primGEqNatS Zero Zero = MyTrue;
-- Truncated subtraction on naturals (floors at Zero).
primMinusNatS :: Nat -> Nat -> Nat;
primMinusNatS (Succ x) (Succ y) = primMinusNatS x y;
primMinusNatS Zero (Succ y) = Zero;
primMinusNatS x Zero = x;
primModNatS0 x y MyTrue = primModNatS (primMinusNatS x (Succ y)) (Succ (Succ y));
primModNatS0 x y MyFalse = Succ x;
primModNatS :: Nat -> Nat -> Nat;
primModNatS Zero Zero = error;
primModNatS Zero (Succ x) = Zero;
primModNatS (Succ x) Zero = error;
primModNatS (Succ x) (Succ Zero) = Zero;
primModNatS (Succ x) (Succ (Succ y)) = primModNatS0 x y (primGEqNatS x (Succ y));
-- Remainder on signed integers; the result takes the dividend's sign.
primRemInt :: MyInt -> MyInt -> MyInt;
primRemInt (Pos x) (Pos (Succ y)) = Pos (primModNatS x (Succ y));
primRemInt (Pos x) (Neg (Succ y)) = Pos (primModNatS x (Succ y));
primRemInt (Neg x) (Pos (Succ y)) = Neg (primModNatS x (Succ y));
primRemInt (Neg x) (Neg (Succ y)) = Neg (primModNatS x (Succ y));
primRemInt wx wy = error;
rem :: MyInt -> MyInt -> MyInt
rem = primRemInt;
-- Euclid's algorithm; the zero tests live in gcd0Gcd'1/gcd0Gcd'2.
gcd0Gcd'0 x y = gcd0Gcd' y (rem x y);
primEqNat :: Nat -> Nat -> MyBool;
primEqNat Zero Zero = MyTrue;
primEqNat Zero (Succ y) = MyFalse;
primEqNat (Succ x) Zero = MyFalse;
primEqNat (Succ x) (Succ y) = primEqNat x y;
primEqInt :: MyInt -> MyInt -> MyBool;
primEqInt (Pos (Succ x)) (Pos (Succ y)) = primEqNat x y;
primEqInt (Neg (Succ x)) (Neg (Succ y)) = primEqNat x y;
primEqInt (Pos Zero) (Neg Zero) = MyTrue;
primEqInt (Neg Zero) (Pos Zero) = MyTrue;
primEqInt (Neg Zero) (Neg Zero) = MyTrue;
primEqInt (Pos Zero) (Pos Zero) = MyTrue;
primEqInt xx xy = MyFalse;
esEs :: MyInt -> MyInt -> MyBool
esEs = primEqInt;
gcd0Gcd'1 MyTrue x xz = x;
gcd0Gcd'1 yu yv yw = gcd0Gcd'0 yv yw;
gcd0Gcd'2 x xz = gcd0Gcd'1 (esEs xz (fromInt (Pos Zero))) x xz;
gcd0Gcd'2 yx yy = gcd0Gcd'0 yx yy;
gcd0Gcd' x xz = gcd0Gcd'2 x xz;
gcd0Gcd' x y = gcd0Gcd'0 x y;
gcd0 x y = gcd0Gcd' (abs x) (abs y);
-- gcd with explicit zero checks: gcd 0 0 is an error.
gcd1 MyTrue yz zu = error;
gcd1 zv zw zx = gcd0 zw zx;
gcd2 MyTrue yz zu = gcd1 (esEs zu (fromInt (Pos Zero))) yz zu;
gcd2 zy zz vuu = gcd0 zz vuu;
gcd3 yz zu = gcd2 (esEs yz (fromInt (Pos Zero))) yz zu;
gcd3 vuv vuw = gcd0 vuv vuw;
gcd yz zu = gcd3 yz zu;
gcd x y = gcd0 x y;
primDivNatS0 x y MyTrue = Succ (primDivNatS (primMinusNatS x y) (Succ y));
primDivNatS0 x y MyFalse = Zero;
primDivNatS :: Nat -> Nat -> Nat;
primDivNatS Zero Zero = error;
primDivNatS (Succ x) Zero = error;
primDivNatS (Succ x) (Succ y) = primDivNatS0 x y (primGEqNatS x y);
primDivNatS Zero (Succ x) = Zero;
primQuotInt :: MyInt -> MyInt -> MyInt;
primQuotInt (Pos x) (Pos (Succ y)) = Pos (primDivNatS x (Succ y));
primQuotInt (Pos x) (Neg (Succ y)) = Neg (primDivNatS x (Succ y));
primQuotInt (Neg x) (Pos (Succ y)) = Neg (primDivNatS x (Succ y));
primQuotInt (Neg x) (Neg (Succ y)) = Pos (primDivNatS x (Succ y));
primQuotInt wz xu = error;
quot :: MyInt -> MyInt -> MyInt
quot = primQuotInt;
primPlusNat :: Nat -> Nat -> Nat;
primPlusNat Zero Zero = Zero;
primPlusNat Zero (Succ y) = Succ y;
primPlusNat (Succ x) Zero = Succ x;
primPlusNat (Succ x) (Succ y) = Succ (Succ (primPlusNat x y));
primMulNat :: Nat -> Nat -> Nat;
primMulNat Zero Zero = Zero;
primMulNat Zero (Succ y) = Zero;
primMulNat (Succ x) Zero = Zero;
primMulNat (Succ x) (Succ y) = primPlusNat (primMulNat x (Succ y)) (Succ y);
primMulInt :: MyInt -> MyInt -> MyInt;
primMulInt (Pos x) (Pos y) = Pos (primMulNat x y);
primMulInt (Pos x) (Neg y) = Neg (primMulNat x y);
primMulInt (Neg x) (Pos y) = Neg (primMulNat x y);
primMulInt (Neg x) (Neg y) = Pos (primMulNat x y);
sr :: MyInt -> MyInt -> MyInt
sr = primMulInt;
-- lcm x y = |(x `quot` gcd x y) * y|, with the zero cases short-circuited.
lcm0 x y = abs (sr (quot x (gcd x y)) y);
lcm1 MyTrue vux xw = fromInt (Pos Zero);
lcm1 vuy vuz vvu = lcm0 vuz vvu;
lcm2 vux xw = lcm1 (esEs vux (fromInt (Pos Zero))) vux xw;
lcm2 vvv vvw = lcm0 vvv vvw;
lcm3 MyTrue xv vvx = fromInt (Pos Zero);
lcm3 vvy vvz vwu = lcm2 vvz vwu;
lcm4 xv vvx = lcm3 (esEs vvx (fromInt (Pos Zero))) xv vvx;
lcm4 vwv vww = lcm2 vwv vww;
lcm xv vvx = lcm4 xv vvx;
lcm vux xw = lcm2 vux xw;
lcm x y = lcm0 x y;
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/lcm_1.hs | mit | 6,102 | 0 | 11 | 1,312 | 3,090 | 1,611 | 1,479 | 159 | 1 |
module Dama.Error (Error(Error, UnknownError)) where
import Data.Semigroup (Semigroup (..))

import Dama.Location
-- | A parser error: either completely unknown, or a message together
-- with the 'Location' at which it occurred.
data Error = UnknownError | Error Location String
    deriving Show
-- Errors combine by keeping the one whose location is furthest along
-- (i.e. the parse that made the most progress); 'UnknownError' is the
-- identity.  The explicit 'Semigroup' instance is required on
-- GHC >= 8.4 (base >= 4.11), where Semigroup is a superclass of Monoid;
-- without it this module no longer compiles.
instance Semigroup Error where
    Error la sa <> Error lb sb
        | lb > la = Error lb sb
        | otherwise = Error la sa
    Error l s <> _ = Error l s
    _ <> Error l s = Error l s
    _ <> _ = UnknownError

instance Monoid Error where
    mempty = UnknownError
    mappend = (<>)
| tysonzero/dama | src/Dama/Error.hs | mit | 412 | 0 | 9 | 111 | 163 | 84 | 79 | 14 | 0 |
{-
The MIT License (MIT)
Copyright (c) 2015 Mário Feroldi Filho
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
module Token where
import Data.List
import ErrorHandler
-- | A token stream as produced by the scanner.
type Tokens = [Token]
-- | The lexical tokens of the language.
data Token
  = CONST Int
  | CONSTF Float
  | DECLARE
  | IDENT String
  | STRING String
  | CHAR Char
  | IMPORT1
  | IMPORT String
  | RARROW
  | LARROW
  -- arithmetic, comparison and logical operators
  | PLUS
  | MINUS
  | MUL
  | DIV
  | GRTH
  | GRTHEQ
  | LSTH
  | LSTHEQ
  | EQUAL
  | NOTEQ
  | NOT
  | MOD
  | EXP
  | AND
  | OR
  | TRUE
  | FALSE
  | ASSIGN
  | MATCH
  | WITH
  | SO
  | AS
  | COMMA
  | EXCLAMATION
  | COMPOSITION
  | OPCALLFUNCTION String
  | CALL1
  | CALLALONE String
  | COMMENT
  | COUNTLIST
  | CONCATLIST
  -- grouping and delimiters
  | OPENPAREN
  | CLOSEPAREN
  | OPENLIST
  | CLOSELIST
  | LAMBDA
  -- control-flow and binding keywords
  | IF
  | THEN
  | ELSE
  | ELSEIF
  | ISEITHER
  | ISNEITHER
  | FOR
  | DO
  | IN
  | WHEN
  | FUNCTION
  | TAKE
  | COLON
  | NULLSTRING
  | VOIDARGUMENTS
  | NULL
  | YADAYADA
  | SOME
  | NONE
  | UNWRAP
  | VOID
  | EOF -- End of file (marks the end of the token stream)
  deriving (Show, Read, Eq)
| thelostt/climbu | src/Token.hs | mit | 2,078 | 0 | 6 | 559 | 262 | 170 | 92 | 74 | 0 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 800
{-# OPTIONS_GHC -fdefer-type-errors #-}
#endif
module TupleTest where
import Testing
import Control.Hspl.Internal.Tuple
#if __GLASGOW_HASKELL__ >= 800
import Test.ShouldNotTypecheck
#endif
-- | Specs for "Control.Hspl.Internal.Tuple": 'mkTuple' construction for
-- every supported arity, rejection of ill-typed tuples (GHC >= 8.0 only,
-- via deferred type errors), and the 'tcons'/'thead'/'ttail' operations.
test = describeModule "Control.Hspl.Internal.Tuple" $ do
  describe "Tupleable" $ do
    it "should convert non-tuple values to singleton tuple" $ do
      mkTuple 'a' `shouldBe` Singleton 'a'
      mkTuple True `shouldBe` Singleton True
    it "should map tuple values to themselves when the result type is a Many tuple" $ do
      mkTuple ('a', True) `shouldBe` Tuple ('a', True)
      mkTuple ('a', True, ()) `shouldBe` Tuple ('a', True, ())
      mkTuple ('a', True, (), ('a', True)) `shouldBe` Tuple ('a', True, (), ('a', True))
      mkTuple ('a', True, (), ('a', True), "foo") `shouldBe`
        Tuple ('a', True, (), ('a', True), "foo")
      mkTuple ('a', True, (), ('a', True), "foo", 'b') `shouldBe`
        Tuple ('a', True, (), ('a', True), "foo", 'b')
      mkTuple ('a', True, (), ('a', True), "foo", 'b', False) `shouldBe`
        Tuple ('a', True, (), ('a', True), "foo", 'b', False)
    it "should map tuple values to nested singleton tuples when the result type is a One tuple" $ do
      mkTuple ('a', True) `shouldBe` Singleton ('a', True)
      mkTuple ('a', True, ()) `shouldBe` Singleton ('a', True, ())
      mkTuple ('a', True, (), ('a', True)) `shouldBe` Singleton ('a', True, (), ('a', True))
      mkTuple ('a', True, (), ('a', True), "foo") `shouldBe`
        Singleton ('a', True, (), ('a', True), "foo")
      mkTuple ('a', True, (), ('a', True), "foo", 'b') `shouldBe`
        Singleton ('a', True, (), ('a', True), "foo", 'b')
      mkTuple ('a', True, (), ('a', True), "foo", 'b', False) `shouldBe`
        Singleton ('a', True, (), ('a', True), "foo", 'b', False)
    it "should not create Many tuples from tuples of the wrong type" $ do
#if __GLASGOW_HASKELL__ >= 800
      shouldNotTypecheck (mkTuple 'a' :: Tuple (Char, Char) Many)
      shouldNotTypecheck (mkTuple ('a', True) :: Tuple (Char, Char) Many)
      shouldNotTypecheck (mkTuple ('a', 'b', 'c') :: Tuple (Char, Char) Many)
      shouldNotTypecheck (mkTuple ('a', 'b') :: Tuple (Char, Char, Char) Many)
#else
      pendingWith "ShouldNotTypecheck tests require GHC >= 8.0"
#endif
  describe "TupleCons" $ do
    it "should prepend a value to a tuple" $ do
      tcons 'a' True `shouldBe` ('a', True)
      tcons 'a' (True, ()) `shouldBe` ('a', True, ())
      tcons 'a' (True, (), ('a', True)) `shouldBe` ('a', True, (), ('a', True))
      tcons 'a' (True, (), ('a', True), "foo") `shouldBe` ('a', True, (), ('a', True), "foo")
      tcons 'a' (True, (), ('a', True), "foo", 'b') `shouldBe`
        ('a', True, (), ('a', True), "foo", 'b')
      tcons 'a' (True, (), ('a', True), "foo", 'b', False) `shouldBe`
        ('a', True, (), ('a', True), "foo", 'b', False)
    it "should get the head of a tuple" $ do
      thead ('a', True) `shouldBe` 'a'
      thead ('a', True, ()) `shouldBe` 'a'
      thead ('a', True, (), ('a', True)) `shouldBe` 'a'
      thead ('a', True, (), ('a', True), "foo") `shouldBe` 'a'
      thead ('a', True, (), ('a', True), "foo", 'b') `shouldBe` 'a'
      thead ('a', True, (), ('a', True), "foo", 'b', False) `shouldBe` 'a'
    it "should get the tail of a tuple" $ do
      ttail ('a', True) `shouldBe` True
      ttail ('a', True, ()) `shouldBe` (True, ())
      ttail ('a', True, (), ('a', True)) `shouldBe` (True, (), ('a', True))
      ttail ('a', True, (), ('a', True), "foo") `shouldBe` (True, (), ('a', True), "foo")
      ttail ('a', True, (), ('a', True), "foo", 'b') `shouldBe` (True, (), ('a', True), "foo", 'b')
      ttail ('a', True, (), ('a', True), "foo", 'b', False) `shouldBe`
        (True, (), ('a', True), "foo", 'b', False)
| jbearer/hspl | test/TupleTest.hs | mit | 3,821 | 0 | 17 | 871 | 1,689 | 991 | 698 | 56 | 1 |
{-# LANGUAGE DataKinds #-}
{-# OPTIONS_HADDOCK prune #-}
-- | Biegunka.Source.Tar - functions to work with [.tar, .tar.gz, .tar.bz2] archives as sources
module Control.Biegunka.Source.Tar
( -- * Source layer
tar, tar_
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Compression.GZip as GZip (decompress)
import qualified Codec.Compression.BZip as BZip (decompress)
import Data.ByteString.Lazy (ByteString)
import System.FilePath (takeExtension)
import Control.Biegunka.Language
import Control.Biegunka.Script (Script, sourced)
import Control.Biegunka.Source.Archive (update)
-- | Download and extract tar archive (possibly with compression)
-- from the given url to specified path. Also executes attached script.
-- The actual fetching and unpacking is done by 'updateTar'.
--
-- > tar "https://example.com/archive.tar.gz" "tar/archive" $ do
-- >   register "some/not/so/long/path"
-- >   link "important.file" ".config"
--
--  * download and extract archive from https:\/\/example.com\/archive.tar.gz to ${HOME}\/tar\/archive
--
--  * link ${HOME}\/tar\/archive to ${HOME}\/some\/not\/so\/long\/path
--
--  * link ${HOME}\/tar\/archive\/important.file to ${HOME}\/.config
tar :: String -> FilePath -> Script Actions () -> Script Sources ()
tar url path script = sourced "tar" url path script (updateTar url)
-- | Download and extract tar archive (possibly with compression)
-- from the given url to specified path, without an attached script.
--
-- > tar_ "https://example.com/archive.tar.gz" "tar/archive"
--
--  * download and extract archive from https:\/\/example.com\/archive.tar.gz to ${HOME}\/tar\/archive
tar_ :: String -> FilePath -> Script Sources ()
tar_ url path = tar url path (return ())
-- Fetch the archive at @url@ and unpack it into @path@, decompressing
-- first when the file extension calls for it.
updateTar :: String -> FilePath -> IO ()
updateTar url path = update url path unpackArchive
  where unpackArchive = Tar.unpack path . Tar.read . decompress url
-- Choose a decompressor from the url's file extension.  @.tgz@ archives
-- are gzip-compressed tarballs; previously they fell through to 'id'
-- and the compressed bytes were fed straight to the tar reader, which
-- fails -- so they are now routed through the gzip decompressor.
-- Unknown extensions are passed through unchanged.
decompress :: String -> ByteString -> ByteString
decompress url = case takeExtension url of
  ".gz" -> GZip.decompress
  ".tgz" -> GZip.decompress
  ".bz2" -> BZip.decompress
  _ -> id
| biegunka/biegunka-archive | src/Control/Biegunka/Source/Tar.hs | mit | 1,957 | 0 | 10 | 305 | 338 | 196 | 142 | 24 | 3 |
module Pregame.Either
( module X
, maybeToLeft
, maybeToRight
) where
import Data.Either.Combinators as X
( leftToMaybe
, rightToMaybe
)
import Pregame.Base
-- | Convert a 'Maybe' into an 'Either': the 'Just' payload becomes
-- 'Left', and the supplied default becomes 'Right' on 'Nothing'.
maybeToLeft :: b -> Maybe a -> Either a b
maybeToLeft def = maybe (Right def) Left
-- | Convert a 'Maybe' into an 'Either': the 'Just' payload becomes
-- 'Right', and the supplied default becomes 'Left' on 'Nothing'.
maybeToRight :: b -> Maybe a -> Either b a
maybeToRight def = maybe (Left def) Right
| jxv/pregame | src/Pregame/Either.hs | mit | 392 | 0 | 7 | 88 | 145 | 76 | 69 | 14 | 1 |
module Main where
import ConstantPool
import Method
import Attribute
import Field
--import Text.Pretty.Simple
import Text.Show.Pretty
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as B
import Data.Binary.Get
import Data.Word
import qualified Data.ByteString.UTF8 as UTF8
import Numeric
import System.Environment
import Debug.Trace
import ConstantPool
-- | A 'Word32' that 'show's as "0x"-prefixed lower-case hexadecimal
-- (used for the class-file magic number).
data Hex32 = Hex32 Word32
instance Show Hex32 where
  show (Hex32 w) = "0x" ++ showHex w ""
-- | In-memory representation of a Java class file, with fields in the
-- order they appear on disk (JVM spec, chapter 4).
data ClassFile = ClassFile {
  magic :: Hex32, -- CAFE BABE
  minor :: Word16,
  major :: Word16,
  constantPool :: [Constant],
  c_access_flags :: Word16, -- class-level access flags bitmask
  this_class :: Word16, -- constant-pool index of this class
  super_class :: Word16, -- constant-pool index of the superclass
  interfaces :: [Word16], -- constant-pool indices of implemented interfaces
  fields :: [Field],
  methods :: [Method],
  cl_attributes :: [Attribute]
} deriving Show
-- Parse a complete class file in the order mandated by the JVM spec:
-- header, constant pool, class metadata, interfaces, fields, methods
-- and attributes.  Every count is a big-endian Word16 read immediately
-- before the items it describes.
readJCF = do
  magic <- getWord32be -- expected to be 0xCAFEBABE
  minor <- getWord16be
  major <- getWord16be
  constant_pool_count <- getWord16be
  constant_pool <- getConstantPool (fromIntegral constant_pool_count)
  access_flags <- getWord16be
  this_class <- getWord16be
  super_class <- getWord16be
  interfaces_count <- getWord16be
  interfaces <- getInterfaces (fromIntegral interfaces_count)
  fields_count <- getWord16be
  fields <- getFields (fromIntegral fields_count)
  methods_count <- getWord16be
  methods <- getMethods (fromIntegral methods_count)
  attributes_count <- getWord16be
  attributes <- getAttributes (fromIntegral attributes_count)
  return (ClassFile (Hex32 magic) minor major
          constant_pool
          access_flags
          this_class
          super_class
          interfaces
          fields
          methods
          attributes)
-- | Parse and pretty-print each class file in turn.  Replaces the
-- original hand-written list recursion with 'mapM_' and adds the
-- missing type signature; behaviour (sequential processing) is
-- unchanged.
readFiles :: [FilePath] -> IO ()
readFiles = mapM_ processFile
  where
    -- Read one file lazily and pretty-print its parsed structure.
    processFile f = do
      input <- BL.readFile f
      pPrint $ runGet readJCF input
-- | Entry point: treat every command-line argument as a class-file path.
main :: IO ()
main = getArgs >>= readFiles
| cwgreene/javaclassreader | src/Main.hs | mit | 1,953 | 0 | 11 | 484 | 491 | 261 | 230 | 65 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
module System.Mesos.Resources where
import Control.Lens
import Data.ByteString (ByteString)
import Data.List (find, foldl', groupBy)
import Data.Word
import System.Mesos.Types
-- | A bag of Mesos 'Resource's, wrapped so we can give it custom
-- comparison and lens instances.
newtype Resources = Resources { fromResources :: [Resource] }
  deriving (Eq, Show)
-- Allow (un)wrapping via lens's 'Wrapped' machinery.
instance Wrapped Resources where
  type Unwrapped Resources = [Resource]
  _Wrapped' = iso fromResources Resources
instance (t ~ Resources) => Rewrapped Resources t
-- | NOTE(review): this is not a lawful total order -- it implements a
-- "fits within" relation: @sub <= super@ iff, for every resource-name
-- group in @sub@, there is a same-named group in @super@ whose summed
-- scalar value is at least @sub@'s sum.  Names absent from @super@
-- make the comparison 'False'.
-- NOTE(review): 'groupBy' only merges *adjacent* equal names, so this
-- presumably relies on callers keeping resources sorted/grouped by
-- name -- TODO confirm.
instance Ord Resources where
  sub <= super = foldl' go True lgs
    where
      lgs = groupBy (\x y -> resourceName x == resourceName y) $ fromResources sub
      rgs = groupBy (\x y -> resourceName x == resourceName y) $ fromResources super
      go b res = case find (\x -> (res ^? traverse . to resourceName) == (x ^? traverse . to resourceName)) rgs of
        Nothing -> False
        Just gs -> b && (sumOf (traverse . value . scalar) res <= sumOf (traverse . value . scalar) gs)
{-
class HasResources a where
resources :: Lens' a [Resource]
instance HasResources SlaveInfo where
resources = lens slaveInfoResources (\s rs -> s { slaveInfoResources = rs })
instance HasResources ExecutorInfo where
resources = lens executorInfoResources (\e rs -> e { executorInfoResources = rs })
instance HasResources Request where
resources = lens reqResources (\r rs -> r { reqResources = rs })
instance HasResources Offer where
resources = lens offerResources (\o rs -> o { offerResources = rs })
instance HasResources TaskInfo where
resources = lens taskResources (\t rs -> t { taskResources = rs })
-}
-- | Lens onto the 'Value' carried by a 'Resource'.
value :: Lens' Resource Value
value = lens resourceValue setValue
  where setValue r v = r { resourceValue = v }
-- | Prism matching scalar-valued 'Value's.
scalar :: Prism' Value Double
scalar = prism' Scalar fromValue
  where
    fromValue (Scalar d) = Just d
    fromValue _ = Nothing
-- | Prism matching range-valued 'Value's (inclusive bounds).
ranges :: Prism' Value [(Word64, Word64)]
ranges = prism' Ranges fromValue
  where
    fromValue (Ranges rs) = Just rs
    fromValue _ = Nothing
-- | Prism matching set-valued 'Value's.
set :: Prism' Value [ByteString]
set = prism' Set fromValue
  where
    fromValue (Set bs) = Just bs
    fromValue _ = Nothing
-- | Prism matching text-valued 'Value's.
text :: Prism' Value ByteString
text = prism' Text fromValue
  where
    fromValue (Text t) = Just t
    fromValue _ = Nothing
-- | Prism for a \"cpus\" scalar resource (built with the default
-- role @*@).
cpus :: Prism' Resource Double
cpus = prism' toResource fromResource
  where
    toResource x = Resource "cpus" (Scalar x) (Just "*")
    fromResource r
      | resourceName r == "cpus" = r ^? value . scalar
      | otherwise = Nothing
-- | Prism for a \"mem\" scalar resource (built with the default
-- role @*@).
mem :: Prism' Resource Double
mem = prism' toResource fromResource
  where
    toResource x = Resource "mem" (Scalar x) (Just "*")
    fromResource r
      | resourceName r == "mem" = r ^? value . scalar
      | otherwise = Nothing
-- | Prism for a \"disk\" scalar resource (built with the default
-- role @*@).
disk :: Prism' Resource Double
disk = prism' toResource fromResource
  where
    toResource x = Resource "disk" (Scalar x) (Just "*")
    fromResource r
      | resourceName r == "disk" = r ^? value . scalar
      | otherwise = Nothing
-- | Prism for a \"ports\" ranges resource (built with the default
-- role @*@).
ports :: Prism' Resource [(Word64, Word64)]
ports = prism' toResource fromResource
  where
    toResource x = Resource "ports" (Ranges x) (Just "*")
    fromResource r
      | resourceName r == "ports" = r ^? value . ranges
      | otherwise = Nothing
-- | NOTE(review): unimplemented stub -- forcing the result of this
-- getter crashes with 'undefined'.  TODO: implement or remove.
flattened :: Getter [Resource] [Resource]
flattened = undefined
{-
extract
find
get
getAll
parse
parse'
isValid
isAllocatable
isZero
-}
| jhedev/hs-mesos | src/System/Mesos/Resources.hs | mit | 3,411 | 0 | 17 | 987 | 930 | 488 | 442 | -1 | -1 |
import Text.Parsec
import Text.Parsec.String
-- Parse a simplified Roman numeral: any number of Xs, then Vs, then Is,
-- summed additively (X=10, V=5, I=1).  Subtractive forms such as IV or
-- IX are not part of this grammar; 'eof' rejects trailing input.
roman :: Parser Int
roman = do
  x <- many (char 'X')
  v <- many (char 'V')
  i <- many (char 'I')
  eof
  return $ (length x * 10) + (length v * 5) + length i
-- Parse the literal input "XVII" (source label "fail") and print the result.
main = print (parse roman "fail" "XVII")
| Russell91/roman | 04.hs | mit | 259 | 0 | 12 | 71 | 130 | 62 | 68 | 10 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
module Data.Makefile.Parse.Internal where
import Control.Monad
import Data.Foldable
import Data.Attoparsec.Text
import Data.Makefile
import Data.Monoid
import Control.Applicative
import qualified Data.Attoparsec.Text as Atto
import qualified Data.Text as T
import qualified Data.Text.IO as T
-- $setup
-- >>> :set -XOverloadedStrings
-- | Parse makefile.
--
-- Tries to open and parse a file named @Makefile@ in the current
-- directory.
parseMakefile :: IO (Either String Makefile)
parseMakefile = do
  contents <- T.readFile "Makefile"
  return (Atto.parseOnly makefile contents)
-- | Parse the specified file as a makefile.
parseAsMakefile :: FilePath -> IO (Either String Makefile)
parseAsMakefile f = do
  contents <- T.readFile f
  return (Atto.parseOnly makefile contents)
-- | Parse the given text as the contents of a makefile.
parseMakefileContents :: T.Text -> Either String Makefile
parseMakefileContents contents = Atto.parseOnly makefile contents
-- | Similar to 'Atto.parseOnly' but fails if all input has not been
-- consumed.
parseAll :: Parser a -> T.Text -> Either String a
parseAll p input = Atto.parseOnly (p <* Atto.endOfInput) input
--------------------------------------------------------------------------------
-- Parsers

-- | Parser for a whole makefile: any number of entries.
makefile :: Parser Makefile
makefile = fmap Makefile (many' entry)
-- | Parser for a makefile entry (either a rule or a variable assignment).
-- The order matters: assignments are tried first, then rules, then the
-- catch-all 'otherLine'.
entry :: Parser Entry
entry = asum [assignment, rule, otherLine]
-- | Parser of variable assignment (see 'Assignment'). Note that leading and
-- trailing whitespaces will be stripped both from the variable name and
-- assigned value (the value via 'toEscapedLineEnd', which also collapses
-- backslash-continued lines).
--
-- Note that this tries to follow GNU make's (crazy) behavior when it comes to
-- variable names and assignment operators.
--
-- >>> parseAll assignment "foo = bar "
-- Right (Assignment RecursiveAssign "foo" "bar")
--
-- >>> parseAll assignment "foo := bar "
-- Right (Assignment SimpleAssign "foo" "bar")
--
-- >>> parseAll assignment "foo ::= bar "
-- Right (Assignment SimplePosixAssign "foo" "bar")
--
-- >>> parseAll assignment "foo?= bar "
-- Right (Assignment ConditionalAssign "foo" "bar")
--
-- >>> parseAll assignment "foo??= bar "
-- Right (Assignment ConditionalAssign "foo?" "bar")
--
-- >>> parseAll assignment "foo!?!= bar "
-- Right (Assignment ShellAssign "foo!?" "bar")
assignment :: Parser Entry
assignment = do
  varName <- variableName
  assType <- assignmentType
  varVal <- toEscapedLineEnd
  return (Assignment assType varName varVal)
-- | Read chars while some ('Parser', monadic) predicate is 'True'.
--
-- XXX: extremely inefficient.
-- NOTE(review): accumulates by consing then reversing, and backtracks
-- one character at a time via '(<|>)' (the @True <- a c@ match failure
-- triggers the alternative); fine for short names, quadratic in the
-- worst case.
takeWhileM :: (Char -> Parser Bool) -> Parser T.Text
takeWhileM a = (T.pack . reverse) <$> go []
  where
    go cs = do
      c <- Atto.anyChar
      True <- a c
      go (c:cs) <|> pure (c:cs)
-- | Parse a variable name, not consuming any of the assignment operator. See
-- also 'assignment'.
--
-- A @+@, @?@ or @!@ is accepted only when the following character (peeked,
-- not consumed) is not @=@, so operator prefixes like @?=@ stay unread.
--
-- >>> Atto.parseOnly variableName "foo!?!= bar "
-- Right "foo!?"
variableName :: Parser T.Text
variableName = stripped $ takeWhileM go
  where
    go '+' = Atto.peekChar' >>= \case
      '=' -> return False
      _c -> return True
    go '?' = Atto.peekChar' >>= \case
      '=' -> return False
      _c -> return True
    go '!' = Atto.peekChar' >>= \case
      '=' -> return False
      _c -> return True
    -- those chars are not allowed in variable names
    go ':' = return False
    go '#' = return False
    go '=' = return False
    go (Atto.isEndOfLine -> True) = return False
    go _c = return True
-- | Parse an assignment type, not consuming any of the assigned value. See
-- also 'assignment'.
--
-- The alternatives rely on attoparsec's string matcher backtracking
-- without consuming input on failure, so e.g. @"::="@ falls through
-- @"="@ and @":="@ before matching.
--
-- >>> Atto.parseOnly assignmentType "!= bar "
-- Right ShellAssign
assignmentType :: Parser AssignmentType
assignmentType =
      ("=" *> pure RecursiveAssign)
  <|> ("+=" *> pure AppendAssign)
  <|> ("?=" *> pure ConditionalAssign)
  <|> ("!=" *> pure ShellAssign)
  <|> (":=" *> pure SimpleAssign)
  <|> ("::=" *> pure SimplePosixAssign)
-- | Parser for an entire rule: a target, its dependencies (the rest of
-- the target line, e.g. a trailing comment, is discarded), then any
-- number of recipe lines.
rule :: Parser Entry
rule =
  Rule
    <$> target
    <*> (many' dependency <* (Atto.takeWhile (not.Atto.isEndOfLine) <* endOfLine'))
    <*> many' command
-- | Succeed on a newline ('Atto.endOfLine'), or at end of input without
-- consuming anything.
endOfLine' :: Parser ()
endOfLine' = Atto.endOfLine <|> atEof
  where
    atEof = Atto.atEnd >>= \case
      True -> pure ()
      False -> mzero
-- | Parser for a single recipe command.
command :: Parser Command
command = fmap Command recipeLine
-- A recipe line starts with a TAB.  A backslash immediately before the
-- newline continues the line; the literal @"\\\n"@ is preserved in the
-- result (make itself passes it to the shell), and a TAB opening the
-- continuation line is swallowed.
recipeLine :: Parser T.Text
recipeLine =
    Atto.char '\t' *> recipeLineContents ""
  where
    recipeLineContents pre = do
      cur <- Atto.takeWhile $ \c ->
        c /= '\\' && not (Atto.isEndOfLine c)
      asum
        [ -- Multi-line
          Atto.char '\\'
            *> Atto.endOfLine
            *> (void (Atto.char '\t') <|> pure ())
            *> recipeLineContents (pre <> cur <> "\\\n")
        , -- Just EOL or EOF
          endOfLine' *> pure (pre <> cur)
        , -- It was just a backslash within a recipe line, we're not doing
          -- anything particular
          Atto.char '\\' *> recipeLineContents (pre <> cur <> "\\")
        ]
-- | Parser for a (rule) target: everything up to the @:@ separator,
-- stripped of surrounding whitespace.
target :: Parser Target
target = Target <$> (go $ stripped (Atto.takeWhile (/= ':') <* Atto.char ':'))
  where
    -- takes care of some makefile target quirks: skip leading blanks,
    -- then refuse to treat a comment or an empty line as a target
    go :: Parser a -> Parser a
    go p =
      Atto.takeWhile (liftA2 (||) (== ' ') (== '\t'))
        *> (Atto.peekChar >>= \case
              Just '#' -> mzero
              Just '\n' -> mzero
              _ -> p)
-- | Parser for a single (rule) dependency.  Dependencies may continue
-- onto the next line after a backslash-newline.
dependency :: Parser Dependency
dependency = Dependency <$> (sameLine <|> newLine)
  where
    -- a whitespace-separated word, stopping at newline/comment/backslash
    sameLine =
      Atto.takeWhile (== ' ')
        *> Atto.takeWhile1 (`notElem` [' ', '\n', '#', '\\'])
    -- a backslash-newline continuation, then keep looking for words
    newLine =
      Atto.takeWhile (== ' ')
        *> Atto.char '\\'
        *> Atto.char '\n'
        *> (sameLine <|> newLine)
-- | Catch all, used for
--  * comments, empty lines
--  * lines that failed to parse
--
-- The returned text never includes the terminating newline.
--
-- >>> parseAll otherLine "# I AM A COMMENT\n"
-- Right (OtherLine "# I AM A COMMENT")
--
-- Ensure all 'Entry's consume the end of line:
-- >>> parseAll otherLine "\n"
-- Right (OtherLine "")
--
otherLine :: Parser Entry
otherLine = OtherLine <$> go
  where
    go = asum
      [ -- Typical case of empty line
        Atto.endOfLine *> pure ""
      , -- Either a line of spaces and/or comment, or a line that we failed to
        -- parse
        Atto.takeWhile1 (not . Atto.isEndOfLine) <* Atto.endOfLine
      ]
-- | Consume characters up to (but not including) a newline or the start
-- of a comment.
toLineEnd :: Parser T.Text
toLineEnd = Atto.takeWhile (\c -> c /= '\n' && c /= '#')
-- | Get the contents until the end of the (potentially multi) line. Multiple
-- lines are separated by a @\\@ char and individual lines will be stripped and
-- spaces will be interspersed (empty segments are dropped entirely).
--
-- The final @\n@ character is consumed.
--
-- >>> Atto.parseOnly toEscapedLineEnd "foo bar \\\n baz"
-- Right "foo bar baz"
--
-- >>> Atto.parseOnly toEscapedLineEnd "foo \t\\\n bar \\\n baz \\\n \t"
-- Right "foo bar baz"
toEscapedLineEnd :: Parser T.Text
toEscapedLineEnd = (T.unwords . filter (not . T.null)) <$> go
  where
    go = do
      l <- toLineEnd <* (void (Atto.char '\n') <|> pure ())
      -- a trailing backslash means the logical line continues below
      case T.stripSuffix "\\" l of
        Nothing -> return [T.strip l]
        Just l' -> (T.strip l':) <$> go
-------------------------------------------------------------------------------
-- Helpers
-------------------------------------------------------------------------------

-- | Strip leading and trailing whitespace from a parser's result.
stripped :: Parser T.Text -> Parser T.Text
stripped p = T.strip <$> p
| nmattia/mask | src/Data/Makefile/Parse/Internal.hs | mit | 7,577 | 1 | 18 | 1,731 | 1,598 | 857 | 741 | 124 | 11 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-property-redshift-clusterparametergroup-parameter.html
module Stratosphere.ResourceProperties.RedshiftClusterParameterGroupParameter where
import Stratosphere.ResourceImports
-- | Full data type definition for RedshiftClusterParameterGroupParameter. See
-- 'redshiftClusterParameterGroupParameter' for a more convenient
-- constructor.  (This module appears machine-generated; keep edits
-- mechanical.)
data RedshiftClusterParameterGroupParameter =
  RedshiftClusterParameterGroupParameter
  { _redshiftClusterParameterGroupParameterParameterName :: Val Text
  , _redshiftClusterParameterGroupParameterParameterValue :: Val Text
  } deriving (Show, Eq)
-- Serialise both (required) properties under their CloudFormation keys.
instance ToJSON RedshiftClusterParameterGroupParameter where
  toJSON RedshiftClusterParameterGroupParameter{..} =
    object $
    catMaybes
    [ (Just . ("ParameterName",) . toJSON) _redshiftClusterParameterGroupParameterParameterName
    , (Just . ("ParameterValue",) . toJSON) _redshiftClusterParameterGroupParameterParameterValue
    ]
-- | Constructor for 'RedshiftClusterParameterGroupParameter' containing
-- required fields as arguments.
redshiftClusterParameterGroupParameter
  :: Val Text -- ^ 'rcpgpParameterName'
  -> Val Text -- ^ 'rcpgpParameterValue'
  -> RedshiftClusterParameterGroupParameter
redshiftClusterParameterGroupParameter parameterNamearg parameterValuearg =
  RedshiftClusterParameterGroupParameter
  { _redshiftClusterParameterGroupParameterParameterName = parameterNamearg
  , _redshiftClusterParameterGroupParameterParameterValue = parameterValuearg
  }
-- | Lens onto the parameter name.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-property-redshift-clusterparametergroup-parameter.html#cfn-redshift-clusterparametergroup-parameter-parametername
rcpgpParameterName :: Lens' RedshiftClusterParameterGroupParameter (Val Text)
rcpgpParameterName = lens _redshiftClusterParameterGroupParameterParameterName (\s a -> s { _redshiftClusterParameterGroupParameterParameterName = a })
-- | Lens onto the parameter value.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-property-redshift-clusterparametergroup-parameter.html#cfn-redshift-clusterparametergroup-parameter-parametervalue
rcpgpParameterValue :: Lens' RedshiftClusterParameterGroupParameter (Val Text)
rcpgpParameterValue = lens _redshiftClusterParameterGroupParameterParameterValue (\s a -> s { _redshiftClusterParameterGroupParameterParameterValue = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/RedshiftClusterParameterGroupParameter.hs | mit | 2,515 | 0 | 13 | 221 | 266 | 152 | 114 | 29 | 1 |
{- Textures.hs; Mun Hon Cheong (mhch295@cse.unsw.edu.au) 2005
This module is for loading textures
-}
module Textures where
import Graphics.UI.GLUT
import TGA (readTga)
import Data.Word (Word8)
import Foreign.Marshal.Alloc (free)
import Control.Exception (catch)
import Control.Exception.Base (SomeException)
-- read a list of images and returns a list of textures
-- all images are assumed to be in the TGA image format
getAndCreateTextures :: [String] -> IO [Maybe TextureObject]
getAndCreateTextures fileNames = do
fileNamesExts <- return (map (("tga/" ++) . (++ ".tga")) fileNames)
texData <- mapM readImageC fileNamesExts
texObjs <- mapM createTexture texData
return texObjs
-- read a single texture
getAndCreateTexture :: String -> IO (Maybe TextureObject)
getAndCreateTexture fileName = do
texData <- readImageC ("tga/" ++ fileName ++ ".tga")
texObj <- createTexture texData
return texObj
-- read the image data
readImageC :: String -> IO (Maybe (Size, PixelData Word8))
readImageC path = catch (readTga path) (\e -> let _ = (e :: SomeException) in do
putStrLn $ "missing texture: "++path
return Nothing)
-- creates the texture
createTexture :: (Maybe (Size, PixelData a)) -> IO (Maybe TextureObject)
createTexture (Just ((Size x y), pixels@(PixelData _ _ ptr))) = do
[texName] <- genObjectNames 1 -- generate our texture.
--rowAlignment Unpack $= 1
textureBinding Texture2D $= Just texName -- make our new texture the current texture.
--generateMipmap Texture2D $= Enabled
build2DMipmaps Texture2D RGBA' (fromIntegral x) (fromIntegral y) pixels
textureFilter Texture2D $= ((Linear', Just Nearest), Linear')
--textureWrapMode Texture2D S $= (Repeated, Repeat)
--textureWrapMode Texture2D T $= (Repeated, Repeat)
textureFunction $= Modulate
free ptr
return (Just texName)
createTexture Nothing = return Nothing
| elitak/frag | src/Textures.hs | gpl-2.0 | 2,037 | 0 | 13 | 473 | 503 | 258 | 245 | 32 | 1 |
{-| Implementation of the Ganeti configuration database.
-}
{-
Copyright (C) 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Config
( LinkIpMap
, NdParamObject(..)
, loadConfig
, getNodeInstances
, getNodeRole
, getNodeNdParams
, getDefaultNicLink
, getDefaultHypervisor
, getInstancesIpByLink
, getMasterCandidates
, getNode
, getInstance
, getGroup
, getGroupNdParams
, getGroupIpolicy
, getGroupDiskParams
, getGroupNodes
, getGroupInstances
, getGroupOfNode
, getInstPrimaryNode
, getInstMinorsForNode
, getInstAllNodes
, getFilledInstHvParams
, getFilledInstBeParams
, getFilledInstOsParams
, getNetwork
, buildLinkIpInstnameMap
, instNodes
) where
import Control.Monad (liftM)
import Control.Monad.IO.Class (liftIO)
import Data.List (foldl', nub)
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Text.JSON as J
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import Ganeti.Errors
import Ganeti.JSON
import Ganeti.Objects
import Ganeti.Types
-- | Type alias for the link and ip map: outer key is the NIC link,
-- inner map goes from IP address to instance name (see
-- 'buildLinkIpInstnameMap', which is the producer of this structure).
type LinkIpMap = M.Map String (M.Map String String)
-- | Type class denoting objects which have node parameters.
class NdParamObject a where
  getNdParamsOf :: ConfigData -> a -> Maybe FilledNDParams
-- | Reads the config file.
-- NOTE(review): uses the Prelude's lazy 'readFile'; errors may surface
-- only when the contents are forced by the parser — confirm acceptable.
readConfig :: FilePath -> IO (Result String)
readConfig = runResultT . liftIO . readFile
-- | Parses the configuration file (JSON) into a 'ConfigData'.
parseConfig :: String -> Result ConfigData
parseConfig = fromJResult "parsing configuration" . J.decodeStrict
-- | Wrapper over 'readConfig' and 'parseConfig': read errors and parse
-- errors both end up in the same 'Result'.
loadConfig :: FilePath -> IO (Result ConfigData)
loadConfig = fmap (>>= parseConfig) . readConfig
-- * Query functions
-- | Computes the set of nodes covered by a disk: for DRBD8 disks the
-- two attached nodes, for every other disk type the empty set.
computeDiskNodes :: Disk -> S.Set String
computeDiskNodes disk =
  case diskLogicalId disk of
    LIDDrbd8 firstNode secondNode _ _ _ _ -> S.fromList [firstNode, secondNode]
    _                                     -> S.empty
-- | Computes all disk-related nodes of an instance. For non-DRBD,
-- this will be empty, for DRBD it will contain both the primary and
-- the secondaries.
instDiskNodes :: Instance -> S.Set String
instDiskNodes = S.unions . map computeDiskNodes . instDisks
-- | Computes all nodes of an instance (primary plus disk nodes).
instNodes :: Instance -> S.Set String
instNodes inst = instPrimaryNode inst `S.insert` instDiskNodes inst
-- | Computes the secondary nodes of an instance. Since this is valid
-- only for DRBD, we call directly 'instDiskNodes', skipping over the
-- extra primary insert.
instSecondaryNodes :: Instance -> S.Set String
instSecondaryNodes inst =
  -- delete instead of insert: the primary may also host a disk
  instPrimaryNode inst `S.delete` instDiskNodes inst
-- | Get the instances of a given node, split into (primaries,
-- secondaries).  The node is specified through its UUID.
getNodeInstances :: ConfigData -> String -> ([Instance], [Instance])
getNodeInstances cfg nname = (primaries, secondaries)
  where
    insts       = M.elems . fromContainer $ configInstances cfg
    primaries   = [i | i <- insts, instPrimaryNode i == nname]
    secondaries = [i | i <- insts, nname `S.member` instSecondaryNodes i]
-- | Computes the role of a node.  The guard order encodes the role
-- priority: master wins over candidate, which wins over drained, which
-- wins over offline; anything else is a regular node.
getNodeRole :: ConfigData -> Node -> NodeRole
getNodeRole cfg node
  | nodeUuid node == clusterMasterNode (configCluster cfg) = NRMaster
  | nodeMasterCandidate node = NRCandidate
  | nodeDrained node = NRDrained
  | nodeOffline node = NROffline
  | otherwise = NRRegular
-- | Get the list of master candidates, i.e. all nodes whose computed
-- role is 'NRCandidate'.
getMasterCandidates :: ConfigData -> [Node]
getMasterCandidates cfg =
  -- 'M.elems' is the idiomatic equivalent of 'map snd . M.toList'
  filter ((==) NRCandidate . getNodeRole cfg)
    (M.elems . fromContainer . configNodes $ cfg)
-- | Returns the default cluster link, taken from the cluster's default
-- NIC parameter set.
getDefaultNicLink :: ConfigData -> String
getDefaultNicLink cfg =
  let defNicParams = fromContainer (clusterNicparams (configCluster cfg))
                     M.! C.ppDefault
  in nicpLink defNicParams
-- | Returns the default cluster hypervisor: the first enabled one.
getDefaultHypervisor :: ConfigData -> Hypervisor
getDefaultHypervisor cfg =
  case clusterEnabledHypervisors (configCluster cfg) of
    (hv:_) -> hv
    -- FIXME: an empty list shouldn't happen (configuration broken),
    -- but for now we handle it here because we're not authoritative
    -- for the config
    []     -> XenPvm
-- | Returns instances of a given link.
getInstancesIpByLink :: LinkIpMap -> String -> [String]
getInstancesIpByLink linkipmap link =
M.keys $ M.findWithDefault M.empty link linkipmap
-- | Generic lookup function that converts from a possible abbreviated
-- name to a full name, and then resolves that name in the given map.
-- Fails with 'OpPrereqError' when the name is ambiguous or unknown.
getItem :: String -> String -> M.Map String a -> ErrorResult a
getItem kind name allitems = do
  let lresult = lookupName (M.keys allitems) name
      err msg = Bad $ OpPrereqError (kind ++ " name " ++ name ++ " " ++ msg)
                  ECodeNoEnt
  fullname <- case lrMatchPriority lresult of
                PartialMatch -> Ok $ lrContent lresult
                ExactMatch -> Ok $ lrContent lresult
                MultipleMatch -> err "has multiple matches"
                FailMatch -> err "not found"
  -- A match returned by 'lookupName' should always resolve to an
  -- existing key; this guards against an inconsistent result.
  -- (fixed message typo: "successfull" -> "successful")
  maybe (err "not found after successful match?!") Ok $
    M.lookup fullname allitems
-- | Looks up a node by name or uuid: first try the map keyed by UUID,
-- then fall back to a map re-keyed by node name.
-- NOTE(review): the fallback rebuilds the by-name map on every miss;
-- presumably acceptable for config sizes — confirm if hot.
getNode :: ConfigData -> String -> ErrorResult Node
getNode cfg name =
  let nodes = fromContainer (configNodes cfg)
  in case getItem "Node" name nodes of
       -- if not found by uuid, we need to look it up by name
       Ok node -> Ok node
       Bad _ -> let by_name = M.mapKeys
                              (nodeName . (M.!) nodes) nodes
                in getItem "Node" name by_name
-- | Looks up an instance by name or uuid (same two-phase strategy as
-- 'getNode').
getInstance :: ConfigData -> String -> ErrorResult Instance
getInstance cfg name =
  let instances = fromContainer (configInstances cfg)
  in case getItem "Instance" name instances of
       -- if not found by uuid, we need to look it up by name
       Ok inst -> Ok inst
       Bad _ -> let by_name = M.mapKeys
                              (instName . (M.!) instances) instances
                in getItem "Instance" name by_name
-- | Looks up a node group by name or uuid (same two-phase strategy as
-- 'getNode').
getGroup :: ConfigData -> String -> ErrorResult NodeGroup
getGroup cfg name =
  let groups = fromContainer (configNodegroups cfg)
  in case getItem "NodeGroup" name groups of
       -- if not found by uuid, we need to look it up by name, slow
       Ok grp -> Ok grp
       Bad _ -> let by_name = M.mapKeys
                              (groupName . (M.!) groups) groups
                in getItem "NodeGroup" name by_name
-- | Computes a node group's node params: cluster defaults filled with
-- the group's overrides.
getGroupNdParams :: ConfigData -> NodeGroup -> FilledNDParams
getGroupNdParams cfg ng =
  fillNDParams (clusterNdparams $ configCluster cfg) (groupNdparams ng)
-- | Computes a node group's ipolicy (cluster policy filled with the
-- group's partial policy).
getGroupIpolicy :: ConfigData -> NodeGroup -> FilledIPolicy
getGroupIpolicy cfg ng =
  fillIPolicy (clusterIpolicy $ configCluster cfg) (groupIpolicy ng)
-- | Computes a group\'s (merged) disk params; the empty list passed to
-- 'fillDict' means no keys are excluded from the merge.
getGroupDiskParams :: ConfigData -> NodeGroup -> DiskParams
getGroupDiskParams cfg ng =
  GenericContainer $
  fillDict (fromContainer . clusterDiskparams $ configCluster cfg)
           (fromContainer $ groupDiskparams ng) []
-- | Get the nodes belonging to a given node group (by group UUID).
getGroupNodes :: ConfigData -> String -> [Node]
getGroupNodes cfg gname =
  filter ((== gname) . nodeGroup) . M.elems . fromContainer $ configNodes cfg

-- | Get the (primary, secondary) instances of a given node group, by
-- collecting the per-node instance lists of all the group's nodes.
getGroupInstances :: ConfigData -> String -> ([Instance], [Instance])
getGroupInstances cfg gname =
  let nodeIds = map nodeUuid (getGroupNodes cfg gname)
      perNode = map (getNodeInstances cfg) nodeIds
  in (concatMap fst perNode, concatMap snd perNode)
-- | Looks up a network. If looking up by uuid fails, we look up
-- by name (note the extra 'fromNonEmpty', since network names are
-- stored as non-empty strings, unlike nodes/instances/groups).
getNetwork :: ConfigData -> String -> ErrorResult Network
getNetwork cfg name =
  let networks = fromContainer (configNetworks cfg)
  in case getItem "Network" name networks of
       Ok net -> Ok net
       Bad _ -> let by_name = M.mapKeys
                              (fromNonEmpty . networkName . (M.!) networks)
                              networks
                in getItem "Network" name by_name
-- | Retrieves the instance hypervisor params, missing values filled with
-- cluster defaults.  Fill order (least to most specific): cluster
-- per-hypervisor defaults, then OS-specific hypervisor defaults, then
-- the instance's own params; @globals@ lists keys excluded from filling.
getFilledInstHvParams :: [String] -> ConfigData -> Instance -> HvParams
getFilledInstHvParams globals cfg inst =
  -- First get the defaults of the parent
  let hvName = hypervisorToRaw . instHypervisor $ inst
      hvParamMap = fromContainer . clusterHvparams $ configCluster cfg
      parentHvParams = maybe M.empty fromContainer $ M.lookup hvName hvParamMap
  -- Then the os defaults for the given hypervisor
      osName = instOs inst
      osParamMap = fromContainer . clusterOsHvp $ configCluster cfg
      osHvParamMap = maybe M.empty fromContainer $ M.lookup osName osParamMap
      osHvParams = maybe M.empty fromContainer $ M.lookup hvName osHvParamMap
  -- Then the child
      childHvParams = fromContainer . instHvparams $ inst
  -- Helper function
      fillFn con val = fillDict con val globals
  in GenericContainer $ fillFn (fillFn parentHvParams osHvParams) childHvParams
-- | Retrieves the instance backend params, missing values filled with
-- cluster defaults.  Fails when the cluster has no default be-params
-- entry (propagated from 'getItem').
getFilledInstBeParams :: ConfigData -> Instance -> ErrorResult FilledBeParams
getFilledInstBeParams cfg inst = do
  let beParamMap = fromContainer . clusterBeparams . configCluster $ cfg
  parentParams <- getItem "FilledBeParams" C.ppDefault beParamMap
  return $ fillBeParams parentParams (instBeparams inst)
-- | Retrieves the instance os params, missing values filled with cluster
-- defaults.  The OS name is truncated at the first \'+\' (variant
-- separator); when the cluster has no entry for the OS, the child's
-- params are returned unfilled.
getFilledInstOsParams :: ConfigData -> Instance -> OsParams
getFilledInstOsParams cfg inst =
  let osLookupName = takeWhile (/= '+') (instOs inst)
      osParamMap = fromContainer . clusterOsparams $ configCluster cfg
      childOsParams = instOsparams inst
  in case getItem "OsParams" osLookupName osParamMap of
       Ok parentOsParams -> GenericContainer $
                            fillDict (fromContainer parentOsParams)
                                     (fromContainer childOsParams) []
       Bad _ -> childOsParams
-- | Looks up an instance's primary node (instance lookup failure or
-- node lookup failure both propagate as 'Bad').
getInstPrimaryNode :: ConfigData -> String -> ErrorResult Node
getInstPrimaryNode cfg name =
  liftM instPrimaryNode (getInstance cfg name) >>= getNode cfg
-- | Retrieves all nodes hosting a DRBD disk, recursing into the disk's
-- children; nodes that fail to resolve are silently dropped ('justOk').
getDrbdDiskNodes :: ConfigData -> Disk -> [Node]
getDrbdDiskNodes cfg disk =
  let retrieved = case diskLogicalId disk of
                    LIDDrbd8 nodeA nodeB _ _ _ _ ->
                      justOk [getNode cfg nodeA, getNode cfg nodeB]
                    _ -> []
  in retrieved ++ concatMap (getDrbdDiskNodes cfg) (diskChildren disk)
-- | Retrieves all the nodes of the instance.
--
-- As instances not using DRBD can be sent as a parameter as well,
-- the primary node has to be appended to the results; duplicates are
-- removed with 'nub'.
getInstAllNodes :: ConfigData -> String -> ErrorResult [Node]
getInstAllNodes cfg name = do
  inst <- getInstance cfg name
  pNode <- getInstPrimaryNode cfg name
  let diskNodes = concatMap (getDrbdDiskNodes cfg) (instDisks inst)
  return (nub (pNode : diskNodes))
-- | Filters DRBD minors for a given node, recursing into child disks.
-- Each result pairs this node's minor with the UUID of the peer node.
getDrbdMinorsForNode :: String -> Disk -> [(Int, String)]
getDrbdMinorsForNode node disk =
  let child_minors = concatMap (getDrbdMinorsForNode node) (diskChildren disk)
      this_minors =
        case diskLogicalId disk of
          LIDDrbd8 nodeA nodeB _ minorA minorB _
            | nodeA == node -> [(minorA, nodeB)]
            | nodeB == node -> [(minorB, nodeA)]
          -- non-DRBD disks (and DRBD disks not involving this node)
          -- contribute nothing at this level
          _ -> []
  in this_minors ++ child_minors
-- | String for primary role.
rolePrimary :: String
rolePrimary = "primary"
-- | String for secondary role.
roleSecondary :: String
roleSecondary = "secondary"
-- | Gets the list of DRBD minors for an instance that are related to
-- a given node.  Each tuple is (node, minor, instance name, disk name,
-- role of this node, peer node).
getInstMinorsForNode :: String -> Instance
                     -> [(String, Int, String, String, String, String)]
getInstMinorsForNode node inst =
  let role = if node == instPrimaryNode inst
               then rolePrimary
               else roleSecondary
      iname = instName inst
  -- FIXME: the disk/ build there is hack-ish; unify this in a
  -- separate place, or reuse the iv_name (but that is deprecated on
  -- the Python side)
  in concatMap (\(idx, dsk) ->
            [(node, minor, iname, "disk/" ++ show idx, role, peer)
               | (minor, peer) <- getDrbdMinorsForNode node dsk]) .
     zip [(0::Int)..] . instDisks $ inst
-- | Builds link -> ip -> instname map.  NICs without an IP are skipped;
-- the NIC's link is taken from its params filled with the cluster's
-- default NIC params.
--
-- TODO: improve this by splitting it into multiple independent functions:
--
-- * abstract the \"fetch instance with filled params\" functionality
--
-- * abstract the [instance] -> [(nic, instance_name)] part
--
-- * etc.
buildLinkIpInstnameMap :: ConfigData -> LinkIpMap
buildLinkIpInstnameMap cfg =
  let cluster = configCluster cfg
      instances = M.elems . fromContainer . configInstances $ cfg
      defparams = (M.!) (fromContainer $ clusterNicparams cluster) C.ppDefault
      nics = concatMap (\i -> [(instName i, nic) | nic <- instNics i])
             instances
  in foldl' (\accum (iname, nic) ->
               let pparams = nicNicparams nic
                   fparams = fillNicParams defparams pparams
                   link = nicpLink fparams
               in case nicIp nic of
                    Nothing -> accum
                    Just ip -> let oldipmap = M.findWithDefault M.empty
                                              link accum
                                   newipmap = M.insert ip iname oldipmap
                               in M.insert link newipmap accum
            ) M.empty nics
-- | Returns a node's group, with optional failure if we can't find it
-- (configuration corrupt).
getGroupOfNode :: ConfigData -> Node -> Maybe NodeGroup
getGroupOfNode cfg node =
  M.lookup (nodeGroup node) . fromContainer $ configNodegroups cfg

-- | Returns a node's ndparams: the group's (already cluster-filled)
-- params, filled with the node's own overrides.
getNodeNdParams :: ConfigData -> Node -> Maybe FilledNDParams
getNodeNdParams cfg node = do
  grp <- getGroupOfNode cfg node
  return $ fillNDParams (getGroupNdParams cfg grp) (nodeNdparams node)
-- Node params may be missing when the node's group can't be resolved.
instance NdParamObject Node where
  getNdParamsOf = getNodeNdParams
-- Groups and the cluster always have params, hence the 'Just'.
instance NdParamObject NodeGroup where
  getNdParamsOf cfg = Just . getGroupNdParams cfg
instance NdParamObject Cluster where
  getNdParamsOf _ = Just . clusterNdparams
| badp/ganeti | src/Ganeti/Config.hs | gpl-2.0 | 15,196 | 0 | 20 | 3,519 | 3,338 | 1,728 | 1,610 | 256 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Metainfo.InterfaceCollector
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
-- | This modulle extracts information from .hi files for installed packages
--
-------------------------------------------------------------------------------
module IDE.Metainfo.InterfaceCollector (
collectPackageFromHI
, extractExportedDescrH
, extractExportedDescrR
) where
import Prelude ()
import Prelude.Compat
import Module hiding (ModuleName)
import qualified Module (ModuleName)
import qualified Maybes as M
import DynFlags (DynFlags)
import HscTypes
import GhcMonad hiding (liftIO)
import qualified GhcMonad as Hs (liftIO)
#if MIN_VERSION_ghc(8,0,0)
import Avail
import TysWiredIn ( )
#else
import Avail
import TysWiredIn ( eqTyConName )
#endif
import LoadIface
import Outputable hiding(trace)
import IfaceSyn
import FastString
import Name
import PrelNames
import PackageConfig (PackageConfig)
import TcRnTypes
import Data.Char (isSpace)
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Set as Set
import Data.Set (Set)
#if MIN_VERSION_ghc(8,2,0)
import ToIface (toIfaceTyCon_name)
import FieldLabel (flSelector)
import GHC.PackageDb (exposedModules, hiddenModules)
#else
import GHC.PackageDb (exposedModules, hiddenModules, exposedName)
#endif
import Distribution.Package hiding (PackageId)
import Distribution.ModuleName
import Distribution.Text (simpleParse)
import qualified Data.ByteString.Char8 as BS
import IDE.Core.Serializable ()
import IDE.Core.CTypes
import Data.ByteString.Char8 (ByteString)
import TcRnMonad (initTcRnIf)
import IDE.Utils.GHCUtils
import IDE.Utils.FileUtils (getSysLibDir)
import Control.DeepSeq(deepseq)
import Data.Text (Text)
import qualified Data.Text as T (pack)
import System.Log.Logger (debugM)
import GHC.Stack (HasCallStack)
import IDE.Utils.Project (ProjectKey)
import TysWiredIn
#if MIN_VERSION_ghc(8,2,0)
exposedName :: (Module.ModuleName, Maybe Module.Module) -> Module.ModuleName
exposedName = fst
nameOccName82 :: Name -> OccName
nameOccName82 = nameOccName
#else
nameOccName82 :: OccName -> OccName
nameOccName82 = id
flSelector :: OccName -> OccName
flSelector = id
ifConName :: IfaceConDecl -> OccName
ifConName = ifConOcc
#endif
collectPackageFromHI :: HasCallStack => Maybe ProjectKey -> PackageConfig -> [FilePath] -> IO (Maybe PackageDescr)
collectPackageFromHI mbProject packageConfig dbs = do
let pIdAndKey = getThisPackage packageConfig
debugM "leksah-server" $ "collectPackageFromHI " ++ show (packId pIdAndKey) ++ " " ++ show dbs
getSysLibDir mbProject (Just VERSION_ghc) >>= \case
Nothing -> return Nothing
Just libDir ->
inGhcIO libDir [] [] dbs $ \ dflags -> do
session <- getSession
exportedIfaceInfos <- getIFaceInfos pIdAndKey
(map exposedName $ exposedModules packageConfig) session
hiddenIfaceInfos <- getIFaceInfos pIdAndKey
(hiddenModules packageConfig) session
let pd = extractInfo dflags exportedIfaceInfos hiddenIfaceInfos pIdAndKey
[] -- TODO 6.12 (IPI.depends $ packageConfigToInstalledPackageInfo packageConfig))
deepseq pd (return $ if null exportedIfaceInfos && null hiddenIfaceInfos then Nothing else Just pd)
getIFaceInfos :: PackageIdAndKey -> [Module.ModuleName] -> HscEnv -> Ghc [(ModIface, FilePath)]
getIFaceInfos p modules _session = do
let pid = packId p
#if MIN_VERSION_ghc(8,2,0)
makeMod = mkModule (DefiniteUnitId (DefUnitId (packUnitId p)))
makeInstMod = InstalledModule (if isBase then toInstalledUnitId baseUnitId else packUnitId p)
#elif MIN_VERSION_ghc(8,0,0)
makeMod = mkModule (packUnitId p)
#else
makeMod = mkModule (packKey p)
#endif
isBase = pkgName pid == mkPackageName "base"
ifaces = mapM (\ mn -> findAndReadIface empty
#if MIN_VERSION_ghc(8,2,0)
(makeInstMod mn)
#endif
(if isBase
then mkBaseModule_ mn
else makeMod mn)
False) modules
hscEnv <- getSession
let gblEnv = IfGblEnv { if_rec_types = Nothing, if_doc = empty }
maybes <- Hs.liftIO $ initTcRnIf 'i' hscEnv gblEnv () ifaces
let res = mapMaybe handleErr maybes
return res
where
handleErr (M.Succeeded val) = Just val
handleErr (M.Failed _mess) = Nothing
-------------------------------------------------------------------------
-- | Convert a GHC module name into a Cabal 'ModuleName'.
--
-- The previous implementation used 'fromJust', which fails with an
-- uninformative \"fromJust: Nothing\" error; report the offending
-- module name instead.
converModuleName :: Module.ModuleName -> ModuleName
converModuleName mn =
  let str = moduleNameString mn
  in fromMaybe (error $ "converModuleName: unparsable module name " ++ str)
               (simpleParse str)
extractInfo :: DynFlags -> [(ModIface, FilePath)] -> [(ModIface, FilePath)] -> PackageIdAndKey ->
[PackageIdentifier] -> PackageDescr
extractInfo dflags ifacesExp ifacesHid pid buildDepends =
let allDescrs = concatMap (extractExportedDescrH dflags pid . fst)
(ifacesHid ++ ifacesExp)
mods = map (extractExportedDescrR dflags pid allDescrs . fst) ifacesExp
in PackageDescr {
pdPackage = packId pid
, pdModules = mods
, pdBuildDepends = buildDepends
, pdMbSourcePath = Nothing}
extractExportedDescrH :: DynFlags -> PackageIdAndKey -> ModIface -> [((ModuleName, OccName), Descr)]
extractExportedDescrH dflags pid iface =
let mid = converModuleName . moduleName $ mi_module iface
exportedNames = Set.fromList
$ map nameOccName
$ concatMap availNames
$ mi_exports iface
exportedDecls = filter (\ ifdecl -> nameOccName82 (ifName ifdecl) `Set.member` exportedNames)
(map snd (mi_decls iface))
in concatMap (extractIdentifierDescr dflags pid mid) exportedDecls
extractExportedDescrR :: DynFlags
-> PackageIdAndKey
-> [((ModuleName, OccName), Descr)]
-> ModIface
-> ModuleDescr
extractExportedDescrR dflags pid hidden iface =
let mid = converModuleName . moduleName $ mi_module iface
exportedNames = Set.fromList
$ map (\n -> (converModuleName . moduleName $ nameModule n, nameOccName n))
$ concatMap availNames
$ mi_exports iface
exportedDecls = filter (\ ifdecl -> (converModuleName . moduleName $ mi_module iface, nameOccName82 (ifName ifdecl))
`Set.member` exportedNames)
(map snd (mi_decls iface))
ownDecls = concatMap (extractIdentifierDescr dflags pid mid) exportedDecls
otherDecls = exportedNames `Set.difference` Set.fromList (map fst ownDecls)
reexported = map (Reexported . ReexportedDescr (Just (PM (packId pid) mid)) . snd)
$ filter (\k -> fst k `Set.member` otherDecls) hidden
inst = concatMap (extractInstances dflags (PM (packId pid) mid)) (mi_insts iface)
uses = Map.fromList . catMaybes $ map (extractUsages dflags) (mi_usages iface)
declsWithExp = map withExp ownDecls
withExp (n, Real d) = Real $ d{dscExported' = Set.member n exportedNames}
withExp _ = error "Unexpected Reexported"
in ModuleDescr {
mdModuleId = PM (packId pid) mid
, mdMbSourcePath = Nothing
, mdReferences = uses
, mdIdDescriptions = declsWithExp ++ inst ++ reexported}
extractIdentifierDescr :: DynFlags -> PackageIdAndKey -> ModuleName -> IfaceDecl -> [((ModuleName, OccName), Descr)]
extractIdentifierDescr dflags package mid decl
= let descr = RealDescr{
dscName' = T.pack . unpackFS . occNameFS . nameOccName82 $ ifName decl
, dscMbTypeStr' = Just . BS.pack . unlines . filter (any (not . isSpace)) . lines
. filterExtras . showSDocUnqual dflags $ ppr decl
, dscMbModu' = Just (PM (packId package) mid)
, dscMbLocation' = Nothing
, dscMbComment' = Nothing
, dscTypeHint' = VariableDescr
, dscExported' = True
}
in map ((mid, nameOccName82 $ ifName decl),) $ case decl of
IfaceId{} -> [Real descr]
IfaceData{ifName = name, ifCons = ifCons'}
-> let d = case ifCons' of
IfDataTyCon {}
-> let
fieldNames = concatMap (extractFields dflags) (visibleIfConDecls ifCons')
constructors' = extractConstructors dflags (nameOccName82 name) (visibleIfConDecls ifCons')
in DataDescr constructors' fieldNames
IfNewTyCon {}
-> let
fieldNames = concatMap (extractFields dflags) (visibleIfConDecls ifCons')
constructors' = extractConstructors dflags (nameOccName82 name) (visibleIfConDecls ifCons')
mbField = case fieldNames of
[] -> Nothing
[fn] -> Just fn
_ -> error $ "InterfaceCollector >> extractIdentifierDescr: "
++ "Newtype with more then one field"
constructor = case constructors' of
[c] -> c
_ -> error $ "InterfaceCollector >> extractIdentifierDescr: "
++ "Newtype with not exactly one constructor"
in NewtypeDescr constructor mbField
IfAbstractTyCon {} -> DataDescr [] []
#if !MIN_VERSION_ghc(8,0,0)
IfDataFamTyCon -> DataDescr [] []
#endif
in [Real (descr{dscTypeHint' = d})]
#if MIN_VERSION_ghc(8,2,0)
IfaceClass{ifBody = IfAbstractClass}
-> let
classOpsID = []
superclasses = []
in [Real descr{dscTypeHint' = ClassDescr superclasses classOpsID}]
IfaceClass{ifBody = IfConcreteClass{ifClassCtxt = context, ifSigs = ifSigs'}}
#else
IfaceClass{ifCtxt = context, ifSigs = ifSigs'}
#endif
-> let
classOpsID = map (extractClassOp dflags) ifSigs'
superclasses = extractSuperClassNames context
in [Real descr{dscTypeHint' = ClassDescr superclasses classOpsID}]
IfaceAxiom {}
-> [Real descr]
IfaceSynonym {}
-> [Real $ descr{dscTypeHint' = TypeDescr}]
IfaceFamily {}
-> [Real $ descr{dscTypeHint' = TypeDescr}]
IfacePatSyn {}
-> [Real descr{dscTypeHint' = PatternSynonymDescr}]
extractConstructors :: DynFlags -> OccName -> [IfaceConDecl] -> [SimpleDescr]
extractConstructors dflags name = map (\decl -> SimpleDescr (T.pack . unpackFS $occNameFS (nameOccName82 $ ifConName decl))
(Just (BS.pack $ filterExtras $ showSDocUnqual dflags $
#if MIN_VERSION_ghc(8,4,0)
pprIfaceForAllPart (ifConUserTvBinders decl)
#else
pprIfaceForAllPart (ifConExTvs decl)
#endif
(eq_ctxt decl ++ ifConCtxt decl) (pp_tau decl)))
Nothing Nothing True)
where
pp_tau decl = case map pprParendIfaceType (ifConArgTys decl) ++ [pp_res_ty decl] of
(t:ts) -> fsep (t : map (arrow <+>) ts)
[] -> panic "pp_con_taus"
pp_res_ty _decl = ppr name <+> fsep [] -- TODO figure out what to do here
#if MIN_VERSION_ghc(8,8,0)
eq_ctxt decl = [IfaceTyConApp (toIfaceTyCon_name eqTyConName) (IA_Arg (IfaceTyVar tv) Required (IA_Arg ty Required IA_Nil))
#elif MIN_VERSION_ghc(8,2,0)
eq_ctxt decl = [IfaceTyConApp (toIfaceTyCon_name eqTyConName) (ITC_Vis (IfaceTyVar tv) (ITC_Vis ty ITC_Nil))
#elif MIN_VERSION_ghc(8,0,0)
eq_ctxt decl = [IfaceTyConApp (IfaceTyCon eqTyConName NoIfaceTyConInfo) (ITC_Vis (IfaceTyVar tv) (ITC_Vis ty ITC_Nil))
#else
eq_ctxt decl = [IfaceTyConApp (IfaceTc eqTyConName) (ITC_Type (IfaceTyVar tv) (ITC_Type ty ITC_Nil))
#endif
| (tv,ty) <- ifConEqSpec decl]
-- | Extract the record fields of a constructor as 'SimpleDescr's,
-- pairing each field selector with the corresponding argument type.
extractFields :: DynFlags -> IfaceConDecl -> [SimpleDescr]
extractFields dflags decl =
  -- zipWith replaces the previous map-over-zip-of-two-maps construction
  zipWith mkField (ifConFields decl) (ifConArgTys decl)
  where
    mkField fld ty =
      SimpleDescr (extractFieldNames . nameOccName82 $ flSelector fld)
                  (extractType dflags ty)
                  Nothing Nothing True
-- | Pretty-print an interface type (GHC annotations stripped) as a
-- 'ByteString'; always 'Just'.
extractType :: DynFlags -> IfaceType -> Maybe ByteString
extractType dflags ty =
  Just . BS.pack . filterExtras . showSDocUnqual dflags $ ppr ty

-- | The textual name of a field selector.
extractFieldNames :: OccName -> Text
extractFieldNames = T.pack . unpackFS . occNameFS
-- | Describe a single class method: its name and pretty-printed type.
extractClassOp :: DynFlags -> IfaceClassOp -> SimpleDescr
extractClassOp dflags (IfaceClassOp opName opType _defaultMethod) =
  SimpleDescr methodName methodType Nothing Nothing True
  where
    methodName = T.pack . unpackFS . occNameFS $ nameOccName82 opName
    methodType = Just . BS.pack $ showSDocUnqual dflags (ppr opType)
extractSuperClassNames :: [IfacePredType] -> [Text]
extractSuperClassNames = mapMaybe extractSuperClassName
where
#if !MIN_VERSION_ghc(7,3,0)
extractSuperClassName (IfaceClassP name _) =
Just (unpackFS $occNameFS $ nameOccName name)
#endif
extractSuperClassName _ = Nothing
extractInstances :: DynFlags
-> PackModule
-> IfaceClsInst
-> [Descr]
extractInstances dflags pm ifaceInst =
let className = showSDocUnqual dflags $ ppr $ ifInstCls ifaceInst
dataNames = map (T.pack . showSDocUnqual dflags . ppr) . catMaybes $ ifInstTys ifaceInst
in [Real RealDescr
{ dscName' = T.pack className
, dscMbTypeStr' = Nothing
, dscMbModu' = Just pm
, dscMbLocation' = Nothing
, dscMbComment' = Nothing
, dscTypeHint' = InstanceDescr dataNames
, dscExported' = False}]
-- | Map a module 'Usage' from the interface file to (module name, set
-- of used identifiers).  Package usages carry no identifier list; file
-- (and merged-requirement) usages carry no module information at all.
-- NOTE(review): 'fromJust . simpleParse' is partial — crashes on a
-- module name Cabal can't parse; confirm this cannot occur here.
extractUsages :: DynFlags -> Usage -> Maybe (ModuleName, Set Text)
extractUsages _ UsagePackageModule {usg_mod = usg_mod'} =
  let name = (fromJust . simpleParse . moduleNameString) (moduleName usg_mod')
  in Just (name, Set.fromList [])
extractUsages dflags UsageHomeModule {usg_mod_name = usg_mod_name', usg_entities = usg_entities'} =
  let name = (fromJust . simpleParse . moduleNameString) usg_mod_name'
      ids = map (T.pack . showSDocUnqual dflags . ppr . fst) usg_entities'
  in Just (name, Set.fromList ids)
extractUsages _ UsageFile {} = Nothing
#if MIN_VERSION_ghc(8,2,0)
extractUsages _ UsageMergedRequirement {} = Nothing
#endif
-- | Strip GHC-specific annotations from pretty-printed interface text:
-- @{- ... -}@ blocks are removed entirely, and the \"RecFlag\",
-- \"Generics:\" and \"FamilyInstance:\" markers are dropped together
-- with the word that follows them.
filterExtras, filterExtras' :: String -> String
filterExtras [] = []
filterExtras s@(c:cs)
  | isPrefix "{-"              = filterExtras' (after "{-")
  | isPrefix "RecFlag"         = filterExtras (skipNextWord (after "RecFlag"))
  | isPrefix "Generics:"       = filterExtras (skipNextWord (after "Generics:"))
  | isPrefix "FamilyInstance:" = filterExtras (skipNextWord (after "FamilyInstance:"))
  | otherwise                  = c : filterExtras cs
  where
    isPrefix p = take (length p) s == p
    after p    = drop (length p) s

-- Inside a @{- ... -}@ block: discard everything up to and including
-- the closing @-}@, then resume normal filtering.
filterExtras' s =
  case s of
    '-':'}':rest -> filterExtras rest
    _:rest       -> filterExtras' rest
    []           -> []

-- | Skip leading whitespace and the word that follows it.  The word is
-- terminated by a newline (which is consumed) or by any other
-- whitespace character (which is kept in the result).
skipNextWord, skipNextWord' :: String -> String
skipNextWord = skipNextWord' . dropWhile isSpace

skipNextWord' s =
  let (_, rest) = break isSpace s
  in case rest of
       '\n':afterNewline -> afterNewline
       _                 -> rest
| leksah/leksah-server | src/IDE/Metainfo/InterfaceCollector.hs | gpl-2.0 | 18,090 | 0 | 26 | 6,439 | 4,016 | 2,118 | 1,898 | 267 | 13 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Types where
import ClassyPrelude
import Database.Persist.TH
-- | Template-Haskell splice generating the @Article@ entity (record
-- type, JSON instances via the @json@ flag, the @UniqueUrl@ constraint
-- and the @migrateAll@ migration).  NOTE(review): comments must stay
-- outside the quasiquote — its contents are parsed by persistent.
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Article json
    title Text
    url Text
    downloaded Bool
    UniqueUrl url
    deriving Show Eq
|]
| sebastianpoeplau/servant-experiment | src/Types.hs | gpl-2.0 | 753 | 0 | 7 | 232 | 48 | 33 | 15 | 15 | 0 |
-- Map declaration as in PFDS Appendix A, p 204
{-# OPTIONS_GHC -XMultiParamTypeClasses #-}
module FiniteMap(FiniteMap(..)) where
-- m: concrete map type, k: key type, a: value type
-- Note: changed the name of the lookup function to "find", to avoid having
-- hide "lookup" in Prelude.
-- | Abstract interface for finite maps (PFDS Appendix A): @m@ is the
-- concrete map type, @k@ the key type and @a@ the value type.
class FiniteMap m k where
  -- the map with no bindings
  empty :: m k a
  -- add (or replace) a binding
  bind :: k -> a -> m k a -> m k a
  -- look a key up; 'Nothing' when absent
  find :: k -> m k a -> Maybe a
| fishbee/pfds-haskell | chapter2/FiniteMap.hs | gpl-3.0 | 403 | 0 | 10 | 92 | 87 | 47 | 40 | -1 | -1 |
module Main (
main
) where
-- | Print the string @"hi"@ (with quotes, via 'Show') to stdout.
main :: IO ()
main = print "hi"
| adarqui/Darqios | examples/simple.hs | gpl-3.0 | 65 | 0 | 7 | 18 | 30 | 16 | 14 | 5 | 1 |
module RenamerSpec (spec) where
import Test.Hspec
import Language.Mulang.Ast
import Language.Mulang.Transform.Renamer (rename)
import Language.Mulang.Parsers.JavaScript (js)
-- | Tests for 'rename': variables become @mulang_var_nK@ and parameters
-- become @mulang_param_nK@, numbered in order of appearance; unknown
-- references are left untouched and numbering restarts per computation.
spec :: Spec
spec = do
  describe "rename" $ do
    it "renames empty asts" $ do
      (rename (js "")) `shouldBe` None
    it "renames single var" $ do
      (rename (js "let x = 1")) `shouldBe` (js "let mulang_var_n0 = 1")
    it "renames two vars" $ do
      (rename (js "let x = 1; let y = 2;")) `shouldBe` (js "let mulang_var_n0 = 1; let mulang_var_n1 = 2;")
    it "renames three vars" $ do
      (rename (js "let x = 1; let y = 2; let z = 3;")) `shouldBe` (
        js "let mulang_var_n0 = 1; let mulang_var_n1 = 2; let mulang_var_n2 = 3;")
    it "renames references" $ do
      (rename (js "let x = 1; console.log(x * 2)")) `shouldBe` (js "let mulang_var_n0 = 1; console.log(mulang_var_n0 * 2)")
    it "renames three vars with references" $ do
      (rename (js "let x = 1; let y = 2; let z = x + f(y);")) `shouldBe` (
        js "let mulang_var_n0 = 1; let mulang_var_n1 = 2; let mulang_var_n2 = mulang_var_n0 + f(mulang_var_n1);")
    it "does not rename unknown references" $ do
      (rename (js "console.log(x * 2)")) `shouldBe` (js "console.log(x * 2)")
    it "renames generator param" $ do
      (rename (js "for (let x of xs) { console.log(x) }")) `shouldBe` (js "for (let mulang_param_n0 of xs) { console.log(mulang_param_n0) }")
    it "renames single param" $ do
      (rename (js "function f(x) {}")) `shouldBe` (js "function f(mulang_param_n0) {}")
    it "renames multiple param" $ do
      (rename (js "function f(x, y) {}")) `shouldBe` (js "function f(mulang_param_n0, mulang_param_n1) {}")
    it "renames multiple params with references" $ do
      (rename (js "function f(x, y) { return x + y }")) `shouldBe` (
        js "function f(mulang_param_n0, mulang_param_n1) { return mulang_param_n0 + mulang_param_n1 }")
    it "renames multiple params with mixed references" $ do
      (rename (js "let y = 0; function f(x) { return x + y }")) `shouldBe` (
        js "let mulang_var_n0 = 0; function f(mulang_param_n0) { return mulang_param_n0 + mulang_var_n0 }")
    it "resets references renames across multiple computations" $ do
      (rename (js "function f(x) { return 2 * x }; function g(x) { return 2 * x };")) `shouldBe` (
        js "function f(mulang_param_n0) { return 2 * mulang_param_n0 }; function g(mulang_param_n0) { return 2 * mulang_param_n0 }")
| mumuki/mulang | spec/RenamerSpec.hs | gpl-3.0 | 2,519 | 0 | 17 | 597 | 539 | 270 | 269 | 39 | 1 |
getChar :: () -> IO Char | hmemcpy/milewski-ctfp-pdf | src/content/3.5/code/haskell/snippet30.hs | gpl-3.0 | 24 | 0 | 6 | 5 | 15 | 7 | 8 | 1 | 0 |
{-# OPTIONS -fno-warn-missing-signatures #-}
module Editor.Config(
overlayDocKeys,
quitKeys, undoKeys,
-- cutKeys, pasteKeys,
makeBranchKeys, delBranchKeys,
-- actionKeys,
-- moveToParentKeys,
addParamKeys, delParamKeys,
exprFocusDelegatorKeys)
where
import qualified Graphics.UI.Bottle.EventMap as E
import qualified Graphics.UI.Bottle.Widgets.FocusDelegator as FocusDelegator
-- Helpers for building key events: a modifier group applied to a key.
group = E.KeyEventType
ctrl = group E.ctrl . E.charKey
alt = group E.alt . E.charKey
simple = group E.noMods
-- pasteKeys = [ctrl 'v']
-- cutKeys = [ctrl 'x']
-- actionKeys = [simple E.KeyEnter]
-- Editor key bindings; each action may be bound to several keys.
quitKeys = [ctrl 'q']
undoKeys = [ctrl 'z']
makeBranchKeys = [ctrl 's']
-- moveToParentKeys = [group E.alt E.KeyLeft]
overlayDocKeys = [simple E.KeyF1, alt 'h']
addParamKeys = [alt 'p']
delParamKeys = [alt 'o']
-- NOTE(review): same binding as delParamKeys — confirm intentional.
delBranchKeys = [alt 'o']
exprFocusDelegatorKeys = FocusDelegator.Keys {
  FocusDelegator.startDelegatingKey = group E.shift E.KeyRight,
  FocusDelegator.stopDelegatingKey = group E.shift E.KeyLeft
} | alonho/bottle | examples/Editor/Config.hs | gpl-3.0 | 1,098 | 0 | 8 | 235 | 238 | 140 | 98 | 23 | 1 |
-- P59: all height-balanced binary trees of exact height n, every node
-- carrying the value x.
data Tree a = Empty | Branch a (Tree a) (Tree a) deriving (Show, Eq)

p59 :: a -> Int -> [Tree a]
p59 x n = allTrees !! n
  where
    -- allTrees !! h lists the balanced trees of height h; a tree of
    -- height h+2 combines subtrees of heights h+1 and h (at least one
    -- child must have the larger height).
    allTrees = [Empty] : [Branch x Empty Empty]
               : zipWith grow (tail allTrees) allTrees
    grow taller shorter =
      [ Branch x left right
      | (lefts, rights) <- [(taller, taller), (taller, shorter), (shorter, taller)]
      , left <- lefts
      , right <- rights ]
| yalpul/CENG242 | H99/54-60/p59.hs | gpl-3.0 | 421 | 0 | 13 | 153 | 196 | 107 | 89 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module WebParsing.UTSCParser
(parseUTSC) where
import Network.HTTP
import Text.HTML.TagSoup
import Text.HTML.TagSoup.Match
import Database.Persist.Sqlite
import qualified Data.Text as T
import Database.Tables as Tables
import WebParsing.ParsingHelp
-- | Base URL of the UTSC registrar's calendar pages.
utscCalendarUrl :: String
utscCalendarUrl = "http://www.utsc.utoronto.ca/~registrar/calendars/calendar/"

-- | Pull the department page links out of the table-of-contents tag soup.
-- Skips everything before the "Programs and Courses" heading, drops the
-- circle-class <ul> blocks, keeps only anchors that carry exactly an
-- href attribute, and stops at the "Admissions.html" link.
getDeptList :: [Tag String] -> [String]
getDeptList tags =
  let beforeList = dropWhile (/= TagText "\nPrograms and Courses\n") tags
      removeUls = dropBetweenAll (== TagOpen "ul" [("class", "circle")]) (== TagClose "ul") beforeList
      hrefs = filter (tagOpen (== "a") isHref) removeUls
  in takeWhile (/= "Admissions.html") $ map getAttribute hrefs
  where
    -- Only anchors whose sole attribute is href survive the filter above.
    isHref [("href", _)] = True
    isHref _ = False
    -- NOTE(review): partial -- assumes exactly one attribute, which the
    -- isHref filter guarantees for the tags reaching it.
    getAttribute (TagOpen _ [(_, link)]) = link
-- | Download one department calendar page, parse each course section
-- into a 'Course', and insert the results into the SQLite database.
getCalendar :: String -> IO ()
getCalendar str = do
  rsp <- simpleHTTP (getRequest (utscCalendarUrl ++ str))
  body <- getResponseBody rsp
  let tags = filter isntComment $ parseTags (T.pack body)
      -- Course listings sit after the last <h2> and before the pdf_files div.
      coursesSoup = takeWhile (/= TagOpen "div" [("id", "pdf_files")]) $ lastH2 tags
      -- One tag-list per course, keeping only the text tags.
      courses = map (filter (tagText (\_ -> True))) $ partitions isCourseTitle coursesSoup
      course = map processCourseToData courses
  print ("parsing: " ++ str)
  runSqlite databasePath $ do
    runMigration migrateAll
    mapM_ insertCourse course
  where
    isntComment (TagComment _) = False
    isntComment _ = True
    -- NOTE(review): 'last' is partial -- assumes every page has an <h2>.
    lastH2 = last . sections (tagOpen (== "h2") (\_ -> True))
    -- A course anchor carries a "name" attribute of exactly 8 characters
    -- (the course code, e.g. ABCD12H3).
    isCourseTitle (TagOpen _ attrs) = any (\x -> fst x == "name" && T.length (snd x) == 8) attrs
    isCourseTitle _ = False
-- | Pull the course code and course title out of the first two text
-- tags of a course's tag soup, storing them in the accumulated Course.
-- The two consumed tags are dropped before passing the rest along.
parseTitleUTSC :: CoursePart -> CoursePart
parseTitleUTSC (tags, course) =
    (remainder, course {name = code, title = Just heading})
  where
    code      = fromTagText (tags !! 0)
    heading   = fromTagText (tags !! 1)
    remainder = drop 2 tags
{----------------------------------------------------------------------------------------
 Build a 'Course' record from the list of tags representing one course,
 by threading an empty course through the parsing pipeline below
 (title, description, prerequisites, corequisites, exclusions, etc.).
----------------------------------------------------------------------------------------}
processCourseToData :: [Tag T.Text] -> Course
processCourseToData tags =
  let course = emptyCourse
  in snd $ (tags, course) ~:
           preProcess -:
           parseTitleUTSC -:
           parseDescription -:
           parsePrerequisite -:
           parseCorequisite -:
           parseExclusion -:
           parseRecommendedPrep -:
           parseDistAndBreadth
-- | Entry point: fetch the UTSC table of contents, extract the list of
-- department pages, and parse each one into the database.
parseUTSC :: IO ()
parseUTSC = do
  rsp <- simpleHTTP (getRequest (utscCalendarUrl ++ "Table_of_Contents.html"))
  body <- getResponseBody rsp
  let depts = getDeptList $ parseTags body
  putStrLn "Parsing UTSC Calendar..."
  mapM_ getCalendar depts
| arkon/courseography | hs/WebParsing/UTSCParser.hs | gpl-3.0 | 2,871 | 0 | 17 | 644 | 800 | 415 | 385 | 62 | 3 |
module Chap02.Exercise03 where
import Chap02.Data.Set ()
import Chap02.Data.UnbalancedSet
import Data.Maybe
import Control.Applicative (liftA3, pure, (<$>))
-- | Insert a value into an unbalanced binary search tree. When the value
-- is already present the original set is returned unchanged: the worker
-- signals "already there" with Nothing, so no path is rebuilt.
insert :: Ord a => a -> UnbalancedSet a -> UnbalancedSet a
insert x t = maybe t BST (descend x (unBST t))
  where
    descend :: Ord a => a -> BinaryTree a -> Maybe (BinaryTree a)
    descend v E = Just (T E v E)
    descend v (T lhs root rhs)
      | v < root  = (\lhs' -> T lhs' root rhs) <$> descend v lhs
      | v > root  = T lhs root <$> descend v rhs
      | otherwise = Nothing
| stappit/okasaki-pfds | src/Chap02/Exercise03.hs | gpl-3.0 | 505 | 0 | 12 | 142 | 252 | 126 | 126 | 13 | 2 |
import Language.Dockerfile
-- | Parse the Dockerfile in the current directory and print the
-- resulting parse (or parse error) to stdout.
main = do
  ef <- parseFile "./Dockerfile"
  print ef
| beijaflor-io/haskell-language-dockerfile | examples/parse.hs | gpl-3.0 | 85 | 0 | 8 | 19 | 27 | 12 | 15 | 4 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
-----------------------------------------------------------------------------
-- |
-- Module : HEP.Parser.LHE.Sanitizer.Action
-- Copyright : (c) 2013 Ian-Woo Kim
--
-- License : GPL-3
-- Maintainer : Ian-Woo Kim <ianwookim@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-- shuffling events to randomize LHE file
--
-----------------------------------------------------------------------------
module HEP.Parser.LHE.Sanitizer.Action where
import Control.Monad.State
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Text.IO as TIO
import System.IO
import System.Random.Shuffle
import Text.XML.Conduit.Parse.Util
--
import Data.Conduit.Util.Control
import HEP.Parser.LHE.Conduit
import HEP.Parser.LHE.DecayTop
import HEP.Parser.LHE.Formatter
import HEP.Parser.LHE.Type
--
import HEP.Parser.LHE.Sanitizer.Reconnect
import HEP.Parser.LHE.Sanitizer.Util
--
-- | Randomly permute the events of an LHE file: the header is copied
-- verbatim, all events are read into memory, shuffled, and written back
-- out followed by the closing LesHouchesEvents tag.
shuffle :: FilePath -> FilePath -> IO ()
shuffle ifn ofn =
  fileProcInOut ifn ofn $ \(InFile ih) (OutFile oh) -> do
    (hdr,evs) <- parseXmlFile ih $ do
                   header <- textLHEHeader
                   rs <- parseEvent =$ CL.consume
                   return (header,rs)
    mapM_ (TIO.hPutStr oh) hdr
    evs' <- shuffleM evs
    mapM_ (hPrintEv oh) evs'
    hPutStrLn oh "</LesHouchesEvents>\n\n"
-- | Eliminate particles with the given PDG ids from every event.
-- Events that contain an on-shell occurrence of one of the ids take the
-- elimination branch; all others are copied through unchanged.
-- NOTE(review): original author notes this "seems to have bugs when
-- having longer cascades" -- treat long decay chains with care.
eliminate :: [Int] -> FilePath -> FilePath -> IO ()
eliminate pids ifn ofn = (fileProcInOut ifn ofn . preserveHeaderAndProcessEvents) $ \h ->
  doBranchE (checkAndFilterOnShell (Just pids)) (elimAction h) (preserveAction h)

-- | Replace particles of the given PDG ids with the paired replacement id.
replace :: [(Int,Int)] -> FilePath -> FilePath -> IO ()
replace pids ifn ofn = (fileProcInOut ifn ofn . preserveHeaderAndProcessEvents) $ \h ->
  awaitForever $ liftIO . replaceAction h pids

-- | Remove all internal (on-shell intermediate) particles from every event.
blobize :: FilePath -> FilePath -> IO ()
blobize ifn ofn = (fileProcInOut ifn ofn . preserveHeaderAndProcessEvents) $ \h ->
  doBranchE (checkAndFilterOnShell Nothing) (blobAction h) (preserveAction h)
-- | Copy the LHE header straight to the output handle, then feed each
-- parsed event (with its decay tops reconstructed) to the supplied sink,
-- finishing with the closing LesHouchesEvents tag.
preserveHeaderAndProcessEvents :: (Handle -> Sink LHEventTop (StateT Int IO) ())
                               -> InFileHandle
                               -> OutFileHandle
                               -> IO ()
preserveHeaderAndProcessEvents someAction (InFile ih) (OutFile oh) = do
    let iter = do
          header <- textLHEHeader
          liftIO $ mapM_ (TIO.hPutStr oh) $ header
          parseEvent =$ process
        process = processinside oh
        processinside h = decayTopConduit =$ someAction h
    -- The Int state is a counter threaded through the sink; its final
    -- value is discarded here.
    flip runStateT (0::Int) (parseXmlFile ih iter)
    hPutStrLn oh "</LesHouchesEvents>\n\n"
-- | Restrict an event's decay tops to the selected on-shell particles and
-- classify the result: Left when at least one matching decay top remains
-- (on-shell content present), Right when none do.
checkAndFilterOnShell :: Maybe [PDGID]
                      -> LHEventTop
                      -> Either LHEventTop LHEventTop
                      -- ^ left is on-shell, right is off-shell
checkAndFilterOnShell mpids (LHEventTop ev pmap dtops) =
  let dtops' = filterOnShellFromDecayTop mpids dtops
  in if (not.null) dtops'
     then Left (LHEventTop ev pmap dtops')
     else Right (LHEventTop ev pmap dtops')
-- | Rewrite particle PDG ids according to an association list: any
-- particle whose 'idup' appears as a key gets the paired replacement id;
-- every other particle is left untouched. Event info is unchanged.
replacePDGID :: [(PDGID,PDGID)] -> LHEvent -> LHEvent
replacePDGID table (LHEvent einfo pinfos) = LHEvent einfo (map swapId pinfos)
  where
    swapId p = maybe p (\fresh -> p { idup = fresh }) (lookup (idup p) table)
-- | Keep only the 'Decay' nodes, and -- when a PDG id list is given --
-- only those whose id is in the list. 'Nothing' keeps every Decay node
-- (i.e. every on-shell intermediate). Non-Decay tops are always dropped.
filterOnShellFromDecayTop :: Maybe [PDGID] -- ^ Nothing then filter all on-shell intermediates
                          -> [DecayTop PtlIDInfo]
                          -> [DecayTop PtlIDInfo]
filterOnShellFromDecayTop mpids lst =
  let worker x acc =
        case x of
          Decay (PIDInfo pid' _, _) -> maybe (x:acc) (\pids->if (pid' `elem` pids) then x:acc else acc) mpids
          _ -> acc
  in foldr worker [] lst

-- | Particle id of a Decay node.
-- NOTE(review): partial -- any non-Decay constructor hits the 'error'
-- branch; callers must only pass Decay tops.
getPtlID :: DecayTop PtlIDInfo -> PtlID
getPtlID (Decay (pidinfo,_)) = ptlid . ptlinfo $ pidinfo
getPtlID x = error $ "in getPtlID " ++ (show x)
-- | Write the event to the handle unchanged, wrapped in <event> tags.
preserveAction :: Handle -> LHEventTop -> IO ()
preserveAction h (LHEventTop ev _pmap _dtops) = do
  hPutStrLn h "<event>"
  hPutStrLn h (formatLHEvent ev)
  hPutStrLn h "</event>"

-- | Write the event after removing the particles selected for
-- elimination (via 'cleanUpAll'), fixing up the particle count 'nup'
-- to match the surviving particles.
elimAction :: Handle -> LHEventTop -> IO ()
elimAction h (LHEventTop ev pmap dtops) = do
  hPutStrLn h "<event>"
  case ev of
    LHEvent einfo _ -> do
      let newpinfos = cleanUpAll (ev,pmap,dtops)
          n = Prelude.length newpinfos
      (hPutStrLn h . formatLHEvent) (LHEvent einfo { nup = n } newpinfos)
  hPutStrLn h "</event>"
-- | Repeatedly strip on-shell intermediates from the event ('go' loops
-- until 'checkAndFilterOnShell Nothing' reports no intermediates remain),
-- then write the fully "blobized" event.
blobAction :: Handle -> LHEventTop -> IO ()
blobAction h t = do
    let LHEventTop ev pmap dtops = go t
    hPutStrLn h "<event>"
    {- case ev of
         LHEvent einfo _ -> do
           let newpinfos = cleanUpAll (ev,pmap,dtops)
               n = Prelude.length newpinfos
           (hPutStrLn h . formatLHEvent) (LHEvent einfo { nup = n } newpinfos) -}
    (hPutStrLn h . formatLHEvent) ev
    hPutStrLn h "</event>"
  where -- One stripping pass: clean up particles, recount, rebuild the
        -- decay tops, and recurse while on-shell content is still found.
        go (LHEventTop ev@(LHEvent einfo _) pmap dtops) =
          let newpinfos = cleanUpAll (ev,pmap,dtops)
              n = Prelude.length newpinfos
              nev = LHEvent einfo { nup = n } newpinfos
              ndtop = getDecayTop nev
          in case checkAndFilterOnShell Nothing ndtop of
               Left nndtop -> go nndtop
               Right nndtop' -> nndtop'
-- | Write the event with its PDG ids rewritten per the replacement list.
replaceAction :: Handle -> [(Int,Int)] -> LHEventTop -> IO ()
replaceAction h pids (LHEventTop ev _pmap _dtops) = do
  hPutStrLn h "<event>"
  let ev' = replacePDGID pids ev
  hPutStrLn h (formatLHEvent ev')
  hPutStrLn h "</event>"
| wavewave/LHE-sanitizer | src/HEP/Parser/LHE/Sanitizer/Action.hs | gpl-3.0 | 5,847 | 0 | 17 | 1,551 | 1,615 | 828 | 787 | 110 | 3 |
{-
Copyright 2013 Mario Pastorelli (pastorelli.mario@gmail.com)
This file is part of HSProcess.
HSProcess is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
HSProcess is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with HSProcess. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE ExtendedDefaultRules,OverloadedStrings #-}
module System.Console.HSProcess.Representable.Test where
import qualified Data.ByteString.Lazy.Char8 as C8
import qualified Test.Framework as TF
import Test.Framework.Providers.HUnit (hUnitTestToTests)
import Test.HUnit
import Data.Map (Map)
import qualified Data.Map as M
import Data.Set (Set)
import qualified Data.Set as S
import System.Console.HSProcess.Representable
import Test.Hspec
-- | Behavioural specification of 'repr': each scalar renders as a
-- single-element list of its textual form, containers render one line
-- per element, and nesting flattens with spaces (and tabs for nested
-- maps inside lists).
spec :: Spec
spec = describe "repr" $ do
  it "can convert boolean values" $ do
    repr True `shouldBe` ["True"]
    repr False `shouldBe` ["False"]
  it "can convert char values" $ example $
    repr 'c' `shouldBe` ["c"]
  it "can convert double values" $ example $
    repr (1.1::Double) `shouldBe` ["1.1"]
  it "can convert float values" $ example $
    repr (1.1::Float) `shouldBe` ["1.1"]
  it "can convert int values" $ example $
    repr (1::Int) `shouldBe` ["1"]
  it "can convert integer values" $ example $
    repr (1::Integer) `shouldBe` ["1"]
  it "can convert maybe values" $ do
    -- Nothing renders as an empty string, Just is transparent.
    example $ repr (Nothing::Maybe ()) `shouldBe` [""]
    example $ repr (Just 1::Maybe Int) `shouldBe` ["1"]
    example $ repr (Just (Just True)) `shouldBe` ["True"]
  it "can convert unit value" $
    repr () `shouldBe` [""]
  it "can convert string values" $ do
    example $ repr "" `shouldBe` [""]
    example $ repr "word" `shouldBe` ["word"]
    example $ repr "word word" `shouldBe` ["word word"]
  it "can convert tuple values" $ do
    example $ repr (1,True) `shouldBe` ["1","True"]
    example $ repr ((1,2),False) `shouldBe` ["1 2","False"]
  it "can convert list values" $ do
    repr ([]::[()]) `shouldBe` []
    example $ repr [True] `shouldBe` ["True"]
    example $ repr [True,False] `shouldBe` ["True","False"]
    example $ repr [Just 1,Nothing] `shouldBe` ["1",""]
    example $ repr [[1,2,3],[4,5,6]] `shouldBe` ["1 2 3","4 5 6"]
    example $ repr ["w w","w w"] `shouldBe` ["w w","w w"]
    example $ repr [["w w"],["w w"]] `shouldBe` ["w w","w w"]
  it "can convert map values" $ do
    repr (M.empty::Map Bool Bool) `shouldBe` []
    example $ repr (M.fromList [(1,2),(3,4)]) `shouldBe` ["1 2","3 4"]
    example $ repr ([M.fromList [(1,2),(3,4)]]) `shouldBe` ["1 2\t3 4"]
  it "can convert set values" $ do
    repr (S.empty::Set Bool) `shouldBe` []
    example $ repr (S.fromList [1,2,3,4]) `shouldBe` ["1","2","3","4"]
    example $ repr ([S.fromList [1,2]]) `shouldBe` ["1 2"]
| melrief/HSProcess | tests/System/Console/HSProcess/Representable/Test.hs | gpl-3.0 | 3,344 | 0 | 18 | 780 | 1,048 | 581 | 467 | 56 | 1 |
module SeedyPelias where
import Data.Char
import Data.String.Utils
import Pelias
-- | Extract a value from a JSON string using the "seedy" optimiser,
-- which prunes the raw input before the normal extraction runs.
seedyExtract :: [JSONOperation] -> String -> Maybe Value
seedyExtract = optimisedExtract seedyOptimiser

-- | When the first operation is an array 'Index', narrow the raw JSON
-- text down to just the i-th element before further processing; any
-- other operation list passes the input through untouched.
seedyOptimiser :: Optimiser
seedyOptimiser (Index i : ops) json = (reform $ (substr start end) splitJSON, ops)
  where
    -- Chunks of the input delimited by '{' (the first chunk has no brace).
    splitJSON = (split "{") json
    substr :: Int -> Int -> [a] -> [a]
    substr s e = (drop s) . (take e)
    -- Re-attach the '{' that 'split' stripped from each chunk.
    reform :: [String] -> String
    reform = concat . (map (\x -> '{' : x))
    -- Chunk index at which element x of the array starts, found by walking
    -- the running brace balance until it exceeds 1 + 2*x.
    -- NOTE(review): relies on countStrBalance (Pelias) and presumably
    -- assumes the indexed elements are objects -- verify against callers.
    indexItemRange :: Int -> Int
    indexItemRange x = (length . takeWhile (< (1 + 2 * x))) bracketMap
    bracketMap = scanl (+) 0 $ (map (succ . countStrBalance)) $ (drop 2) $ splitJSON
    (start, end) = (indexItemRange i, indexItemRange (i + 1))
seedyOptimiser ops json = (json, ops)
| ivanmoore/seedy | src/SeedyPelias.hs | gpl-3.0 | 815 | 0 | 14 | 199 | 330 | 179 | 151 | 18 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeBuyer.PretargetingConfig.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing pretargeting config. This method supports patch
-- semantics.
--
-- /See:/ <https://developers.google.com/ad-exchange/buyer-rest Ad Exchange Buyer API Reference> for @adexchangebuyer.pretargetingConfig.patch@.
module Network.Google.Resource.AdExchangeBuyer.PretargetingConfig.Patch
(
-- * REST Resource
PretargetingConfigPatchResource
-- * Creating a Request
, pretargetingConfigPatch
, PretargetingConfigPatch
-- * Request Lenses
, pcpPayload
, pcpAccountId
, pcpConfigId
) where
import Network.Google.AdExchangeBuyer.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangebuyer.pretargetingConfig.patch@ method which the
-- 'PretargetingConfigPatch' request conforms to.
-- (Machine-generated servant-style route: PATCH with a JSON body and two
-- path captures; see the module's auto-generated stability note.)
type PretargetingConfigPatchResource =
     "adexchangebuyer" :>
       "v1.4" :>
         "pretargetingconfigs" :>
           Capture "accountId" (Textual Int64) :>
             Capture "configId" (Textual Int64) :>
               QueryParam "alt" AltJSON :>
                 ReqBody '[JSON] PretargetingConfig :>
                   Patch '[JSON] PretargetingConfig

-- | Updates an existing pretargeting config. This method supports patch
-- semantics.
--
-- /See:/ 'pretargetingConfigPatch' smart constructor.
data PretargetingConfigPatch = PretargetingConfigPatch'
    { _pcpPayload :: !PretargetingConfig
    , _pcpAccountId :: !(Textual Int64)
    , _pcpConfigId :: !(Textual Int64)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PretargetingConfigPatch' with the minimum fields required to make a request.
--
-- The Int64 ids are wrapped via '_Coerce' into 'Textual' form for the URL.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pcpPayload'
--
-- * 'pcpAccountId'
--
-- * 'pcpConfigId'
pretargetingConfigPatch
    :: PretargetingConfig -- ^ 'pcpPayload'
    -> Int64 -- ^ 'pcpAccountId'
    -> Int64 -- ^ 'pcpConfigId'
    -> PretargetingConfigPatch
pretargetingConfigPatch pPcpPayload_ pPcpAccountId_ pPcpConfigId_ =
    PretargetingConfigPatch'
    { _pcpPayload = pPcpPayload_
    , _pcpAccountId = _Coerce # pPcpAccountId_
    , _pcpConfigId = _Coerce # pPcpConfigId_
    }
-- | Multipart request metadata.
pcpPayload :: Lens' PretargetingConfigPatch PretargetingConfig
pcpPayload
  = lens _pcpPayload (\ s a -> s{_pcpPayload = a})

-- | The account id to update the pretargeting config for.
-- (Composed with '_Coerce' so callers see a plain Int64.)
pcpAccountId :: Lens' PretargetingConfigPatch Int64
pcpAccountId
  = lens _pcpAccountId (\ s a -> s{_pcpAccountId = a})
      . _Coerce

-- | The specific id of the configuration to update.
pcpConfigId :: Lens' PretargetingConfigPatch Int64
pcpConfigId
  = lens _pcpConfigId (\ s a -> s{_pcpConfigId = a}) .
      _Coerce
-- Wires the request record to the route above: the response type is the
-- updated PretargetingConfig and the adexchange.buyer OAuth scope is required.
instance GoogleRequest PretargetingConfigPatch where
        type Rs PretargetingConfigPatch = PretargetingConfig
        type Scopes PretargetingConfigPatch =
             '["https://www.googleapis.com/auth/adexchange.buyer"]
        requestClient PretargetingConfigPatch'{..}
          = go _pcpAccountId _pcpConfigId (Just AltJSON)
              _pcpPayload
              adExchangeBuyerService
          where go
                  = buildClient
                      (Proxy :: Proxy PretargetingConfigPatchResource)
                      mempty
| rueshyna/gogol | gogol-adexchange-buyer/gen/Network/Google/Resource/AdExchangeBuyer/PretargetingConfig/Patch.hs | mpl-2.0 | 4,034 | 0 | 14 | 868 | 499 | 295 | 204 | 75 | 1 |
import Data.Array
import Text.Regex.PCRE
-- Language identifiers accepted as valid submissions.
langs = ["C","CPP","JAVA","PYTHON","PERL","PHP","RUBY","CSHARP","HASKELL","CLOJURE","BASH","SCALA","ERLANG","CLISP","LUA","BRAINFUCK","JAVASCRIPT","GO","D","OCAML","R","PASCAL","SBCL","DART","GROOVY","OBJECTIVEC"]

-- | Classify a line of the form "<5 digits> <language token>":
-- "VALID" when the captured token is one of 'langs', else "INVALID".
-- NOTE(review): @m ! 1@ assumes the regex matched -- a non-matching line
-- yields an empty MatchArray and indexing throws. Presumably the input
-- format guarantees a match; verify.
validity :: String -> String
validity s = let p = "^\\d{5}\\s+(\\w*)"
                 m = s =~ p :: MatchArray
                 -- (offset, length) of capture group 1, sliced out of s.
                 sub = (\(i, l) -> take l (drop i s)) (m ! 1)
             in if elem sub langs then "VALID" else "INVALID"
-- | Read the record count, then classify that many following lines.
main = do
  n <- readLn
  c <- getContents
  putStr $ unlines $ map validity $ take n $ lines c
| itsbruce/hackerrank | alg/regex/hacklang.hs | unlicense | 596 | 0 | 14 | 131 | 237 | 134 | 103 | 12 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
-- |
-- Module : Credentials
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-- This module provides a common interface for operating on your shared
-- credentials.
module Credentials
(
-- * Usage
-- $usage
-- * Operations
insert
, select
, delete
, truncate
, revisions
, setup
, teardown
-- * KMS
, KeyId (..)
, defaultKeyId
-- * DynamoDB
, DynamoTable (..)
, defaultTable
-- * Errors
, CredentialError (..)
, AsCredentialError (..)
-- * Types
, Name (..)
, Revision (..)
, Context (..)
, Setup (..)
) where
import Credentials.DynamoDB
import Credentials.Types
-- $usage
-- To use the library, make sure you have met the following prerequisites:
--
-- * You have a master key in KMS. You can create this under Identity and Access
-- Management > Encryption Keys, in the AWS developer console.
--
-- * Your AWS access credentials are available where
-- <https://hackage.haskell.org/package/amazonka amazonka> can find them. This
-- will be automatic if you are running on an EC2 host, otherwise
-- the <https://blogs.aws.amazon.com/security/post/Tx3D6U6WSFGOK2H/A-New-and-Standardized-Way-to-Manage-Credentials-in-the-AWS-SDKs ~\/.aws\/credentials> file, as @AWS_ACCESS_KEY_ID@ and
-- @AWS_SECRET_ACCESS_KEY@ environment variables need to be configured.
--
-- Since all of the credentials operations are constrained by a 'MonadAWS' context,
-- running them is identical to that of [amazonka](https://hackage.haskell.org/package/amazonka),
-- which you will also need to add to your @build-depends@ section of your project's cabal file.
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- >
-- > import Credentials
-- > import Control.Lens
-- > import Data.ByteString (ByteString)
-- > import Network.AWS
-- > import System.IO
-- >
-- > example :: IO (ByteString, Revision)
-- > example = do
-- > -- A new 'Logger' to replace the default noop logger is created,
-- > -- which will print AWS debug information and errors to stdout.
-- > lgr <- newLogger Debug stdout
-- >
-- > -- A new amazonka 'Env' is created, which auto-discovers the
-- > -- underlying host credentials.
-- > env <- newEnv Frankfurt Discover
-- >
-- > let table = "dynamo-table-name"
-- > key = "kms-key-alias"
-- > name = "credential-name"
-- >
-- > -- We now run the 'AWS' computation with the overridden logger,
-- > -- performing the sequence of credentials operations.
-- > runResourceT . runAWS (env & envLogger .~ lgr) $ do
-- > -- Firstly, we create the DynamoDB table.
-- > -- This is an idempotent operation but since it makes remote API calls,
-- > -- it's recommended you only run this once via the CLI.
-- > Credentials.setup table
-- >
-- > -- Then we insert a credential\'s value, for a given name.
-- > -- Encryption is handled transparently and the resulting revision
-- > -- is returned.
-- > _ <- Credentials.insert key mempty name "a-super-secret-value" table
-- >
-- > -- Selecting the credential by name, and specifying 'Nothing' for the
-- > -- revision results in the latest credential revision being returned.
-- > Credentials.select mempty name Nothing table
| brendanhay/credentials | credentials/src/Credentials.hs | apache-2.0 | 3,741 | 0 | 5 | 910 | 175 | 144 | 31 | 26 | 0 |
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- | A library whose header contains Unicode.
-- Makes sure that the CheckModuleNames program can parse it.
module UnicodeLib(α) where

-- Exported constant with a Greek-letter name; it exists to exercise
-- Unicode handling (see the module header comment).
α = 42
| google/cabal2bazel | bzl/tests/rules/UnicodeLib.hs | apache-2.0 | 734 | 0 | 4 | 126 | 29 | 24 | 5 | 2 | 1 |
--Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
module Duckling.Rules.TA
( defaultRules
, langRules
, localeRules
) where
import Duckling.Dimensions.Types
import Duckling.Locale
import Duckling.Types
import qualified Duckling.Numeral.TA.Rules as Numeral
import qualified Duckling.Ordinal.TA.Rules as Ordinal
-- | With no locale information, the language-wide rules apply.
defaultRules :: Seal Dimension -> [Rule]
defaultRules = langRules

-- | Locale overrides exist only for custom dimensions here; built-in
-- dimensions have no TA locale-specific rules.
localeRules :: Region -> Seal Dimension -> [Rule]
localeRules region (Seal (CustomDimension dim)) = dimLocaleRules region dim
localeRules _ _ = []

-- | Tamil rules per dimension: only Numeral and Ordinal are implemented;
-- every other built-in dimension is explicitly empty.
langRules :: Seal Dimension -> [Rule]
langRules (Seal AmountOfMoney) = []
langRules (Seal CreditCardNumber) = []
langRules (Seal Distance) = []
langRules (Seal Duration) = []
langRules (Seal Numeral) = Numeral.rules
langRules (Seal Email) = []
langRules (Seal Ordinal) = Ordinal.rules
langRules (Seal PhoneNumber) = []
langRules (Seal Quantity) = []
langRules (Seal RegexMatch) = []
langRules (Seal Temperature) = []
langRules (Seal Time) = []
langRules (Seal TimeGrain) = []
langRules (Seal Url) = []
langRules (Seal Volume) = []
langRules (Seal (CustomDimension dim)) = dimLangRules TA dim
| facebookincubator/duckling | Duckling/Rules/TA.hs | bsd-3-clause | 1,307 | 0 | 9 | 199 | 418 | 224 | 194 | 32 | 1 |
module Main () where
import Data.Word
import Data.Bits ((.&.), shiftL, shiftR)
import Data.Char (digitToInt)
import Data.List (foldl')
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Control.Concurrent.STM
import Network.Socket (Socket)
import qualified Network.Socket as S

import Protocol.Peer
import qualified Process.Peer.Supervisor
-- 20-character peer id (Azureus-style "-AT00-" prefix plus filler).
peerId = "-AT00-123asd213dfrae"
-- 20 raw infohash bytes of the target torrent.
infohash = B.pack
  [106, 54, 222, 32, 29, 242, 241, 178, 200, 23, 71, 76, 48, 117, 255, 14, 170, 140, 119, 133]
-- Handshake message built from the constants above; the final [] is the
-- capability/flags list (empty here).
handshake = Handshake peerId infohash []
-- | Connect to a peer given "host:port" (dotted-quad host) and hand the
-- socket address plus the fixed 'handshake' to the peer supervisor.
run addr = do
  let [host, port] = splitWhen (== ':') addr   -- NOTE(review): partial; malformed addr crashes
      port1 = (read port :: Word16)            -- NOTE(review): 'read' throws on a non-numeric port
      -- Swap the two bytes of the port (host order -> network order, htons-style).
      port2 = ((port1 .&. 0x00FF) `shiftL` 8) + ((port1 .&. 0xFF00) `shiftR` 8)
      host1 = convert host
      sock = S.SockAddrInet (S.PortNum port2) host1
  Process.Peer.Supervisor.runPeer sock handshake
-- | Turn a dotted-quad string ("a.b.c.d") into a numeric host address by
-- parsing each octet and packing them via 'collect'.
convert :: (Integral a) => String -> a
convert host = fromIntegral . collect . map toInt . splitWhen (== '.') $ host
-- | Pack four dotted-quad octets @[a4,a3,a2,a1]@ into one Int: the last
-- octet lands in the high byte, the first in the low byte (matching the
-- byte order 'convert' feeds to SockAddrInet).
-- Any other list length is a caller bug; fail with a clear diagnostic
-- instead of an opaque pattern-match error.
collect :: [Int] -> Int
collect [a4, a3, a2, a1] =
    shiftL a1 24 + shiftL a2 16 + shiftL a3 8 + a4
collect xs =
    error $ "collect: expected exactly 4 address octets, got " ++ show (length xs)
-- | Parse a non-negative decimal string into an Int.
-- Uses a strict left fold: the original lazy 'foldl' builds a chain of
-- thunks for the accumulator before any addition is forced.
toInt :: String -> Int
toInt s = foldl' (\acc c -> acc * 10 + digitToInt c) 0 s
-- | Split a string on characters matching the predicate, discarding the
-- separators themselves. Leading and repeated separators produce no
-- empty chunks (words-style splitting).
splitWhen :: (Char -> Bool) -> String -> [String]
splitWhen p str
  | null rest = []
  | otherwise = chunk : splitWhen p remainder
  where
    rest = dropWhile p str
    (chunk, remainder) = break p rest
| artems/FlashBit | demo/peer.hs | bsd-3-clause | 1,344 | 0 | 14 | 312 | 551 | 311 | 240 | 35 | 2 |
import MIPS_I
import Elf
import Control.Monad.Trans.Except
import Control.Monad.Trans.Class (lift)
-- | Load a fixed test ELF image, run it through the interpreter, and
-- report "Ok" on success or the error string from the ExceptT layer.
main = do
  r <- runExceptT $ do
    img <- readElfFile "/media/tmpfs/testsuite/basic_tests/jumpr.elf"
    run img
  case r of
    -- NOTE(review): 'print' shows the quotes; putStrLn may be intended.
    Right _ -> print "Ok"
    Left s -> print $ "Fail: " ++ s
| p12nGH/miss | src/Main.hs | bsd-3-clause | 321 | 0 | 12 | 93 | 94 | 47 | 47 | 11 | 2 |
module Lib.Config
( getConfig
, Config
, getWidth
, getHeight
) where
import Data.Char (digitToInt)
import Data.Maybe (isJust)
import Data.Either
import Control.Applicative ((*>))
import Text.Parsec (many, (<|>), sepBy1, parse)
import Text.Parsec.Char (char, digit)
import Text.Parsec.String (Parser)
import Data.List (foldl')
-- One parsed key=value entry from the config string: a width or a
-- height, each carrying its pixel value.
data KeyVal = Width Int
            | Height Int
            deriving (Show)

-- A config is simply the list of parsed entries, in input order.
type Config = [KeyVal]
-- | Parse a non-negative decimal digit string into an Int (no input
-- validation; callers supply digits only).
atoi :: [Char] -> Int
atoi = foldl' step 0
  where step acc d = acc * 10 + digitToInt d
-- | A config is one or more key=value entries separated by commas.
parseConfig :: Parser Config
parseConfig = parseKeyVal `sepBy1` (char ',')

-- | A single entry: either a width or a height setting.
parseKeyVal :: Parser KeyVal
parseKeyVal = parseWidth <|> parseHeight

-- | "w=<digits>" -> 'Width'.
parseWidth :: Parser KeyVal
parseWidth = do
  char 'w'
  char '='
  ds <- many digit
  return $ Width (atoi ds)

-- | "h=<digits>" -> 'Height'.
parseHeight :: Parser KeyVal
parseHeight = do
  char 'h'
  char '='
  ds <- many digit
  return $ Height (atoi ds)
-- | Parse a raw config string, requiring that at least one of width or
-- height was supplied; parse failures and missing dimensions are both
-- reported as a Left error string.
getConfig :: String -> Either String Config
getConfig input = case parse parseConfig "Config Error!" input of
                    Left err -> Left $ show err
                    Right c
                      | isJust (getWidth c) -> Right c
                      | isJust (getHeight c) -> Right c
                      | otherwise -> Left "Either width or height must be supplied"
-- | The first 'Width' entry in the config, if any.
getWidth :: Config -> Maybe Int
getWidth config =
  case config of
    []            -> Nothing
    Width w : _   -> Just w
    _ : remaining -> getWidth remaining
-- | The first 'Height' entry in the config, if any.
getHeight :: Config -> Maybe Int
getHeight config =
  case config of
    []            -> Nothing
    Height h : _  -> Just h
    _ : remaining -> getHeight remaining
| edvakf/haskell-thumber | src/Lib/Config.hs | bsd-3-clause | 1,450 | 0 | 13 | 313 | 575 | 296 | 279 | 50 | 2 |
-- | RDP is able to model feedback cycles with shared resources. For
-- demand monitors, a cycle might be:
--
-- monitor >>> bfmap foo >>> bdelay 0.1 >>> demand
--
-- Logically, the above behavior will cycle at 10Hz. Without choke,
-- Sirea would compute it as fast as possible, perhaps at 10kHz, and
-- thus be 9990 cycles ahead by the time one second had passed. That
-- would be an inefficient use of CPU and memory, and risks rework
-- for values speculated several minutes ahead.
--
-- Cyclic feedback can model interactive systems and coordination
-- patterns. However, cycles are expensive to compute and speculate.
-- Due to RDP's support for effects, there is no magical fixpoint.
-- Each full cycle involves at least one 'runStepper' call at each
-- partition involved in the cycle.
--
-- Developers are encouraged to avoid cycles where possible, instead
-- favor animated state models, which compute fixpoints of futures.
-- But cycles cannot be avoided entirely in open systems, so RDP and
-- Sirea must handle them robustly and efficiently. Potential cycles
-- are choked at every resource that might introduce them (state and
-- demand monitors, mostly).
--
-- Choking ensures a sane behavior: we speculate just fractions of a
-- second, and the equilibrium rate for updates is same as logical
-- frequency. The physical computation is not tightly coupled to the
-- logical frequency - e.g. it would run in bursts for frequencies
-- much higher than Sirea's heartbeat rate.
--
-- To improve anticipation, choke uses a heuristic equilibrium such
-- that speculation runs a rough number of cycles ahead. Choke is
-- introduced implicitly at resources that need it, e.g. all demand
-- monitors are choked.
--
-- A second concern regarding cycles is interaction with ln_touch.
--
-- A cycle within a partition is broken across steps. This ensures
-- each step performs a predictable amount of computation before
-- returning, though this does hurt snapshot consistency across the
-- cyclic resource. To keep more computation in the step, cycles are
-- detected within each partition (using ln_cycle) and breaks occur
-- at most once per cycle.
--
-- TODO: consider developing some sort of EqChoke to eliminate false
-- updates, to avoid unnecessary rework.
--
module Sirea.Internal.Choke
( newChoke
) where
import Data.IORef
import Data.Unique
import qualified Data.Set as S
import Control.Monad (unless, when)
import Control.Exception (assert)
import Sirea.Signal
import Sirea.Time
import Sirea.UnsafeLink
import Sirea.Internal.Tuning (tAncient, dtFutureChoke)
import Sirea.Internal.LTypes
import Sirea.Partition
-- Q: When do I clear CycSt?
-- A: I can clear when the model becomes inactive.
-- Q: When do I clear CycSt?
-- A: I can clear when the model becomes inactive.

-- Per-choke runtime state: the downstream link, a unique identity used
-- as the token in cycle detection, the cycle-test state, the mutable
-- update/stability record, and the partition scheduler.
data Choke z = Choke
    { ck_link :: !(LnkUp z)
    , ck_ident :: !Unique
    , ck_cycle :: !(IORef CycSt)
    , ck_data :: !(IORef (CKD z))
    , ck_psched :: !PSched
    }

-- Mutable record guarded by ck_data.
data CKD z = CKD
    { ckd_stable :: !StableT -- last reported stability
    , ckd_expect :: !Bool -- touched by main link
    , ckd_flush :: !Bool -- flush active this step?
    , ckd_update :: !(UPD z) -- pending update
    }

-- Result of this step's cycle test: not yet run, run without finding a
-- cycle, or a partition-local cycle was found.
data CycSt = CycUntested | CycTested | CycDetected

-- Pending update: nothing, or a signal together with its update time.
data UPD z = Idle | Update !(Sig z) {-# UNPACK #-} !T

-- Initial state: ancient stability, untouched, no flush, no update.
ckdZero :: CKD z
ckdZero = CKD (StableT tAncient) False False Idle
-- | Wrap a downstream link with a fresh choke: allocates the identity
-- token and the two state cells, then exposes the choked link.
newChoke :: PSched -> LnkUp z -> IO (LnkUp z)
newChoke pd lu =
    newUnique >>= \ u ->
    newIORef CycUntested >>= \ rfCyc ->
    newIORef ckdZero >>= \ rfDat ->
    let ck = Choke lu u rfCyc rfDat pd in
    return (chokeLnk ck)
-- The main choke behavior: touch/cycle pass straight into the choke's
-- handlers; update and idle both funnel through chokeLinkUpdate, with
-- update composing the new signal into the pending UPD.
chokeLnk :: Choke z -> LnkUp z
chokeLnk ck = LnkUp touch update idle cyc where
    touch = chokeTouch ck
    cyc = chokeCyc ck
    update tS tU su = chokeLinkUpdate ck tS $ applyUpd tU su
    idle tS = chokeLinkUpdate ck tS id

-- Compose or piggyback updates: if the new update starts after the
-- pending one, splice it in at tU and keep the earlier update time;
-- otherwise the new update subsumes the pending one entirely.
applyUpd :: T -> Sig z -> UPD z -> UPD z
applyUpd tU su Idle = Update su tU
applyUpd tU su (Update s0 tU0) =
    if (tU > tU0) then Update (s_switch' s0 tU su) tU0
                  else Update su tU
-- chokeTouch records and reports a touch on the main link. The first
-- activation in a step (neither touched nor flushing before) also runs
-- chokeInit to test for cycles and touch downstream.
chokeTouch :: Choke z -> IO ()
chokeTouch ck =
    readIORef (ck_data ck) >>= \ ckd ->
    unless (ckd_expect ckd) $
    let ckd' = ckd { ckd_expect = True } in
    writeIORef (ck_data ck) ckd' >>
    unless (ckdActive ckd) (chokeInit ck)

-- Active this step: an upstream update is expected or a flush is scheduled.
ckdActive :: CKD z -> Bool
ckdActive ckd = ckd_expect ckd || ckd_flush ckd
-- flush forces an update to be emitted downstream even when there
-- is no upstream update. This is used to break cycles or deliver
-- the updates on the next step. chokeFlush must be executed during
-- the touch phase (it schedules chokeFlushUpdate for the update phase).
chokeFlush :: Choke z -> IO ()
chokeFlush ck =
    readIORef (ck_data ck) >>= \ ckd ->
    unless (ckd_flush ckd) $
    let ckd' = ckd { ckd_flush = True } in
    writeIORef (ck_data ck) ckd' >>
    onUpdPhase (ck_psched ck) (chokeFlushUpdate ck) >>
    unless (ckdActive ckd) (chokeInit ck)
-- When choke is activated for any reason, we'll test whether it is
-- part of a cycle (seeding the cycle set with our own identity), and
-- we'll touch the link to indicate an upcoming update. This happens
-- once per step at most.
--
-- I'd like to explore techniques to minimize the number of cycle
-- tests, i.e. to avoid this 'flood'.
chokeInit :: Choke z -> IO ()
chokeInit ck = tstCyc >> touch where
    tstCyc =
        readIORef (ck_cycle ck) >>= \ cycSt ->
        when (cycUntested cycSt) $
        writeIORef (ck_cycle ck) CycTested >>
        ln_cycle (ck_link ck) (S.singleton (ck_ident ck))
    touch = ln_touch (ck_link ck)

-- Predicates over the cycle-test state.
cycUntested :: CycSt -> Bool
cycUntested CycUntested = True
cycUntested _ = False

cycDetected :: CycSt -> Bool
cycDetected CycDetected = True
cycDetected _ = False
-- chokeCyc detects partition-local cycles in a step: if our own token
-- is already in the incoming set the signal has looped back to us, so
-- mark the cycle and schedule a flush to break it; otherwise add our
-- token and propagate the test downstream. Once a cycle is detected we
-- stop propagating further tests this step.
chokeCyc :: Choke z -> CycleSet -> IO ()
chokeCyc ck ns =
    readIORef (ck_cycle ck) >>= \ cycSt ->
    unless (cycDetected cycSt) $
    if (S.member (ck_ident ck) ns)
        then writeIORef (ck_cycle ck) CycDetected >>
             chokeFlush ck -- break cycle with flush
        else writeIORef (ck_cycle ck) CycTested >>
             let ns' = S.insert (ck_ident ck) ns in
             ln_cycle (ck_link ck) ns'
-- update initiated by chokeFlush, runs in update phase. Clears the
-- flush flag; delivers downstream either when the choke is fully idle
-- for this step (also resetting the cycle state) or when a cycle was
-- detected (the flush exists precisely to break that cycle).
chokeFlushUpdate :: Choke z -> IO ()
chokeFlushUpdate ck =
    readIORef (ck_data ck) >>= \ ckd ->
    readIORef (ck_cycle ck) >>= \ cycSt ->
    assert (ckd_flush ckd) $
    let ckd' = ckd { ckd_flush = False } in
    let bDone = not (ckdActive ckd') in
    let bDeliver = bDone || cycDetected cycSt in
    writeIORef (ck_data ck) ckd' >>
    when bDone (writeIORef (ck_cycle ck) CycUntested) >>
    when bDeliver (chokeDeliver ck)
-- main link update or idle
--
-- If we're in a cycle, we need to decide whether to flush update on
-- next step. Otherwise, we should update this step.
chokeLinkUpdate :: Choke z -> StableT -> (UPD z -> UPD z) -> IO ()
chokeLinkUpdate ck tS fn =
    readIORef (ck_data ck) >>= \ ckd ->
    readIORef (ck_cycle ck) >>= \ cycSt ->
    writeIORef (ck_cycle ck) CycUntested >>= \ _ ->
    assert (ckd_expect ckd) $ -- should be touched by link
    assert (tS >= ckd_stable ckd) $ -- non-decreasing stability
    assert (not (cycDetected cycSt && ckd_flush ckd)) $ -- flush runs to break cycle
    -- merge the incoming update into the standing one and record
    -- the new stability; the touch expectation is now satisfied
    let upd' = fn (ckd_update ckd) in
    let ckd' = ckd { ckd_stable = tS, ckd_expect = False, ckd_update = upd' } in
    writeIORef (ck_data ck) ckd' >>
    let bWaitForFlush = ckd_flush ckd' in
    unless bWaitForFlush $ -- wait for flush to avoid double update
    if cycDetected cycSt
        -- in a cycle: defer delivery to the next step via flush, but
        -- only if something actually changed or became deliverable
        then let bSched = (ckd_stable ckd /= tS) || timeToDeliverU tS upd' in
             when bSched (onNextStep (ck_psched ck) (chokeFlush ck))
        else chokeDeliver ck
-- | Is a standing update worth delivering now? Idle carries nothing,
-- so never; an Update is delivered based on its time vs. stability.
timeToDeliverU :: StableT -> UPD z -> Bool
timeToDeliverU _ Idle = False
timeToDeliverU tS (Update _ tU) = timeToDeliver tS tU

-- | An update is deliverable when its time falls within the choke
-- window (stability plus dtFutureChoke); later updates are held back.
timeToDeliver :: StableT -> T -> Bool
timeToDeliver (StableT tS) tU = tU < (tS `addTime` dtFutureChoke)
-- Deliver the standing update. Called by chokeFlushUpdate or chokeLinkUpdate.
-- Deliver should run at most once per step. But it may run before link update
-- when a partition-local cycle is detected.
--
-- The caller is responsible for clearing any cycle information as needed.
chokeDeliver :: Choke z -> IO ()
chokeDeliver ck =
    readIORef (ck_data ck) >>= \ ckd ->
    assert (not (ckd_flush ckd)) $ -- flush must have been cleared first
    let tS = ckd_stable ckd in
    let upd = ckd_update ckd in
    case upd of
        -- nothing pending: just report stability downstream
        Idle -> ln_idle (ck_link ck) tS
        Update su tU ->
            if timeToDeliver tS tU
                -- consume the update (forced via seq before the write)
                -- and push it downstream
                then let ckd' = ckd { ckd_update = Idle } in
                     ckd' `seq` writeIORef (ck_data ck) ckd' >>
                     ln_update (ck_link ck) tS tU su
                -- too far in the future: keep choking, report stability
                else ln_idle (ck_link ck) tS
| dmbarbour/Sirea | src/Sirea/Internal/Choke.hs | bsd-3-clause | 8,960 | 0 | 28 | 2,106 | 2,024 | 1,052 | 972 | 154 | 3 |
{- |
Module : Network.Silver.Meta
Description : Bittorrent protocol metainfo.
Copyright : (c) Eric Izoita 2017
License : BSD3
Maintainer : ericizoita@gmail.com
Stability : experimental
Portability : portable
This module handles validation and reading of
metainfo files.
-}
module Network.Silver.Meta
( MetaInfo(..)
-- Content Functions
, announce
, PieceList(..)
, pieceList
, InfoHash(..)
, infoHash
-- Decoding
, decodeMetaFile
, decodeMeta
, isFileDict
, isInfoDict
, isMetaInfo
) where
-- Crypto
import Crypto.Hash (Digest, hash)
import Crypto.Hash.Algorithms (SHA1)
-- Binary Data
import Data.ByteString.Base16 (decode)
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
-- Containers
import qualified Data.Map.Strict as M
import Data.Map.Strict ((!))
-- Internal
import Network.Silver.BEncode (BVal(..), bDecode, bEncode, key)
-- | A meta info: the full decoded .torrent dictionary, validated by
-- 'isMetaInfo' when constructed via 'decodeMeta'.
newtype MetaInfo =
  MetaInfo BVal
  deriving (Show, Eq)

-- | A hash of the infodict within a meta info (raw 20-byte SHA1,
-- see 'infoHash').
newtype InfoHash =
  InfoHash ByteString
  deriving (Show, Eq)

-- | A list of pieces within a meta info: one 20-byte SHA1 digest per
-- piece (see 'pieceList').
newtype PieceList =
  PieceList [ByteString]
  deriving (Show, Eq)
-- | Extract announce url from MetaInfo.
--
-- Relies on validated input: (!) and the BStr pattern are safe for
-- values built via 'decodeMeta', which guarantees an "announce"
-- string is present ('isMetaInfo'). Any other shape hits 'error'.
announce :: MetaInfo -> ByteString
announce (MetaInfo (BDict m)) =
  let (BStr uri) = m ! (key "announce")
  in uri
announce _ = error "announce of invalid metainfo"
-- | Generate a SHA1 info_hash from MetaInfo.
--
-- The info dict is re-bencoded and hashed; 'show' on a Digest yields
-- its hex rendering, which is then base16-decoded back to the raw
-- 20-byte digest (the form trackers expect).
infoHash :: MetaInfo -> ByteString
infoHash (MetaInfo (BDict m)) =
  let sha1 :: ByteString -> Digest SHA1
      sha1 = hash
      s = bEncode (m ! (key "info"))
      -- snd of decode collects any undecodable suffix; none here
      (raw, _) = (decode . BS.pack . show . sha1) s
  in raw
infoHash _ = error "infoHash of invalid metainfo"
-- | Split a byte string into successive pieces of length 20 (the
-- final piece is shorter when the input length is not a multiple of
-- 20). Used to carve the "pieces" blob into per-piece SHA1 digests.
split20 :: ByteString -> [ByteString]
split20 xs
  | BS.null xs = []
  | otherwise =
    -- splitAt yields both halves in one step, replacing the
    -- separate take/drop pair of the original
    let (cur, nxt) = BS.splitAt 20 xs
    in cur : split20 nxt
-- | Extract pieces list from MetaInfo.
--
-- The "pieces" value is the concatenation of 20-byte SHA1 digests,
-- one per piece, in piece order; 'split20' recovers the individual
-- digests. Safe only on validated input ('isMetaInfo' guarantees the
-- length is a multiple of 20); other shapes hit 'error'.
pieceList :: MetaInfo -> PieceList
pieceList (MetaInfo (BDict m)) =
  let (BDict inf) = m ! (key "info")
      (BStr pieces) = inf ! (key "pieces")
  in PieceList $ split20 pieces
pieceList _ = error "pieceList of invalid metainfo"
-- | Decode and validate MetaInfo from a file.
--
-- Reads the file strictly and delegates to 'decodeMeta'; yields
-- Nothing when the contents are not valid, well-formed metainfo.
decodeMetaFile :: String -> IO (Maybe MetaInfo)
decodeMetaFile f = fmap decodeMeta (BS.readFile f)
-- | Decode and validate MetaInfo from a ByteString.
--
-- Succeeds only when the bytes bdecode successfully AND the decoded
-- value satisfies 'isMetaInfo'; otherwise yields Nothing.
decodeMeta :: ByteString -> Maybe MetaInfo
decodeMeta xs = do
  v <- bDecode xs
  if isMetaInfo v
    then Just (MetaInfo v)
    else Nothing
-- | Check whether a BVal is a non-empty BStr.
isBStr :: BVal -> Bool
isBStr (BStr s) = not (BS.null s) -- null is the idiomatic emptiness test
isBStr _ = False
-- | Check whether a BVal is a valid FileDict.
--
-- Requires an integer "length" and a non-empty "path" list whose
-- elements are all non-empty strings.
isFileDict :: BVal -> Bool
isFileDict (BDict f) =
  let l =
        case M.lookup (key "length") f of
          Just (BInt _) -> True
          _ -> False
      p =
        case M.lookup (key "path") f of
          Just (BList []) -> False -- path components must be present
          Just (BList xs) -> all isBStr xs
          _ -> False
  in l && p
isFileDict _ = False
-- | Check whether a BVal is a valid InfoDict.
--
-- Requires a string "name", an integer "piece length", a "pieces"
-- string whose length is a multiple of 20 (one SHA1 digest per
-- piece), and exactly one of: an integer "length" (single-file mode)
-- or a list of valid FileDicts under "files" (multi-file mode).
isInfoDict :: BVal -> Bool
isInfoDict (BDict i) =
  let n =
        case M.lookup (key "name") i of
          Just (BStr _) -> True
          _ -> False
      pl =
        case M.lookup (key "piece length") i of
          Just (BInt _) -> True
          _ -> False
      p =
        case M.lookup (key "pieces") i of
          Just (BStr s) -> (BS.length s) `rem` 20 == 0
          _ -> False
      l =
        case M.lookup (key "length") i of
          Just (BInt _) -> True
          _ -> False
      f =
        case M.lookup (key "files") i of
          Just (BList ld) -> all isFileDict ld
          _ -> False
      -- exactly one of single-file / multi-file mode may be present
      xor a b = (a || b) && not (a && b)
  in n && pl && p && (xor l f)
isInfoDict _ = False
-- | Check whether a BVal is a valid MetaInfo.
--
-- A valid metainfo is a dictionary with a string "announce" url and
-- an "info" dictionary that satisfies 'isInfoDict'.
isMetaInfo :: BVal -> Bool
isMetaInfo (BDict m) = hasAnnounce && hasValidInfo
  where
    hasAnnounce =
      case M.lookup (key "announce") m of
        Just (BStr _) -> True
        _ -> False
    hasValidInfo = maybe False isInfoDict (M.lookup (key "info") m)
isMetaInfo _ = False
| nytopop/silver | src/Network/Silver/Meta.hs | bsd-3-clause | 4,396 | 0 | 16 | 1,242 | 1,381 | 721 | 660 | 121 | 6 |
{-# LANGUAGE JavaScriptFFI #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecursiveDo #-}
module Main ( main ) where
import JsHs.LikeJS.Class (asJSVal)
import JsHs.Useful
--import JsHs.Nullable (Nullable(..))
import Control.Monad (when)
import Data.Geometry
import Data.Coerce
import qualified Data.Geometry.Transform as T
import System.IO.Unsafe
-- functional reactive programming
import Reactive.Banana.Frameworks
import Reactive.Banana.Combinators
import Reactive.Banana.JsHs
import Program
import Program.Model.Camera
import Program.Model.City
import Program.Model.CityObject
import Program.Model.LuciConnection
import Program.View
import Program.Settings
import Program.Types
-- Events
import qualified Program.Controllers.GeoJSONFileImport as JFI
import qualified Program.Controllers.GUI as GUI
import qualified Program.Controllers.Logging as Logging
-- | Program entry point: reads settings, wires up the GUI event
-- sources, builds the reactive-banana network (camera, city model,
-- rendering, Luci connection, logging, submission), then starts it.
-- The network is built with recursive-do (mdo) because behaviors
-- such as cameraB/viewB/cityB refer to one another cyclically.
main :: IO ()
main = do
    -- get program settings
    lsettings <- loadSettings
    putStrLn "Getting program input settings:"
    print lsettings
    -- whole document
    -- body <- documentBody
    -- drawing area
    canvas <- getElementById "glcanvas"
    -- "import geometry" button converts GeoJSON into internal representation
    -- importButton <- getElementById "jsonfileinput"
    -- geoJSON updates
    (clearGeomHandler, clearFire) <- newAddHandler
    (geoJSONImportsHandler, geoJSONImportFire) <- newAddHandler
    -- JFI.registerButton geoJSONImportsHandler importButton
    -- ground draws and updates
    (groundUpdateRequestH, groundUpdateRequestFire) <- newAddHandler
    -- GUI hooks are only available in the Full profile
    when (profile lsettings == Full) $ do
      JFI.registerClearGeometry clearFire
      JFI.registerJSONFileImports (geoJSONImportFire . Left)
      GUI.registerServiceClear (const $ groundUpdateRequestFire GroundClearRequest)
      GUI.registerServiceRun (const $ groundUpdateRequestFire GroundUpdateRequest)
    canv <- getCanvasById "glcanvas"
    -- view <- initView program canv
    -- reactive-banana event network
    heh <- elementHandler $ coerce canvas
    network <- compile $ mdo
      -- initial state of various params
      isize <- viewPortSize heh >>= valueB
      let icamera = initCamera (realToFrac $ coordX isize)
                               (realToFrac $ coordY isize)
                               CState { viewPoint = vector3 (-17.5) (-17) 0
                                      , viewAngles = (0.345, 0.825)
                                      , viewDist = 138 }
      -- GeoJSON updates
      geoJSONImportE <- fromAddHandler geoJSONImportsHandler
      clearGeometryE <- fmap (const ClearingGeometry) <$> fromAddHandler clearGeomHandler
      let cityChangeE = unionWith (const id) (CityUpdate . fun <$> geoJSONImportE) (CityErase <$ clearGeometryE)
          fun (Left a) = a
          fun (Right a) = a
      -- canvas events
      pointerE <- pointerEvents heh
      wheelE <- wheelEvents heh
      resizeE <- resizeEvents heh
      curPointersB <- curPointers heh
      oldPointersB <- downPointers heh
      buttonsB' <- buttons heh
      ctrlKeyB <- ctrlKey heh
      shiftKeyB <- shiftKey heh
      -- shift/ctrl with button 1 are remapped to emulate other buttons
      let modButtons True True 1 = 4
          modButtons True False 1 = 2
          modButtons False True 1 = 2
          modButtons _ _ b = b
          buttonsB = modButtons <$> shiftKeyB <*> ctrlKeyB <*> buttonsB'
          coordsB = combinePointers <$> oldPointersB <*> curPointersB
      -----------------------
      -- program components
      -----------------------
      -- selection must go first for some reason (otherwise blocked by MVar)
      (heldObjIdB, heldObjIdE) <- heldObjectIdBehavior pointerE cameraB (context <$> viewB)
      (selObjIdB, selObjIdE) <- selectedObjectIdBehavior pointerE cameraB (context <$> viewB)
      -- camera may move unless the held object is also the selected one
      let allowCameraMoveB = f <$> selObjIdB <*> heldObjIdB
            where
              f _ Nothing = True
              f Nothing _ = True
              f (Just i) (Just j) | j /= i = True
                                  | otherwise = False
      -- conrol camera
      (resetCamE, resetCamFire) <- newEvent
      liftIO $ GUI.registerResetCamera resetCamFire
      cameraB <- cameraBehavior icamera
                                pointerE
                                wheelE
                                resizeE
                                resetCamE
                                buttonsB
                                coordsB
                                allowCameraMoveB
      -- object transform applies to any active object
      let objectTransformE :: Event (ObjectTransform T.QFTransform CityObject)
          objectTransformE = objectTransformEvents pointerE
                                                   buttonsB
                                                   coordsB
                                                   cameraB
      -- clearing geometry also drops the custom object scale
      settingsB <- stepper lsettings (lsettings{objectScale = Nothing} <$ clearGeometryE)
      groundUpdateRequestE <- fromAddHandler groundUpdateRequestH
      (colorizePropertyE, colorizePropertyFire) <- newEvent
      liftIO $ GUI.registerColorizeProperty colorizePropertyFire
      reactimate $ (\mx -> case mx of
                     Nothing -> colorizePropertyFire Nothing
                     _ -> return ()
                   ) <$> selObjIdE
      -- city
      (vsResultsE', vsResultsFire') <- newEvent
      (cityChanges, cityB, errsE, motionRecordsE, groundUpdatedE) <- cityBehavior settingsB
                                                                                  selObjIdB
                                                                                  colorizePropertyE
                                                                                  heldObjIdE
                                                                                  objectTransformE
                                                                                  cityChangeE
                                                                                  groundUpdateRequestE
                                                                                  vsResultsE'
      -- clear ground when city is updated
      reactimate $ groundUpdateRequestFire GroundClearRequest <$ cityChangeE
      -- when in full mode, we have a grid. Reset it on object motion!
      when (profile lsettings == Full) $
        reactimate $ groundUpdateRequestFire GroundClearRequest <$ motionRecordsE
      -- show building info on selection
      let showInfoA _ Nothing = GUI.showInfo (unsafePerformIO newObj)
          showInfoA ci (Just i) = GUI.showInfo . maybe (unsafePerformIO newObj) (shownProps ci . T.unwrap) $ getObject i ci
      reactimate $ showInfoA <$> cityB <@> selObjIdE
      -- a little bit of logging
      reactimate $ mapM_ logText' <$> errsE
      -- reactimate $ print <$> motionRecordsE
      let programB = initProgram settingsB cameraB cityB
      -- render scene
      updateE <- updateEvents heh
      (wantPictureE, wantPictureFire) <- newEvent
      (viewB, pictureE) <- viewBehavior canv wantPictureE resizeE cityChanges updateE vsResultsE programB
      -- use luci only in full profile
      vsResultsE <- case profile lsettings of
          Full -> luciBehavior lsettings geoJSONImportFire cityB groundUpdatedE geoJSONImportE clearGeometryE motionRecordsE
          _ -> return never
      reactimate $ vsResultsFire' <$> vsResultsE
      -- log all actions if there is a place to log to
      when (profile lsettings /= ExternalViewer) $
        case loggingUrl lsettings of
          Nothing -> return ()
          Just url -> let onlyNew _ (CityNew _) = Just ()
                          onlyNew ci (CityUpdate _) = if isEmptyCity ci then Just () else Nothing
                          onlyNew _ _ = Nothing
                          onlyUpd ci (CityUpdate fc) = if isEmptyCity ci then Nothing else Just fc
                          onlyUpd _ _ = Nothing
                          getNew ci () = (storeCityAsIs ci, fst $ cityTransform ci)
                      in do
                        cityNews <- mapEventIO (const $ return ()) $ filterJust $ onlyNew <$> cityB <@> cityChangeE
                        Logging.logActions url motionRecordsE (getNew <$> cityB <@> cityNews) (filterJust $ onlyUpd <$> cityB <@> cityChangeE)
      -- save submission if in edit mode
      when (profile lsettings == ExternalEditor) $ do
        (submissionE, submissionFire) <- newEvent
        liftIO $ GUI.registerSubmit submissionFire
        -- a submission first requests a screenshot; the saved action
        -- fires once the picture event arrives
        waitForPictureE <- mapEventIO (\f -> wantPictureFire WantPicture >> return f) submissionE
        waitForPictureB <- stepper (return (return ())) waitForPictureE
        reactimate $ (\s c f p -> let construct Nothing = return ()
                                      construct (Just url) = f (url, storeCityWithProps c, asJSVal p)
                                  in construct $ submitUrl s
                     ) <$> settingsB <*> cityB <*> waitForPictureB <@> pictureE
      return ()
    actuate network
    play heh
    putStrLn "Program started."
    programIdle
    -- load geometry from a link if needed
    case scenarioUrl lsettings of
      Nothing -> return ()
      Just url -> JFI.loadGeoJSONFromLink url (geoJSONImportFire . Left)
| achirkin/ghcjs-modeler | src/Main.hs | bsd-3-clause | 8,969 | 0 | 28 | 2,910 | 1,919 | 957 | 962 | 150 | 18 |
module L1.Tests where
import L1.Grammar
import L1.Parser
import L1.Display
import Test.QuickCheck
-- | Entry point: run the round-trip property.
main = qc

{- Quickcheck identity tests -}

-- | Displaying a Program and re-parsing the text must reproduce the
-- original value (printer/parser round-trip).
prop_program_identity :: Program -> Bool
prop_program_identity x = x == (readProg . displayProgram $ x)

-- | Run the round-trip property with the configuration in 'progArgs'.
qc = quickCheckWith progArgs prop_program_identity
-- | QuickCheck configuration: 100 cases of size up to 100, chatty
-- output, no replay seed.
--
-- Built as a record update of 'stdArgs' rather than a bare 'Args'
-- construction: a bare construction leaves any field not listed
-- (e.g. fields added by newer QuickCheck versions, such as
-- maxShrinks) undefined, crashing at runtime when accessed.
progArgs :: Args
progArgs = stdArgs { replay = Nothing
                   , maxSuccess = 100
                   , maxSize = 100
                   , maxDiscardRatio = 5
                   , chatty = True
                   }
{-# LANGUAGE OverloadedStrings #-}
module Web.Stripe.Charge
( Charge(..)
, ChargeId(..)
, chargeToken
, chargeTokenById
, chargeCustomer
, chargeCustomerById
, chargeRCard
, getCharge
, getCharges
, partialRefund
, partialRefundById
, fullRefund
, fullRefundById
{- Re-Export -}
, Amount(..)
, Count(..)
, Currency(..)
, Description(..)
, Offset(..)
, UTCTime(..)
, StripeConfig(..)
, StripeT(StripeT)
, runStripeT
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (liftM, mzero)
import Control.Monad.Error (MonadIO)
import Data.Aeson (FromJSON (..), Value (..), (.:), (.:?))
import qualified Data.ByteString as B
import qualified Data.Text as T
import Network.HTTP.Types (StdMethod (..))
import Web.Stripe.Card (Card, RequestCard, rCardKV)
import Web.Stripe.Client (StripeConfig (..), StripeRequest (..),
StripeT (..), baseSReq, query, queryData,
runStripeT)
import Web.Stripe.Customer (Customer (..), CustomerId (..))
import Web.Stripe.Token (Token (..), TokenId (..))
import Web.Stripe.Utils (Amount (..), Count (..), Currency (..),
Description (..), Offset (..),
UTCTime (..), fromSeconds, optionalArgs,
showByteString, textToByteString)
----------------
-- Data Types --
----------------
-- | Represents a charge in the Stripe system.
data Charge = Charge
{ chargeId :: ChargeId
, chargeCreated :: UTCTime
, chargeDescription :: Maybe Description
, chargeCurrency :: Currency
, chargeAmount :: Amount
, chargeLive :: Bool
, chargePaid :: Bool
, chargeRefunded :: Bool
, chargeCard :: Card
} deriving Show
-- | Represents the identifier for a given 'Charge' in the Stripe system.
newtype ChargeId = ChargeId { unChargeId :: T.Text } deriving (Show, Eq)
-- | Submit a 'Charge' to the Stripe API using an already constructed 'Token'.
chargeToken :: MonadIO m => Token -> Amount -> Currency
            -> Maybe Description -> Maybe Amount -> StripeT m Charge
chargeToken = chargeTokenById . tokId

-- | Submit a 'Charge' to the Stripe API using a 'TokenId'.
-- The token id is sent as the "card" form field.
chargeTokenById :: MonadIO m => TokenId -> Amount -> Currency
                -> Maybe Description -> Maybe Amount -> StripeT m Charge
chargeTokenById (TokenId tid) = charge [("card", textToByteString tid)]

-- | Submit a 'Charge' to the Stripe for a specific 'Customer' that already has
-- payment details on file.
chargeCustomer :: MonadIO m => Customer -> Amount -> Currency
               -> Maybe Description -> Maybe Amount -> StripeT m Charge
chargeCustomer = chargeCustomerById . custId

-- | Submit a 'Charge' to the Stripe for a specific 'Customer', identified by
-- its 'CustomerId', that already has payment details on file.
chargeCustomerById :: MonadIO m => CustomerId -> Amount -> Currency
                   -> Maybe Description -> Maybe Amount -> StripeT m Charge
chargeCustomerById (CustomerId cid) = charge [("customer", textToByteString cid)]

-- | Submit a 'Charge' to the Stripe API using a 'RequestCard' to describe
-- payment details.
chargeRCard :: MonadIO m => RequestCard -> Amount -> Currency
            -> Maybe Description -> Maybe Amount -> StripeT m Charge
chargeRCard rc = charge (rCardKV rc)
-- | Internal convenience function to handle actually submitting a 'Charge'
-- request to the Stripe API. The caller supplies payment-source
-- fields (adata); this adds amount/currency plus the optional
-- description and application fee, and POSTs to /charges.
charge :: MonadIO m => [(B.ByteString, B.ByteString)] -> Amount -> Currency
       -> Maybe Description -> Maybe Amount -> StripeT m Charge
charge adata a c mcd maf =
    snd `liftM` query (chargeRq []) { sMethod = POST, sData = fdata }
    where
        -- optionalArgs drops the Nothing-valued optional fields
        fdata = optionalArgs odata ++ adata ++ bdata
        odata = [ ("description", textToByteString . unDescription <$> mcd)
                , ("application_fee", showByteString . unAmount <$> maf)
                ]
        bdata = [ ("amount", showByteString . unAmount $ a)
                , ("currency", textToByteString $ unCurrency c)
                ]

-- | Retrieve a 'Charge' from the Stripe API, identified by 'ChargeId'.
getCharge :: MonadIO m => ChargeId -> StripeT m Charge
getCharge (ChargeId cid) = snd `liftM` query (chargeRq [cid])
-- | Retrieve a list of 'Charge's from the Stripe API. The query can optionally
-- be refined to a specific:
--
-- * number of charges, via 'Count',
-- * page of results, via 'Offset', and
-- * 'Customer'.
getCharges :: MonadIO m => Maybe CustomerId -> Maybe Count -> Maybe Offset
           -> StripeT m [Charge]
getCharges mcid mc mo = liftM snd $
    queryData ((chargeRq []) { sQString = optionalArgs oqs })
    where
        -- only the supplied refinements become query-string params
        oqs = [ ("count", show . unCount <$> mc)
              , ("offset", show . unOffset <$> mo)
              , ("customer", T.unpack . unCustomerId <$> mcid)
              ]
        -- err = throwError $ strMsg "Unable to parse charge list."
-- | Requests that Stripe issue a partial refund to a specific 'Charge' for a
-- particular 'Amount'.
partialRefund :: MonadIO m => Charge -> Amount -> StripeT m Charge
partialRefund = partialRefundById . chargeId

-- | Requests that Stripe issue a partial refund to a specific 'Charge',
-- identified by 'ChargeId', for a particular 'Amount'.
partialRefundById :: MonadIO m => ChargeId -> Amount -> StripeT m Charge
partialRefundById cid = refundChargeById cid . Just

-- | Requests that Stripe issue a full refund to a specific 'Charge'.
fullRefund :: MonadIO m => Charge -> StripeT m Charge
fullRefund = fullRefundById . chargeId

-- | Requests that Stripe issue a full refund to a specific 'Charge',
-- identified by 'ChargeId'.
fullRefundById :: MonadIO m => ChargeId -> StripeT m Charge
fullRefundById cid = refundChargeById cid Nothing

-- | Internal convenience function used to handle submitting a refund request
-- to Stripe. A Nothing amount means a full refund (the "amount" form
-- field is omitted); Just an amount yields a partial refund.
refundChargeById :: MonadIO m => ChargeId -> Maybe Amount -> StripeT m Charge
refundChargeById (ChargeId cid) ma =
    snd `liftM` query (chargeRq [cid, "refund"]) { sMethod = POST, sData = fd }
    where fd = optionalArgs [("amount", showByteString . unAmount <$> ma)]
-- | Convenience function to create a 'StripeRequest' specific to charge-related
-- actions: the given path components are appended under "charges".
chargeRq :: [T.Text] -> StripeRequest
chargeRq pcs = baseSReq { sDestination = "charges":pcs }
------------------
-- JSON Parsing --
------------------

-- | Attempts to parse JSON into a 'Charge'. Field order matches the
-- 'Charge' record; "created" arrives as epoch seconds and is
-- converted with 'fromSeconds'; "description" may be absent (.:?).
instance FromJSON Charge where
    parseJSON (Object v) = Charge
        <$> (ChargeId <$> v .: "id")
        <*> (fromSeconds <$> v .: "created")
        <*> (fmap Description <$> v .:? "description")
        <*> (Currency <$> v .: "currency")
        <*> (Amount <$> v .: "amount")
        <*> v .: "livemode"
        <*> v .: "paid"
        <*> v .: "refunded"
        <*> v .: "card"
    parseJSON _ = mzero
| michaelschade/hs-stripe | src/Web/Stripe/Charge.hs | bsd-3-clause | 7,223 | 0 | 21 | 1,958 | 1,639 | 937 | 702 | 115 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
-------------------------------------------------------------------
-- |
-- Module : Irreverent.Bitbucket.Json.Common
-- Copyright : (C) 2017 Irreverent Pixel Feats
-- License : BSD-style (see the file etc/LICENSE.md)
-- Maintainer : Dom De Re
--
-------------------------------------------------------------------
module Irreverent.Bitbucket.Json.Common (
-- * Serialise
jsonBitbucketTime
, jsonDisplayName
, jsonEmailMailingList
, jsonGroupName
, jsonGroupOwner
, jsonGroupSlug
, jsonLanguage
, jsonHasIssues
, jsonHasWiki
, jsonUri
, jsonHref
, jsonRepositoryV1
, jsonScm
, jsonUsername
, jsonUuid
, jsonUser
, jsonUserV1
, jsonUserType
, jsonForkPolicy
, jsonPipelineEnvVarSecurity
, jsonPrivacy
, jsonProjectKey
, jsonSoloProjectKey
, jsonProjectName
, jsonProject
, jsonPublicSSHKey
, jsonPrivateSSHKey
, jsonRepoDescription
, jsonRepoName
, jsonRepoSlug
, jsonWebsite
-- ** Deserialisers
, parseBitbucketTimeJson
, parseDisplayNameJson
, parseEmailMailingListJson
, parseGroupName
, parseGroupOwner
, parseGroupSlug
, parseLanguageJson
, parseHasIssuesJson
, parseHasWikiJson
, parseUriJson
, parseHrefJson
, parseScmJson
, parseUsernameJson
, parseUuidJson
, parseUserTypeJson
, parseUserJson
, parseUserV1
, parseForkPolicyJson
, parsePrivacyJson
, parseProjectKeyJson
, parseSoloProjectKeyJson
, parsePipelineEnvSecurity
, parseProjectName
, parseProjectJson
, parsePublicSSHKey
, parsePrivateSSHKey
, parseRepoDescriptionJson
, parseRepoName
, parseRepoSlug
, parseRepositoryV1
, parseWebSiteJson
) where
import Irreverent.Bitbucket.Core.Data.Common (
BitbucketTime(..)
, DisplayName(..)
, EmailMailingList(..)
, ForkPolicy(..)
, Language(..)
, GroupName(..)
, GroupOwner(..)
, GroupSlug(..)
, HasIssues(..)
, HasWiki(..)
, Href(..)
, PipelinesEnvironmentVariableSecurity(..)
, Privacy(..)
, Project(..)
, ProjectKey(..)
, ProjectName(..)
, PrivateSSHKey(..)
, PublicSSHKey(..)
, RepoDescription(..)
, RepoName(..)
, RepositoryV1(..)
, RepoSlug(..)
, Scm(..)
, Uri(..)
, User(..)
, Username(..)
, UserType(..)
, UserV1(..)
, Uuid(..)
, Website(..)
)
import Ultra.Data.Aeson (
KeyValue
, Parser
, Object
, Value(..)
, (.=)
, (.:)
, (.:?)
, object
, parseJSON
, toJSON
, jsonTextEnum
)
import qualified Ultra.Data.Text as T
import Data.Time.ISO8601 (formatISO8601, parseISO8601)
import Preamble
jsonBitbucketTime :: BitbucketTime -> Value
jsonBitbucketTime (BitbucketTime t) = toJSON . formatISO8601 $ t
parseBitbucketTimeJson :: Value -> Parser BitbucketTime
parseBitbucketTimeJson v = parseJSON v >>=
maybe (fail "expected ISO8601 time") (pure . BitbucketTime) . parseISO8601 . T.unpack
jsonDisplayName :: DisplayName -> Value
jsonDisplayName (DisplayName n) = toJSON n
parseDisplayNameJson :: Value -> Parser DisplayName
parseDisplayNameJson v = DisplayName <$> parseJSON v
jsonEmailMailingList :: EmailMailingList -> Value
jsonEmailMailingList (EmailMailingList mailList) = toJSON mailList
parseEmailMailingListJson :: Value -> Parser EmailMailingList
parseEmailMailingListJson v = EmailMailingList <$> parseJSON v
jsonLanguage :: Language -> Value
jsonLanguage (Language l) = toJSON l
parseLanguageJson :: Value -> Parser Language
parseLanguageJson v = Language <$> parseJSON v
jsonHasIssues :: HasIssues -> Value
jsonHasIssues HasIssues = toJSON True
jsonHasIssues NoIssues = toJSON False
parseHasIssuesJson :: Value -> Parser HasIssues
parseHasIssuesJson v = flip fmap (parseJSON v) $ \case
True -> HasIssues
False -> NoIssues
jsonHasWiki :: HasWiki -> Value
jsonHasWiki HasWiki = toJSON True
jsonHasWiki NoWiki = toJSON False
parseHasWikiJson :: Value -> Parser HasWiki
parseHasWikiJson v = flip fmap (parseJSON v) $ \case
True -> HasWiki
False -> NoWiki
jsonUri :: Uri -> Value
jsonUri (Uri uri) = toJSON uri
parseUriJson :: Value -> Parser Uri
parseUriJson v = Uri <$> parseJSON v
jsonHref :: Href -> Value
jsonHref (Href name url) =
let
base :: (KeyValue a) => [a]
base = pure $ "href" .= jsonUri url
in object . maybe id (\n -> (:) ("name" .= n)) name $ base
parseHrefJson :: Object -> Parser Href
parseHrefJson o = Href
<$> o .:? "name"
<*> (o .: "href" >>= parseUriJson)
jsonScm :: Scm -> Value
jsonScm scm = toJSON $ case scm of
Git -> "git" :: T.Text
Mercurial -> "hg"
parseScmJson :: Value -> Parser Scm
parseScmJson = jsonTextEnum [("git", Git), ("hg", Mercurial)]
jsonUsername :: Username -> Value
jsonUsername (Username n) = toJSON n
parseUsernameJson :: Value -> Parser Username
parseUsernameJson v = Username <$> parseJSON v
jsonUuid :: Uuid -> Value
jsonUuid (Uuid u) = toJSON u
parseUuidJson :: Value -> Parser Uuid
parseUuidJson v = Uuid <$> parseJSON v
jsonUserType :: UserType -> Value
jsonUserType typ = toJSON $ case typ of
TeamUserType -> "team" :: T.Text
UserUserType -> "user"
parseUserTypeJson :: Value -> Parser UserType
parseUserTypeJson = jsonTextEnum [("team", TeamUserType), ("user", UserUserType)]
jsonUser :: User -> Value
jsonUser (User name dname typ uuid avatar html self) = object [
"links" .= object [
"avatar" .= jsonHref avatar
, "html" .= jsonHref html
, "self" .= jsonHref self
]
, "uuid" .= jsonUuid uuid
, "type" .= jsonUserType typ
, "display_name" .= jsonDisplayName dname
, "username" .= jsonUsername name
]
parseUserJson :: Object -> Parser User
parseUserJson o = do
linksObject <- o .: "links"
User
<$> (o .: "username" >>= parseUsernameJson)
<*> (o .: "display_name" >>= parseDisplayNameJson)
<*> (o .: "type" >>= parseUserTypeJson)
<*> (o .: "uuid" >>= parseUuidJson)
<*> (linksObject .: "avatar" >>= parseHrefJson)
<*> (linksObject .: "html" >>= parseHrefJson)
<*> (linksObject .: "self" >>= parseHrefJson)
jsonForkPolicy :: ForkPolicy -> Value
jsonForkPolicy policy = toJSON $ case policy of
NoForksPolicy -> "no_forks" :: T.Text
NoPublicForksPolicy -> "no_public_forks"
ForkAwayPolicy -> "allow_forks"
parseForkPolicyJson :: Value -> Parser ForkPolicy
parseForkPolicyJson = jsonTextEnum [
("no_forks", NoForksPolicy)
, ("no_public_forks", NoPublicForksPolicy)
, ("allow_forks", ForkAwayPolicy)
]
jsonPrivacy :: Privacy -> Value
jsonPrivacy p = toJSON $ case p of
Private -> True
Public -> False
parsePrivacyJson :: Value -> Parser Privacy
parsePrivacyJson v = flip fmap (parseJSON v) $ \case
True -> Private
False -> Public
-- |
-- In some messages, a project key appears in its own field,
-- but not as a string, but as a JSON object with a field "key"
jsonSoloProjectKey :: ProjectKey -> Value
jsonSoloProjectKey (ProjectKey key) = object [
"key" .= key
]
parseSoloProjectKeyJson :: Object -> Parser ProjectKey
parseSoloProjectKeyJson o = ProjectKey <$> o .: "key"
jsonProjectKey :: ProjectKey -> Value
jsonProjectKey (ProjectKey key) = toJSON key
parseProjectKeyJson :: Value -> Parser ProjectKey
parseProjectKeyJson v = ProjectKey <$> parseJSON v
jsonPublicSSHKey :: PublicSSHKey -> Value
jsonPublicSSHKey (PublicSSHKey key) = toJSON key
parsePublicSSHKey :: Value -> Parser PublicSSHKey
parsePublicSSHKey v = PublicSSHKey <$> parseJSON v
jsonPrivateSSHKey :: PrivateSSHKey -> Value
jsonPrivateSSHKey (PrivateSSHKey key) = toJSON key
parsePrivateSSHKey :: Value -> Parser PrivateSSHKey
parsePrivateSSHKey v = PrivateSSHKey <$> parseJSON v
jsonPipelineEnvVarSecurity :: PipelinesEnvironmentVariableSecurity -> Value
jsonPipelineEnvVarSecurity SecuredVariable = toJSON True
jsonPipelineEnvVarSecurity UnsecuredVariable = toJSON False
parsePipelineEnvSecurity :: Value -> Parser PipelinesEnvironmentVariableSecurity
parsePipelineEnvSecurity v = flip fmap (parseJSON v) $ \case
True -> SecuredVariable
False -> UnsecuredVariable
jsonProjectName :: ProjectName -> Value
jsonProjectName (ProjectName name) = toJSON name
parseProjectName :: Value -> Parser ProjectName
parseProjectName v = ProjectName <$> parseJSON v
jsonProject :: Project -> Value
jsonProject (Project name uuid key avatar html self) = object [
"links" .= object [
"avatar" .= jsonHref avatar
, "html" .= jsonHref html
, "self" .= jsonHref self
]
, "name" .= jsonProjectName name
, "uuid" .= jsonUuid uuid
, "type" .= ("project" :: T.Text)
, "key" .= jsonProjectKey key
]
parseProjectJson :: Object -> Parser Project
parseProjectJson o = do
linksObject <- o .: "links"
o .: "type" >>= jsonTextEnum [("project", ())]
Project
<$> (o .: "name" >>= parseProjectName)
<*> (o .: "uuid" >>= parseUuidJson)
<*> (o .: "key" >>= parseProjectKeyJson)
<*> (linksObject .: "avatar" >>= parseHrefJson)
<*> (linksObject .: "html" >>= parseHrefJson)
<*> (linksObject .: "self" >>= parseHrefJson)
jsonRepoDescription :: RepoDescription -> Value
jsonRepoDescription (RepoDescription desc) = toJSON desc
parseRepoDescriptionJson :: Value -> Parser RepoDescription
parseRepoDescriptionJson v = RepoDescription <$> parseJSON v
jsonRepoName :: RepoName -> Value
jsonRepoName (RepoName repoName) = toJSON repoName
parseRepoName :: Value -> Parser RepoName
parseRepoName v = RepoName <$> parseJSON v
jsonGroupOwner :: GroupOwner -> Value
jsonGroupOwner (GroupOwner owner) = toJSON owner
parseGroupOwner :: Value -> Parser GroupOwner
parseGroupOwner = fmap GroupOwner . parseJSON
jsonGroupName :: GroupName -> Value
jsonGroupName (GroupName name) = toJSON name
parseGroupName :: Value -> Parser GroupName
parseGroupName = fmap GroupName . parseJSON
jsonGroupSlug :: GroupSlug -> Value
jsonGroupSlug (GroupSlug s) = toJSON s
parseGroupSlug :: Value -> Parser GroupSlug
parseGroupSlug = fmap GroupSlug . parseJSON
jsonUserV1 :: UserV1 -> Value
jsonUserV1 (UserV1 nm first' last' avatar typ) =
let
isTeam :: Bool
isTeam = case typ of
TeamUserType -> True
UserUserType -> False
in object [
"username" .= jsonUsername nm
, "first_name" .= jsonDisplayName first'
, "last_name" .= jsonDisplayName last'
, "avatar" .= jsonUri avatar
, "is_team" .= isTeam
]
parseUserV1 :: Object -> Parser UserV1
parseUserV1 o =
let
userType' :: Bool -> UserType
userType' True = TeamUserType
userType' False = UserUserType
in UserV1
<$> (o .: "username" >>= parseUsernameJson)
<*> (o .: "first_name" >>= parseDisplayNameJson)
<*> (o .: "last_name" >>= parseDisplayNameJson)
<*> (o .: "avatar" >>= parseUriJson)
<*> (userType' <$> o .: "is_team")
jsonRepositoryV1 :: RepositoryV1 -> Value
jsonRepositoryV1 (RepositoryV1 owner nm slug) = object [
"owner" .= jsonUserV1 owner
, "name" .= jsonRepoName nm
, "slug" .= jsonRepoSlug slug
]
parseRepositoryV1 :: Object -> Parser RepositoryV1
parseRepositoryV1 o = RepositoryV1
<$> (o .: "owner" >>= parseUserV1)
<*> (o .: "name" >>= parseRepoName)
<*> (o .: "slug" >>= parseRepoSlug)
jsonRepoSlug :: RepoSlug -> Value
jsonRepoSlug (RepoSlug slug) = toJSON slug
parseRepoSlug :: Value -> Parser RepoSlug
parseRepoSlug v = RepoSlug <$> parseJSON v
jsonWebsite :: Website -> Value
jsonWebsite (Website website) = toJSON website
parseWebSiteJson :: Value -> Parser Website
parseWebSiteJson v = Website <$> parseJSON v
| irreverent-pixel-feats/bitbucket | bitbucket-json/src/Irreverent/Bitbucket/Json/Common.hs | bsd-3-clause | 11,516 | 0 | 16 | 2,138 | 3,163 | 1,703 | 1,460 | 329 | 3 |
{-# LANGUAGE CPP #-}
module Test.SSH.Internal.Util (sshInternalUtilTests) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import qualified Data.ByteString.Char8 as Char8
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.QuickCheck (choose, elements, listOf, testProperty)
import SSH.Internal.Util
(fromLBS, fromOctets, powersOf, strictLBS, toBase, toLBS, toOctets)
import Test.Util (ArbitraryLBS(..))
-- | Round-trip: converting a lazy ByteString to a String and back
-- must be the identity.
toFromLBSTest :: TestTree
toFromLBSTest = testProperty "(toLBS . fromLBS) x == x" $
    \(ArbitraryLBS lazyByteString) ->
        (toLBS . fromLBS) lazyByteString == lazyByteString

-- | Round-trip in the other direction: String -> lazy ByteString -> String.
fromToLBSTest :: TestTree
fromToLBSTest = testProperty "(fromLBS . toLBS) x == x" $
    \string -> (fromLBS . toLBS) string == string

-- | strictLBS on a lazy ByteString must agree with packing the
-- original String strictly.
strictLBSTest :: TestTree
strictLBSTest = testProperty "converting to strict bytestring works" $
    \string -> (strictLBS . toLBS) string == Char8.pack string
-- | Each adjacent pair in powersOf n must differ by a factor of n
-- (checked for non-zero n in [-100,100], over the first 20 pairs).
powersOfTest :: TestTree
powersOfTest = testProperty "powersOf generates powers of a number" $ do
    n <- elements $ [-100..(-1)] ++ [1..100]
    let powers = powersOf (n::Integer)
        -- if n is 3, then this creates a list of tuples like
        -- [(1,3), (3, 9), (9, 27), (27, 81), ...]
        powersZipped = take 20 $ zip powers $ tail powers
    return $ all (\(a, b) -> b `div` a == n) powersZipped
-- | Re-interpreting the digits produced by 'toBase' (via 'powersOf')
-- recovers the original number.
toBaseTest :: TestTree
toBaseTest = testProperty "toBase can be converted back to original number" $ do
    base <- choose (2, 1000)
    n <- choose (0 :: Integer, 1000)
    let digits = toBase base n
        -- 'toBase' yields most-significant digit first, so reverse
        -- before pairing with ascending powers of the base.
        reconstructed = sum (zipWith (*) (powersOf base) (reverse digits))
    return (reconstructed == n)
-- | 'toOctets' inverts 'fromOctets' once leading zero octets are
-- stripped (they carry no information).
toFromOctetsTest :: TestTree
toFromOctetsTest = testProperty "(toOctets . fromOctets) x == x" $ do
    base <- choose (2 :: Integer, 256)
    octets <- dropWhile (== 0) <$> listOf (choose (0, fromIntegral base - 1))
    return (toOctets base (fromOctets base octets :: Integer) == octets)

-- | 'fromOctets' inverts 'toOctets' for non-negative numbers.
fromToOctetsTest :: TestTree
fromToOctetsTest = testProperty "(fromOctets . toOctets) x == x" $ do
    base <- choose (2 :: Integer, 256)
    n <- choose (0, 100000000000000)
    return ((fromOctets base (toOctets base n) :: Integer) == n)
-- | All property tests for "SSH.Internal.Util", grouped for the tasty
-- runner.
sshInternalUtilTests :: TestTree
sshInternalUtilTests = testGroup "SSH/Internal/Util.hs tests"
    [ fromToLBSTest
    , fromToOctetsTest
    , strictLBSTest
    , powersOfTest
    , toBaseTest
    , toFromLBSTest
    , toFromOctetsTest
    ]
| cdepillabout/ssh | test/Test/SSH/Internal/Util.hs | bsd-3-clause | 2,541 | 0 | 16 | 547 | 710 | 384 | 326 | 58 | 1 |
module RomanNumbersKata.Day4Spec (spec) where
import Test.Hspec
import RomanNumbersKata.Day4 (toRomanNumber)
-- | Examples for 'toRomanNumber', driven from a table of
-- (description, input, expected numeral) triples.  The descriptions and
-- expectations are byte-identical to the original hand-written examples.
spec :: Spec
spec = mapM_ check cases
  where
    check (description, n, expected) =
        it description (toRomanNumber n == expected)
    cases =
        [ ("returns an empty string when given 0", 0, "")
        , ("returns \"I\" when given 1", 1, "I")
        , ("returns \"V\" when given 5", 5, "V")
        , ("returns \"IV\" when given 4", 4, "IV")
        , ("returns \"X\" when given 10", 10, "X")
        , ("returns \"XIV\" when given 14", 14, "XIV")
        , ("returns \"IX\" when given 9", 9, "IX")
        , ("returns \"L\" when given 50", 50, "L")
        , ("returns \"XLIX\" when given 49", 49, "XLIX")
        ]
| Alex-Diez/haskell-tdd-kata | old-katas/test/RomanNumbersKata/Day4Spec.hs | bsd-3-clause | 882 | 0 | 10 | 313 | 198 | 94 | 104 | 23 | 1 |
module Main where
import Measurer
import VChanUtil
import Demo3Shared
import Control.Monad
-- | Entry point: open the measurement vchan as a server and service
-- measurement requests until the process is killed.
main :: IO ()
main = do
    putStrLn "main of Measurer"
    putStrLn "OPENING CHAN"
    chan <- server_init attId
    -- 'forever' never returns, so everything after it was dead code.
    -- The original tried to print "CLOSING CHAN", 'close' the channel
    -- and 'return ()' afterwards; those unreachable statements have
    -- been removed (the channel is reclaimed when the process exits).
    forever $ meaLoop chan
-- | Handle one round of measurement traffic on the channel; simply
-- delegates to 'meaProcess'.
meaLoop :: LibXenVChan -> IO ()
meaLoop = meaProcess
-- | Controller library.
module HL.Controller
(module C)
where
import HL.Foundation
import Yesod as C
-- | Controllers are ordinary Yesod handlers; this alias only documents
-- the role they play in this application.
type Controller = Handler
| yogsototh/hl | src/HL/Controller.hs | bsd-3-clause | 136 | 0 | 4 | 27 | 29 | 20 | 9 | 5 | 0 |
{-# Language FlexibleInstances #-}
-- | Rendering of Csound files and playing the music in real time.
--
-- How are we going to get the sound out of Haskell code?
-- Instruments are ready and we have written all the scores for them.
-- Now, it's time to use the rendering functions. We can render haskell expressions
-- to Csound code. A rendering function takes a value that represents a sound (it's a tuple of signals)
-- and produces a string with Csound code. It can take a value that represents
-- the flags for the csound compiler and global settings ('Csound.Options').
-- Then we can save this string to file and convert it to sound with csound compiler
--
-- > csound -o music.wav music.csd
--
-- Or we can play it in real time with -odac flag. It sends the sound directly to
-- soundcard. It's usefull when we are using midi or tweek the parameters in real time
-- with sliders or knobs.
--
-- > csound -odac music.csd
--
-- The main function of this module is 'Csound.IO.renderCsdBy'. Other function are nothing but
-- wrappers that produce the Csound code and make something useful with it (saving to file,
-- playing with specific player or in real time).
module Csound.IO (
-- * Rendering
RenderCsd(..),
renderCsd,
writeCsd, writeCsdBy,
writeSnd, writeSndBy,
-- * Playing the sound
playCsd, playCsdBy,
mplayer, mplayerBy, totem, totemBy,
-- * Live performance
dac, dacBy, vdac, vdacBy,
-- * Render and run
csd, csdBy
) where
import System.Process
import qualified Control.Exception as E
import Data.Monoid
import Data.Default
import Csound.Typed
import Csound.Control.Gui
import Csound.Options(setSilent, setDac)
-- | Render an action that produces audio outputs, using the given options.
render :: Sigs a => Options -> SE a -> IO String
render = renderOutBy

-- | Render a purely side-effecting action (no audio outputs).
render_ :: Options -> SE () -> IO String
render_ = renderOutBy_
-- | Values that can be rendered to the text of a Csound (.csd) document.
class RenderCsd a where
    renderCsdBy :: Options -> a -> IO String

-- A pure procedure renders with no audio outputs.
instance RenderCsd (SE ()) where
    renderCsdBy = render_

-- Pure signal tuples are lifted into 'SE' with 'return' and rendered
-- as audio outputs.
instance RenderCsd Sig where
    renderCsdBy opt a = render opt (return a)

instance RenderCsd Sig2 where
    renderCsdBy opt a = render opt (return a)

instance RenderCsd Sig4 where
    renderCsdBy opt a = render opt (return a)

instance RenderCsd Sig6 where
    renderCsdBy opt a = render opt (return a)

instance RenderCsd Sig8 where
    renderCsdBy opt a = render opt (return a)

instance RenderCsd (Sig8, Sig8) where
    renderCsdBy opt a = render opt (return a)

instance RenderCsd (Sig8, Sig8, Sig8, Sig8) where
    renderCsdBy opt a = render opt (return a)

-- Signal tuples already inside 'SE' render directly.
instance RenderCsd (SE Sig) where
    renderCsdBy opt a = render opt a

instance RenderCsd (SE Sig2) where
    renderCsdBy opt a = render opt a

instance RenderCsd (SE Sig4) where
    renderCsdBy opt a = render opt a

instance RenderCsd (SE Sig6) where
    renderCsdBy opt a = render opt a

instance RenderCsd (SE Sig8) where
    renderCsdBy opt a = render opt a

instance RenderCsd (SE (Sig8, Sig8)) where
    renderCsdBy opt a = render opt a

instance RenderCsd (SE (Sig8, Sig8, Sig8, Sig8)) where
    renderCsdBy opt a = render opt a

-- Functions from input signals are rendered as effects (audio
-- processors) with 'renderEffBy'.
instance (Sigs a) => RenderCsd (a -> Sig) where
    renderCsdBy opt f = renderEffBy opt (return . f)

instance (Sigs a) => RenderCsd (a -> Sig2) where
    renderCsdBy opt f = renderEffBy opt (return . f)

instance (Sigs a) => RenderCsd (a -> Sig4) where
    renderCsdBy opt f = renderEffBy opt (return . f)

instance (Sigs a) => RenderCsd (a -> Sig6) where
    renderCsdBy opt f = renderEffBy opt (return . f)

instance (Sigs a) => RenderCsd (a -> Sig8) where
    renderCsdBy opt f = renderEffBy opt (return . f)

instance (Sigs a) => RenderCsd (a -> (Sig8, Sig8)) where
    renderCsdBy opt f = renderEffBy opt (return . f)

instance (Sigs a) => RenderCsd (a -> (Sig8, Sig8, Sig8, Sig8)) where
    renderCsdBy opt f = renderEffBy opt (return . f)

instance (Sigs a, Sigs b) => RenderCsd (a -> SE b) where
    renderCsdBy opt f = renderEffBy opt f
-- A 'Source' pairs a GUI with a signal: create the GUI panel as a side
-- effect, then render the signal itself.
instance RenderCsd (Source Sig) where
    renderCsdBy opt a = renderCsdBy opt res
        where res = do
                (gui, asig) <- a
                panel gui
                return asig

instance RenderCsd (Source Sig2) where
    renderCsdBy opt a = renderCsdBy opt res
        where res = do
                (gui, asig) <- a
                panel gui
                return asig

instance RenderCsd (Source Sig4) where
    renderCsdBy opt a = renderCsdBy opt res
        where res = do
                (gui, asig) <- a
                panel gui
                return asig

-- Same as above, but the payload is itself an 'SE' action, so it is
-- sequenced instead of returned.
instance RenderCsd (Source (SE Sig)) where
    renderCsdBy opt a = renderCsdBy opt res
        where res = do
                (gui, asig) <- a
                panel gui
                asig

instance RenderCsd (Source (SE Sig2)) where
    renderCsdBy opt a = renderCsdBy opt res
        where res = do
                (gui, asig) <- a
                panel gui
                asig

instance RenderCsd (Source (SE Sig4)) where
    renderCsdBy opt a = renderCsdBy opt res
        where res = do
                (gui, asig) <- a
                panel gui
                asig

-- A GUI with no signal payload: only the panel is created.
instance RenderCsd (Source ()) where
    renderCsdBy opt src = renderCsdBy opt $ do
        (ui, _) <- src
        panel ui

instance RenderCsd (Source (SE ())) where
    renderCsdBy opt src = renderCsdBy opt (joinSource src)
-- | Render a value to Csound code using the default options.
renderCsd :: RenderCsd a => a -> IO String
renderCsd = renderCsdBy def

-- | Render a value to Csound code and write it to the given file.
writeCsd :: RenderCsd a => String -> a -> IO ()
writeCsd file a = renderCsd a >>= writeFile file

-- | Like 'writeCsd' but with explicit Csound options.
writeCsdBy :: RenderCsd a => Options -> String -> a -> IO ()
writeCsdBy opt file a = renderCsdBy opt a >>= writeFile file
-- | Render to Csound code, then invoke the @csound@ compiler to write
-- the resulting sound to the given wav-file.
writeSnd :: RenderCsd a => String -> a -> IO ()
writeSnd = writeSndBy def

-- | Like 'writeSnd' but with explicit Csound options.  An intermediate
-- @tmp.csd@ file is written in the current directory.
writeSndBy :: RenderCsd a => Options -> String -> a -> IO ()
writeSndBy opt file a = do
    let csdFile = "tmp.csd"
    writeCsdBy opt csdFile a
    runWithUserInterrupt ("csound -o " ++ file ++ " " ++ csdFile)
-- | Renders Csound file, saves it to the given file, renders with csound
-- command and plays it with the given program.
--
-- > playCsd program file csd
--
-- Produces files @file.csd@ (with 'Csound.Render.Mix.renderCsd') and
-- @file.wav@ (with @csound@) and then invokes:
--
-- > program "file.wav"
playCsd :: (RenderCsd a) => (String -> IO ()) -> String -> a -> IO ()
playCsd = playCsdBy def

-- | Works just like 'Csound.Render.Mix.playCsd' but you can supply csound options.
playCsdBy :: (RenderCsd a) => Options -> (String -> IO ()) -> String -> a -> IO ()
playCsdBy opt player file a = do
    writeCsdBy opt fileCsd a
    -- NOTE(review): the command line is interpreted by a shell, so a
    -- 'file' containing spaces or metacharacters will break; callers
    -- control the name, matching the original behaviour.
    runWithUserInterrupt $ "csound -o " ++ fileWav ++ " " ++ fileCsd
    player fileWav          -- trailing redundant 'return ()' removed
    where fileCsd = file ++ ".csd"
          fileWav = file ++ ".wav"
-- | Play the rendered file with an external player invoked as
-- @player file@ on the command line.
simplePlayCsdBy :: (RenderCsd a) => Options -> String -> String -> a -> IO ()
simplePlayCsdBy opt player =
    playCsdBy opt (\ file -> runWithUserInterrupt (player ++ " " ++ file))
-- | Renders csound code to file @tmp.csd@ with flags set to @-odac@ and @-Ma@
-- (sound output goes to soundcard in real time).
dac :: (RenderCsd a) => a -> IO ()
dac = dacBy def

-- | 'Csound.Base.dac' with options.  The supplied options are extended
-- with 'setDac' so output goes to the soundcard.
dacBy :: (RenderCsd a) => Options -> a -> IO ()
dacBy opt' a = do
    writeCsdBy opt "tmp.csd" a
    -- was: "csound " ++ "tmp.csd" -- a pointless concatenation of two
    -- constants; the resulting command is identical.
    runWithUserInterrupt "csound tmp.csd"
    where opt = opt' <> setDac
-- | Output to dac with virtual midi keyboard.
vdac :: (RenderCsd a) => a -> IO ()
vdac = dacBy (setVirtual def)

-- | Output to dac with virtual midi keyboard with specified options.
vdacBy :: (RenderCsd a) => Options -> a -> IO ()
vdacBy opt = dacBy (setVirtual opt)

-- Force real-time midi to the virtual keyboard and select midi
-- device "0" in the options' flags.
setVirtual :: Options -> Options
setVirtual a = a { csdFlags = (csdFlags a) { rtmidi = Just VirtualMidi, midiRT = m { midiDevice = Just "0" } } }
    where m = midiRT $ csdFlags a
-- | Render to the file @tmp.csd@ and invoke csound on it, with silent
-- (no-sound-output) options.
csd :: (RenderCsd a) => a -> IO ()
csd = csdBy setSilent

-- | Like 'csd' but with additional options; 'setSilent' is always
-- prepended so the run stays silent.
csdBy :: (RenderCsd a) => Options -> a -> IO ()
csdBy opts sound = do
    writeCsdBy (setSilent <> opts) "tmp.csd" sound
    runWithUserInterrupt "csound tmp.csd"
----------------------------------------------------------
-- players

-- | Renders to tmp.csd and tmp.wav and plays with mplayer.
mplayer :: (RenderCsd a) => a -> IO ()
mplayer = mplayerBy def

-- | Renders to tmp.csd and tmp.wav and plays with mplayer.
mplayerBy :: (RenderCsd a) => Options -> a -> IO ()
mplayerBy opt = simplePlayCsdBy opt "mplayer" "tmp"

-- | Renders to tmp.csd and tmp.wav and plays with totem player.
totem :: (RenderCsd a) => a -> IO ()
totem = totemBy def

-- | Renders to tmp.csd and tmp.wav and plays with totem player.
totemBy :: (RenderCsd a) => Options -> a -> IO ()
totemBy opt = simplePlayCsdBy opt "totem" "tmp"
----------------------------------------------------------
-- handle user interrupts

-- | Run a shell command and wait for it to finish.  If the user hits
-- Ctrl-C, terminate the child process before re-raising the interrupt;
-- any other asynchronous exception is re-thrown untouched.
runWithUserInterrupt :: String -> IO ()
runWithUserInterrupt cmd = do
    pid <- runCommand cmd
    (waitForProcess pid >> return ()) `E.catch` onInterrupt pid
    where
        onInterrupt :: ProcessHandle -> E.AsyncException -> IO ()
        onInterrupt p E.UserInterrupt = do
            terminateProcess p
            E.throw E.UserInterrupt
        onInterrupt _ e = E.throw e
| silky/csound-expression | src/Csound/IO.hs | bsd-3-clause | 9,519 | 0 | 12 | 2,338 | 2,677 | 1,376 | 1,301 | 174 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_HADDOCK hide #-}
-- we hide this module from haddock to enforce GHC.Stack as the main
-- access point.
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Stack.Types
-- Copyright : (c) The University of Glasgow 2015
-- License : see libraries/ghc-prim/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- type definitions for call-stacks via implicit parameters.
-- Use "GHC.Stack" from the base package instead of importing this
-- module directly.
--
-----------------------------------------------------------------------------
module GHC.Stack.Types (
-- * Implicit parameter call stacks
CallStack(..), emptyCallStack, freezeCallStack, getCallStack, pushCallStack,
-- * Source locations
SrcLoc(..)
) where
{-
Ideally these would live in GHC.Stack but sadly they can't due to this
import cycle,
Module imports form a cycle:
module ‘Data.Maybe’ (libraries/base/Data/Maybe.hs)
imports ‘GHC.Base’ (libraries/base/GHC/Base.hs)
which imports ‘GHC.Err’ (libraries/base/GHC/Err.hs)
which imports ‘GHC.Stack’ (libraries/base/dist-install/build/GHC/Stack.hs)
which imports ‘GHC.Foreign’ (libraries/base/GHC/Foreign.hs)
which imports ‘Data.Maybe’ (libraries/base/Data/Maybe.hs)
-}
import GHC.Types
-- Make implicit dependency known to build system
import GHC.Tuple ()
import GHC.Integer ()
----------------------------------------------------------------------
-- Explicit call-stacks built via ImplicitParams
----------------------------------------------------------------------
-- | Implicit @CallStack@s are an alternate method of obtaining the call stack
-- at a given point in the program.
--
-- GHC has two built-in rules for solving implicit-parameters of type
-- @CallStack@.
--
-- 1. If the @CallStack@ occurs in a function call, it appends the
-- source location of the call to the @CallStack@ in the environment.
-- 2. @CallStack@s that cannot be solved normally (i.e. unbound
-- occurrences) are defaulted to the empty @CallStack@.
--
-- Otherwise implicit @CallStack@s behave just like ordinary implicit
-- parameters. For example:
--
-- @
-- myerror :: (?callStack :: CallStack) => String -> a
-- myerror msg = error (msg ++ "\n" ++ prettyCallStack ?callStack)
-- @
--
-- Will produce the following when evaluated,
--
-- @
-- ghci> myerror "die"
-- *** Exception: die
-- CallStack (from ImplicitParams):
-- myerror, called at <interactive>:2:1 in interactive:Ghci1
-- @
--
-- @CallStack@s do not interact with the RTS and do not require compilation with
-- @-prof@. On the other hand, as they are built up explicitly using
-- implicit-parameters, they will generally not contain as much information as
-- the simulated call-stacks maintained by the RTS.
--
-- A @CallStack@ is a @[(String, SrcLoc)]@. The @String@ is the name of
-- function that was called, the 'SrcLoc' is the call-site. The list is
-- ordered with the most recently called function at the head.
--
-- @since 4.8.1.0
data CallStack
  = EmptyCallStack
  | PushCallStack ([Char], SrcLoc) CallStack
  | FreezeCallStack CallStack
    -- ^ Freeze the stack at the given @CallStack@, preventing any further
    -- call-sites from being pushed onto it.
    -- See Note [Overview of implicit CallStacks]

-- | Extract a list of call-sites from the 'CallStack'.
--
-- The list is ordered by most recent call.
--
-- @since 4.8.1.0
getCallStack :: CallStack -> [([Char], SrcLoc)]
getCallStack EmptyCallStack           = []
getCallStack (PushCallStack cs rest)  = cs : getCallStack rest
getCallStack (FreezeCallStack frozen) = getCallStack frozen

-- Note [Definition of CallStack]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Implicit CallStacks are defined very early in base because they are
-- used by error and undefined.  At this point in the dependency graph,
-- we do not have enough functionality to (conveniently) write a nice
-- pretty-printer for CallStack.  The sensible place to define the
-- pretty-printer would be GHC.Stack, which is the main access point,
-- but unfortunately GHC.Stack imports GHC.Exception, which *needs*
-- the pretty-printer.  So the CallStack type and functions are split
-- between three modules:
--
--   1. GHC.Stack.Types: defines the type and *simple* functions
--   2. GHC.Exception: defines the pretty-printer
--   3. GHC.Stack: exports everything and acts as the main access point

-- | Push a call-site onto the stack.
--
-- This function has no effect on a frozen 'CallStack'.
--
-- @since 4.9.0.0
pushCallStack :: ([Char], SrcLoc) -> CallStack -> CallStack
pushCallStack _  stk@(FreezeCallStack _) = stk
pushCallStack cs stk                     = PushCallStack cs stk
{-# INLINE pushCallStack #-}

-- | The empty 'CallStack'.
--
-- @since 4.9.0.0
emptyCallStack :: CallStack
emptyCallStack = EmptyCallStack
{-# INLINE emptyCallStack #-}

-- | Freeze a call-stack, preventing any further call-sites from being appended.
--
-- prop> pushCallStack callSite (freezeCallStack callStack) = freezeCallStack callStack
--
-- @since 4.9.0.0
freezeCallStack :: CallStack -> CallStack
freezeCallStack = FreezeCallStack
{-# INLINE freezeCallStack #-}

-- | A single location in the source code.
--
-- @since 4.8.1.0
data SrcLoc = SrcLoc
  { srcLocPackage   :: [Char]
  , srcLocModule    :: [Char]
  , srcLocFile      :: [Char]
  , srcLocStartLine :: Int
  , srcLocStartCol  :: Int
  , srcLocEndLine   :: Int
  , srcLocEndCol    :: Int
  }
| gridaphobe/ghc | libraries/base/GHC/Stack/Types.hs | bsd-3-clause | 5,604 | 0 | 9 | 964 | 406 | 279 | 127 | 36 | 3 |
module Main where
import BasicPrelude
import NeuroSpider.Gui
-- | Program entry point: hand control to the graphical user interface.
main :: IO ()
main = runGUI
| pavelkogan/NeuroSpider | src/Main.hs | bsd-3-clause | 93 | 0 | 6 | 18 | 27 | 16 | 11 | 5 | 1 |
--
-- HTTP client for use with io-streams
--
-- Copyright © 2012-2013 Operational Dynamics Consulting, Pty Ltd
--
-- The code in this file, and the program it is a part of, is made
-- available to you by its authors as open source software: you can
-- redistribute it and/or modify it under a BSD licence.
--
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS -fno-warn-unused-imports #-}
module Snippet where
import Control.Exception (bracket)
import Network.Http.Client
--
-- Otherwise redundent imports, but useful for testing in GHCi.
--
import Blaze.ByteString.Builder (Builder)
import qualified Blaze.ByteString.Builder as Builder
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S
import Debug.Trace
import OpenSSL (withOpenSSL)
import System.Exit (exitSuccess)
import System.IO.Streams (InputStream, OutputStream, stdout)
import qualified System.IO.Streams as Streams
-- | Run the currently selected example snippet.
main :: IO ()
main = example3
-- | Manually managed SSL connection: open a connection to the GitHub
-- API, build and dump a GET request, send it with an empty body, print
-- the response with 'debugHandler', then close the connection.
-- The top-level type signature was missing and has been added.
example1 :: IO ()
example1 = withOpenSSL $ do
    ctx <- baselineContextSSL
    c <- openConnectionSSL ctx "api.github.com" 443

    q <- buildRequest $ do
        http GET "/users/afcowie/orgs"
        setAccept "application/json"

    putStr $ show q

    sendRequest c q emptyBody
    receiveResponse c debugHandler
    closeConnection c
-- | Convenience API: two HTTPS GETs using the library's own connection
-- handling.  Missing type signature added.
example2 :: IO ()
example2 = do
    get "https://api.github.com/users/afcowie" debugHandler
    get "https://github.com/afcowie" debugHandler

--
-- Does nesting 'withOpenSSL' break things? Apparently not:
--
example3 :: IO ()
example3 = withOpenSSL $ do
    get "https://api.github.com/users/afcowie" debugHandler
    get "https://github.com/afcowie" debugHandler
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts, TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# OPTIONS -Wall #-}
module RegisterMachine.Models.CM3 (
mach
) where
import Basic.Types
import Basic.MemoryImpl (ListMem, fillMem, Address(..))
import Basic.Features
import RegisterMachine.State
import RegisterMachine.Operations
import RegisterMachine.Machine
--4.1.3 Base model 3

-- | Instruction set of counter-machine base model 3, parameterised over
-- the register-index type @i@: increment a register, copy one register
-- to another, jump if two registers hold equal values, or halt.
data Lang i = INC i | CP i i | JE i i i | Halt

instance Language (Lang i)

instance IsHalt (Lang i) where
  isHaltInstr Halt = True
  isHaltInstr _ = False

-- | Small-step semantics: map each instruction to a state transformer
-- over any state with a program counter and an addressable heap.
trans :: (Eq (HContents st), RWValue (Address v) (Heap st) (HContents st),
  Incr v, Incr (HContents st), HasQ st, HasHeap st, Q st ~ Address v) =>
  Lang v -> st -> st
trans (INC r) = incrMem (A r)
trans Halt = halt
trans (CP r1 r2) = copyMem (A r1) (A r2)
trans (JE r1 r2 z) = jumpe2Mem (A r1) (A r2) (A z)

-- A sample program stored in list-backed memory.
-- NOTE(review): what the program computes is not evident from this
-- module alone -- confirm against the accompanying text (section 4.1.3).
prog :: ListMem (Lang Int)
prog = fillMem [INC 0, CP 0 2, INC 3, INC 0, JE 0 1 2, INC 1, Halt]

-- | The complete machine: the program, the initial counter-machine
-- state, and the compiled transition function.
mach :: RM1 (Lang Int) ListMem (CounterMachineState Int ListMem (Address Int))
mach = RM prog initCM (compile trans)
| davidzhulijun/TAM | RegisterMachine/Models/CM3.hs | bsd-3-clause | 1,094 | 0 | 9 | 221 | 462 | 241 | 221 | 27 | 1 |
{- |
Module : ./DFOL/Comorphism.hs
Description : Helper functions for the DFOL -> CASL translation
Copyright : (c) Kristina Sojakova, DFKI Bremen 2009
License : GPLv2 or higher, see LICENSE.txt
Maintainer : k.sojakova@jacobs-university.de
Stability : experimental
Portability : portable
-}
module DFOL.Comorphism where
import Common.Id
import Common.AS_Annotation
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
import Data.Maybe
import qualified Data.Set as Set
import qualified Data.Map as Map
import DFOL.Sign
import DFOL.AS_DFOL
import DFOL.Morphism
import DFOL.Symbol
import qualified CASL.Sign as CASL_Sign
import qualified CASL.AS_Basic_CASL as CASL_AS
import qualified CASL.Morphism as CASL_Morphism
-- shorthand notation
nr :: Range
nr = nullRange

-- the unique sort
-- All translated DFOL entities live in this single CASL sort.
sort :: CASL_AS.SORT
sort = mkId [Token "Sort" nr]

-- the special bot symbol
-- Distinguished "undefined" element used to totalize partial functions.
botTok :: Token
botTok = Token "Bot" nr

bot :: CASL_AS.CASLTERM
bot = CASL_AS.Application (CASL_AS.Qual_op_name (mkId [botTok])
        (CASL_AS.Op_type CASL_AS.Total [] sort nr) nr) [] nr

-- constructing a FOL type of the specified arity
-- (a list of n copies of the unique sort)
folType :: Int -> [CASL_AS.SORT]
folType n = replicate n sort
-- signature map
-- | Translate a DFOL signature to CASL: start from a base signature
-- containing the unique sort and the 'bot' constant, then add every
-- DFOL symbol with 'sigMapH'.
sigMap :: Sign -> CASL_Sign.CASLSign
sigMap sig =
  foldr (sigMapH sig) caslSig2 symbols
  where caslSig2 = (CASL_Sign.emptySign ())
          { CASL_Sign.sortRel = Rel.insertKey sort Rel.empty
          , CASL_Sign.opMap = MapSet.insert (mkId [botTok])
              (CASL_Sign.sortToOpType sort) MapSet.empty }
        symbols = Set.toList $ getSymbols sig

-- | Add one DFOL symbol to a CASL signature: sorts become predicates of
-- arity+1 (the extra argument being the element tested for membership),
-- predicates keep their arity, functions become total operations.
sigMapH :: Sign -> NAME -> CASL_Sign.CASLSign -> CASL_Sign.CASLSign
sigMapH sig sym csig = case kind of
  SortKind -> csig
    { CASL_Sign.predMap = insSym (predTy $ arity + 1) predis }
  PredKind -> csig { CASL_Sign.predMap = insSym (predTy arity) predis }
  FuncKind -> csig
    { CASL_Sign.opMap = MapSet.insert (mkId [sym])
        (CASL_Sign.mkTotOpType (folType arity) sort)
        $ CASL_Sign.opMap csig }
  where predis = CASL_Sign.predMap csig
        insSym = MapSet.insert (mkId [sym])
        predTy = CASL_Sign.PredType . folType
        -- Irrefutable patterns: 'sym' comes from 'getSymbols sig', so
        -- both lookups are expected to succeed.
        Just kind = getSymbolKind sym sig
        Just arity = getSymbolArity sym sig
-- | Generate the axioms describing a translated signature: axioms for
-- the predicate symbols first, then function symbols, then sorts.
generateAxioms :: Sign -> [Named CASL_AS.CASLFORMULA]
generateAxioms sig = concat
    [ generatePredAxioms sig (symbolsOfKind PredKind)
    , generateFuncAxioms sig (symbolsOfKind FuncKind)
    , generateSortAxioms sig (symbolsOfKind SortKind)
    ]
    where symbolsOfKind = Set.toList . getSymbolsByKind sig
-- generating axioms for translated predicate symbols
generatePredAxioms :: Sign -> [NAME] -> [Named CASL_AS.CASLFORMULA]
generatePredAxioms = concatMap . generatePredAxiomsH

-- | For a predicate with arguments: whenever some argument is outside
-- its declared type, the predicate does not hold.  Nullary predicates
-- need no axiom (the guard comprehension yields []).
generatePredAxiomsH :: Sign -> NAME -> [Named CASL_AS.CASLFORMULA]
generatePredAxiomsH sig p =
  [makeNamed ("gen_pred_" ++ show p) formula | not $ null argNames]
  where Just argNames = getArgumentNames p sig
        Just argTypes = getArgumentTypes p sig
        args = map makeVar argNames
        formula = makeForall
                    argNames
                    (CASL_AS.mkImpl
                       (CASL_AS.mkNeg (makeTypeHyps argTypes args sig))
                       (CASL_AS.mkNeg (makePredication p args sig)))

-- generating axioms for translated function symbols
generateFuncAxioms :: Sign -> [NAME] -> [Named CASL_AS.CASLFORMULA]
generateFuncAxioms = concatMap . generateFuncAxiomsH

-- | For a function: ill-typed arguments are mapped to 'bot'
-- (formula 1) and well-typed arguments land in the declared result
-- type (formula 2).  A constant only needs its result-type axiom.
generateFuncAxiomsH :: Sign -> NAME -> [Named CASL_AS.CASLFORMULA]
generateFuncAxiomsH sig f =
  if null argNames
    then [makeNamed ("gen_func_1_" ++ show f) formula0]
    else [makeNamed ("gen_func_1_" ++ show f) formula1,
          makeNamed ("gen_func_2_" ++ show f) formula2]
  where Just argNames = getArgumentNames f sig
        Just argTypes = getArgumentTypes f sig
        Just resultType = getReturnType f sig
        args = map makeVar argNames
        formula1 = makeForall
                     argNames
                     (CASL_AS.mkImpl
                        (CASL_AS.mkNeg (makeTypeHyps argTypes args sig))
                        (CASL_AS.mkStEq (makeApplication f args sig) bot))
        formula2 = makeForall
                     argNames
                     (CASL_AS.mkImpl
                        (makeTypeHyps argTypes args sig)
                        (makeTypeHyp resultType
                           (makeApplication f args sig) sig))
        formula0 = makeTypeHyp resultType (makeApplication f [] sig) sig
-- generating axioms for translated sort symbols
-- | Sorts get four groups of axioms: emptiness outside the index types
-- (H1), bot-freeness and right-uniqueness (H2), exhaustiveness of the
-- sorts over non-bot elements (H3), and pairwise disjointness (H4).
generateSortAxioms :: Sign -> [NAME] -> [Named CASL_AS.CASLFORMULA]
generateSortAxioms sig ss =
  axioms1 ++ axioms2 ++ [axiom3] ++ axioms4
  where axioms1 = concatMap (generateSortAxiomsH1 sig) ss
        axioms2 = concatMap (generateSortAxiomsH2 sig) ss
        axiom3 = generateSortAxiomsH3 sig ss
        axioms4 = generateSortAxiomsH4 sig ss

-- | H1: a dependent sort applied to ill-typed indices is empty.
-- Non-indexed sorts need no such axiom.
generateSortAxiomsH1 :: Sign -> NAME -> [Named CASL_AS.CASLFORMULA]
generateSortAxiomsH1 sig s =
  [makeNamed ("gen_sort_1_" ++ show s) formula | not $ null argNames]
  where Just argNames = getArgumentNames s sig
        Just argTypes = getArgumentTypes s sig
        args = map makeVar argNames
        elName = Token "gen_y" nr
        el = makeVar elName
        formula = makeForall
                    argNames
                    (CASL_AS.mkImpl
                       (CASL_AS.mkNeg (makeTypeHyps argTypes args sig))
                       (makeForall
                          [elName]
                          (CASL_AS.mkNeg
                             (makePredication s (args ++ [el]) sig))))

-- | H2: 'bot' belongs to no sort, and an element determines its sort
-- indices uniquely.  For arity 0 only the bot-freeness axiom applies.
generateSortAxiomsH2 :: Sign -> NAME -> [Named CASL_AS.CASLFORMULA]
generateSortAxiomsH2 sig s =
  if ar == 0
    then [makeNamed ("gen_sort_2_" ++ show s) formula0]
    else [makeNamed ("gen_sort_2_" ++ show s) formula1,
          makeNamed ("gen_sort_3_" ++ show s) formula2]
  where Just ar = getSymbolArity s sig
        argNames1 = makeArgNames "x" ar
        argNames2 = makeArgNames "y" ar
        elName = Token "z" nr
        args1 = map makeVar argNames1
        args2 = map makeVar argNames2
        el = makeVar elName
        formula1 = makeForall
                     argNames1
                     $ CASL_AS.mkNeg $ makePredication s (args1 ++ [bot]) sig
        formula2 = makeForall
                     (argNames1 ++ argNames2 ++ [elName])
                     $ CASL_AS.mkImpl
                         (CASL_AS.conjunct
                            [makePredication s (args1 ++ [el]) sig,
                             makePredication s (args2 ++ [el]) sig])
                         $ CASL_AS.conjunct
                             $ zipWith CASL_AS.mkStEq args1 args2
        formula0 = CASL_AS.mkNeg $ makePredication s [bot] sig

-- | H3: every element other than 'bot' belongs to (an instance of)
-- some sort -- the sorts jointly exhaust the carrier.
generateSortAxiomsH3 :: Sign -> [NAME] -> Named CASL_AS.CASLFORMULA
generateSortAxiomsH3 sig ss =
  makeNamed "gen_sort_4" formula
  where elName = Token "y" nr
        el = makeVar elName
        ar s = fromJust $ getSymbolArity s sig
        argNames s = makeArgNames "x" (ar s)
        args s = map makeVar (argNames s)
        formula = makeForall
                    [elName]
                    (CASL_AS.mkImpl
                       (CASL_AS.mkNeg (CASL_AS.mkStEq el bot))
                       (CASL_AS.disjunct $ map subformula ss))
        subformula s = if ar s == 0
                         then makePredication s [el] sig
                         else makeExists
                                (argNames s)
                                $ makePredication s (args s ++ [el]) sig

-- | H4: distinct sorts are disjoint; one axiom per unordered pair.
generateSortAxiomsH4 :: Sign -> [NAME] -> [Named CASL_AS.CASLFORMULA]
generateSortAxiomsH4 sig ss =
  map (generateSortAxiomsH4H sig) [ (s1, s2) | s1 <- ss, s2 <- ss, s1 < s2 ]

generateSortAxiomsH4H :: Sign -> (NAME, NAME) -> Named CASL_AS.CASLFORMULA
generateSortAxiomsH4H sig (s1, s2) =
  makeNamed ("gen_sort_5_" ++ show s1 ++ "_" ++ show s2) formula
  where Just ar1 = getSymbolArity s1 sig
        Just ar2 = getSymbolArity s2 sig
        argNames1 = makeArgNames "x" ar1
        argNames2 = makeArgNames "y" ar2
        elName = Token "z" nr
        args1 = map makeVar argNames1
        args2 = map makeVar argNames2
        el = makeVar elName
        formula = makeForall (argNames1 ++ argNames2 ++ [elName])
                    $ CASL_AS.mkImpl (makePredication s1 (args1 ++ [el]) sig)
                        $ CASL_AS.mkNeg $ makePredication s2 (args2 ++ [el]) sig
-- | Produce @n@ fresh argument names @var_1@ .. @var_n@.
makeArgNames :: String -> Int -> [NAME]
makeArgNames var n = [ Token (var ++ "_" ++ show i) nr | i <- [1 .. n] ]
-- make a variable
-- | Wrap a name as a CASL variable of the unique sort.
makeVar :: NAME -> CASL_AS.CASLTERM
makeVar var = CASL_AS.Qual_var var sort nr

-- make an application
-- | Apply a translated function symbol to argument terms, qualified
-- with its (total) operation type of the recorded arity.
makeApplication :: NAME -> [CASL_AS.CASLTERM] -> Sign -> CASL_AS.CASLTERM
makeApplication f as sig =
  CASL_AS.Application
    (CASL_AS.Qual_op_name
       (mkId [f])
       (CASL_AS.Op_type CASL_AS.Total (folType arity) sort nr)
       nr)
    as
    nr
  where Just arity = getSymbolArity f sig

-- make a predication
-- | Apply a translated predicate or sort symbol to argument terms; a
-- sort symbol's predicate takes one extra argument (the element).
makePredication :: NAME -> [CASL_AS.CASLTERM] -> Sign -> CASL_AS.CASLFORMULA
makePredication p as sig =
  CASL_AS.Predication
    (CASL_AS.Qual_pred_name
       (mkId [p])
       (CASL_AS.Pred_type (folType arity1) nr)
       nr)
    as
    nr
  where Just kind = getSymbolKind p sig
        Just arity = getSymbolArity p sig
        arity1 = if kind == SortKind then arity + 1 else arity

-- make a universal quantification
makeForall :: [NAME] -> CASL_AS.CASLFORMULA -> CASL_AS.CASLFORMULA
makeForall vars = CASL_AS.mkForall [CASL_AS.Var_decl vars sort nr]

-- make an existential quantification
makeExists :: [NAME] -> CASL_AS.CASLFORMULA -> CASL_AS.CASLFORMULA
makeExists vars = CASL_AS.mkExist [CASL_AS.Var_decl vars sort nr]

-- make a type hypothesis
-- | Membership of @term@ in type @t@: the sort's predicate applied to
-- the translated sort arguments plus the term itself.
makeTypeHyp :: TYPE -> CASL_AS.CASLTERM -> Sign -> CASL_AS.CASLFORMULA
makeTypeHyp t term sig = makePredication s (args ++ [term]) sig
  -- assumes only 'Univ' types reach this point -- TODO confirm with callers
  where Univ sortterm = t
        (s, as) = termFlatForm sortterm
        args = map (termTransl sig) as
-- | Conjoin one membership hypothesis per (type, term) pair.
makeTypeHyps :: [TYPE] -> [CASL_AS.CASLTERM]
             -> Sign -> CASL_AS.CASLFORMULA
makeTypeHyps ts terms sig =
  CASL_AS.conjunct (zipWith (\ t term -> makeTypeHyp t term sig) ts terms)
-- term translation
-- | Translate a DFOL term: identifiers become CASL variables unless
-- they are declared constants; applications translate recursively.
termTransl :: Sign -> TERM -> CASL_AS.CASLTERM
termTransl sig (Identifier x) = if not (isConstant x sig)
                                   then CASL_AS.Qual_var x sort nr
                                   else makeApplication x [] sig
termTransl sig t = makeApplication f (map (termTransl sig) as) sig
  where (f, as) = termFlatForm t

-- signature translation
-- | A translated signature comes with the axioms describing it.
sigTransl :: Sign -> (CASL_Sign.CASLSign, [Named CASL_AS.CASLFORMULA])
sigTransl sig = (sigMap sig, generateAxioms sig)

-- theory translation
-- | Translate a theory: the generated signature axioms precede the
-- translated sentences.
theoryTransl :: (Sign, [Named FORMULA]) ->
                (CASL_Sign.CASLSign, [Named CASL_AS.CASLFORMULA])
theoryTransl (sig, fs) = (sigCASL, axCASL ++ fsCASL)
  where (sigCASL, axCASL) = sigTransl sig
        fsCASL = map (namedSenTransl sig) fs
-- morphism translation
-- | Translate a DFOL signature morphism by translating both signatures
-- and then adding each symbol mapping.
morphTransl :: Morphism -> CASL_Morphism.CASLMor
morphTransl (Morphism sig1 sig2 sym_map) =
  foldl (addSymbolTransl sig1) init_morph $ Map.toList sym_map
  where init_morph = CASL_Morphism.Morphism
          { CASL_Morphism.msource = fst $ sigTransl sig1
          , CASL_Morphism.mtarget = fst $ sigTransl sig2
          , CASL_Morphism.sort_map = Map.empty
          , CASL_Morphism.op_map = Map.empty
          , CASL_Morphism.pred_map = Map.empty
          , CASL_Morphism.extended_map = ()
          }

-- | Record one symbol mapping @f |-> g@ in the CASL morphism, in the
-- map appropriate to the symbol's kind (operation vs. predicate).
addSymbolTransl :: Sign -> CASL_Morphism.CASLMor -> (NAME, NAME) ->
                   CASL_Morphism.CASLMor
addSymbolTransl sig m (f, g) = case kind of
  -- NOTE(review): the source operation type is recorded here as
  -- Partial although 'sigMap' creates Total operations -- presumably
  -- this matches CASL's op_map lookup convention; confirm.
  FuncKind -> let
    f1 = (mkId [f], CASL_Sign.OpType CASL_AS.Partial (folType arity) sort)
    g1 = (mkId [g], CASL_AS.Total)
    in m {CASL_Morphism.op_map = Map.insert f1 g1
           $ CASL_Morphism.op_map m}
  PredKind -> let
    f1 = (mkId [f], CASL_Sign.PredType (folType arity))
    g1 = mkId [g]
    in m {CASL_Morphism.pred_map = Map.insert f1 g1
           $ CASL_Morphism.pred_map m}
  -- Sorts were translated to predicates of arity+1, so they map in
  -- the predicate map as well.
  SortKind -> let
    f1 = (mkId [f], CASL_Sign.PredType (folType (arity + 1)))
    g1 = mkId [g]
    in m {CASL_Morphism.pred_map = Map.insert f1 g1
           $ CASL_Morphism.pred_map m}
  where Just kind = getSymbolKind f sig
        Just arity = getSymbolArity f sig
-- | Flatten declaration groups into: one type per declared name, the
-- declared names themselves, and those names as CASL variable terms.
makeTypesAndVars :: [DECL] -> ([TYPE], [NAME], [CASL_AS.CASLTERM])
makeTypesAndVars ds = let varNames = getVarsFromDecls ds in
  ( concatMap (\ (ns, t1) -> replicate (length ns) t1) ds
  , varNames, map makeVar varNames)
-- sentence translation
-- | Translate a DFOL formula to CASL.  Quantifiers are relativised:
-- universals get type hypotheses as premises, existentials as extra
-- conjuncts.
senTransl :: Sign -> FORMULA -> CASL_AS.CASLFORMULA
senTransl sig frm = case frm of
  T -> CASL_AS.trueForm
  F -> CASL_AS.falseForm
  Pred t -> makePredication p (map (termTransl sig) as) sig
    where (p, as) = termFlatForm t
  Equality t1 t2 -> CASL_AS.mkStEq (termTransl sig t1) (termTransl sig t2)
  Negation f -> CASL_AS.mkNeg (senTransl sig f)
  Conjunction fs -> CASL_AS.conjunct (map (senTransl sig) fs)
  Disjunction fs -> CASL_AS.disjunct (map (senTransl sig) fs)
  Implication f g -> CASL_AS.mkImpl (senTransl sig f) (senTransl sig g)
  Equivalence f g -> CASL_AS.mkEqv (senTransl sig f) (senTransl sig g)
  Forall ds f -> let (types, varNames, vars) = makeTypesAndVars ds in
    makeForall varNames
      (CASL_AS.mkImpl (makeTypeHyps types vars sig) (senTransl sig f))
  Exists ds f -> let (types, varNames, vars) = makeTypesAndVars ds in
    makeExists varNames
      (CASL_AS.conjunct [makeTypeHyps types vars sig, senTransl sig f])

-- named sentence translation
-- | Translate the payload of a named sentence, keeping name and
-- annotations intact.
namedSenTransl :: Sign -> Named FORMULA -> Named CASL_AS.CASLFORMULA
namedSenTransl sig nf = nf {sentence = senTransl sig $ sentence nf}

-- symbol translation
-- | Translate a single DFOL symbol to the corresponding CASL symbol,
-- mirroring the kind mapping of 'sigMapH'.
symbolTransl :: Sign -> Symbol -> Set.Set CASL_Sign.Symbol
symbolTransl sig sym =
  Set.singleton $ CASL_Sign.Symbol (mkId [n])
    $ case kind of
        PredKind -> CASL_Sign.PredAsItemType
                      $ CASL_Sign.PredType (folType arity)
        FuncKind -> CASL_Sign.OpAsItemType
                      $ CASL_Sign.mkTotOpType (folType arity) sort
        SortKind -> CASL_Sign.PredAsItemType
                      $ CASL_Sign.PredType (folType (arity + 1))
  where n = name sym
        Just kind = getSymbolKind n sig
        Just arity = getSymbolArity n sig
| spechub/Hets | DFOL/Comorphism.hs | gpl-2.0 | 15,055 | 0 | 18 | 4,477 | 4,416 | 2,268 | 2,148 | 295 | 11 |
{-|
Module : Idris.Parser.Expr
Description : Parse Expressions.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE ConstraintKinds, GeneralizedNewtypeDeriving, PatternGuards,
TupleSections #-}
module Idris.Parser.Expr where
import Idris.AbsSyntax
import Idris.Core.TT
import Idris.DSL
import Idris.Parser.Helpers
import Idris.Parser.Ops
import Prelude hiding (pi)
import Control.Applicative
import Control.Monad
import Control.Monad.State.Strict
import qualified Data.ByteString.UTF8 as UTF8
import Data.Char
import Data.Function (on)
import qualified Data.HashSet as HS
import Data.List
import qualified Data.List.Split as Spl
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Debug.Trace
import qualified Text.Parser.Char as Chr
import Text.Parser.Expression
import Text.Parser.LookAhead
import qualified Text.Parser.Token as Tok
import qualified Text.Parser.Token.Highlight as Hi
import Text.Trifecta hiding (Err, char, charLiteral, natural, span, string,
stringLiteral, symbol, whiteSpace)
import Text.Trifecta.Delta
-- | Allow implicit type declarations (also clears any pending
-- constraint-argument permission).
allowImp :: SyntaxInfo -> SyntaxInfo
allowImp syn = syn { implicitAllowed = True,
                     constraintAllowed = False }
-- | Disallow implicit type declarations (also clears any pending
-- constraint-argument permission).
disallowImp :: SyntaxInfo -> SyntaxInfo
disallowImp syn = syn { implicitAllowed = False,
                        constraintAllowed = False }
-- | Allow scoped constraint arguments
allowConstr :: SyntaxInfo -> SyntaxInfo
allowConstr syn = syn { constraintAllowed = True }
{-| Parses an expression as a whole
@
  FullExpr ::= Expr EOF_t;
@
-}
fullExpr :: SyntaxInfo -> IdrisParser PTerm
fullExpr syn = do x <- expr syn
                  eof
                  i <- get
                  -- desugar user-level syntax, then strip the do-bind
                  -- applications the desugaring may have introduced
                  return $ debindApp syn (desugar syn i x)
-- | Run 'fullExpr' over a whole input string, turning a parse failure
-- into a 'Msg' error on the Left.
tryFullExpr :: SyntaxInfo -> IState -> String -> Either Err PTerm
tryFullExpr syn st input =
  case runparser (fullExpr syn) st "" input of
    Success tm -> Right tm
    Failure e -> Left (Msg (show e))
{- | Parses an expression
@
  Expr ::= Pi
@
-}
expr :: SyntaxInfo -> IdrisParser PTerm
expr = pi
{- | Parses an expression with possible operator applied
@
  OpExpr ::= {- Expression Parser with Operators based on Expr' -};
@
-}
opExpr :: SyntaxInfo -> IdrisParser PTerm
opExpr syn = do i <- get
                -- the operator table is read from the current parser
                -- state, so user-declared fixities take effect here
                buildExpressionParser (table (idris_infixes i))
                                      (expr' syn)
{- | Parses either an internally defined expression or
a user-defined one.  User syntax is tried first (backtracking on
failure) so that it can shadow built-in forms.
@
  Expr' ::= "External (User-defined) Syntax"
        | InternalExpr;
@
-}
expr' :: SyntaxInfo -> IdrisParser PTerm
expr' syn = try (externalExpr syn)
            <|> internalExpr syn
            <?> "expression"
{- | Parses a user-defined expression, i.e. one introduced by a syntax
rule.  Source spans inside the expansion are clipped to the span of the
whole syntax application so highlighting stays sensible. -}
externalExpr :: SyntaxInfo -> IdrisParser PTerm
externalExpr syn = do i <- get
                      (FC fn start _) <- getFC
                      expr <- extensions syn (syntaxRulesList $ syntax_rules i)
                      (FC _ _ end) <- getFC
                      let outerFC = FC fn start end
                      return (mapPTermFC (fixFC outerFC) (fixFCH fn outerFC) expr)
                   <?> "user-defined expression"
  where -- Fix non-highlighting FCs by approximating with the span of the syntax application
        fixFC outer inner | inner `fcIn` outer = inner
                          | otherwise = outer
        -- Fix highlighting FCs by making them useless, to avoid spurious highlights
        fixFCH fn outer inner | inner `fcIn` outer = inner
                              | otherwise = FileFC fn
{- | Parses a simple user-defined expression.
A rule counts as "simple" when it cannot recurse into a full expression
at its left edge and ends in a keyword or symbol (single-keyword and
single-symbol rules qualify directly). -}
simpleExternalExpr :: SyntaxInfo -> IdrisParser PTerm
simpleExternalExpr syn = do i <- get
                            extensions syn (filter isSimple (syntaxRulesList $ syntax_rules i))
  where
    isSimple (Rule (Expr _:_) _ _) = False
    isSimple (Rule (SimpleExpr _:_) _ _) = False
    isSimple (Rule [Keyword _] _ _) = True
    isSimple (Rule [Symbol _] _ _) = True
    -- Previously this clause called the partial 'last xs' directly, which
    -- crashed on single-symbol rules not caught above (e.g. a lone
    -- Binding, where xs is []).  Inspect the reversed tail instead so the
    -- empty case is handled totally and such rules are just non-simple.
    isSimple (Rule (_:xs) _ _) = case reverse xs of
                                   (Keyword _ : _) -> True
                                   (Symbol _ : _) -> True
                                   _ -> False
    isSimple _ = False
{- | Tries to parse a user-defined expression given a list of syntactic extensions -}
extensions :: SyntaxInfo -> [Syntax] -> IdrisParser PTerm
extensions syn rules = extension syn [] (filter isValid rules)
                       <?> "user-defined expression"
  where
    -- Keep only the rules usable in the current context: pattern-only
    -- rules inside patterns, term-only rules outside them, AnySyntax
    -- everywhere; declaration rules never apply at expression level.
    isValid :: Syntax -> Bool
    isValid (Rule _ _ AnySyntax) = True
    isValid (Rule _ _ PatternSyntax) = inPattern syn
    isValid (Rule _ _ TermSyntax) = not (inPattern syn)
    isValid (DeclRule _ _) = False
-- | A match for one syntax-rule variable: either a whole term or a name
-- bound by the rule.
data SynMatch = SynTm PTerm | SynBind FC Name -- ^ the FC is for highlighting information
  deriving Show
-- | Parse by a set of syntax rules, symbol by symbol.  Rules are grouped
-- by their first symbol; once a symbol is consumed the rules are
-- re-entered with that symbol stripped.  @ns@ accumulates the matches
-- for rule variables seen so far; when a rule is exhausted its template
-- term is instantiated with those matches.
extension :: SyntaxInfo -> [Maybe (Name, SynMatch)] -> [Syntax] -> IdrisParser PTerm
extension syn ns rules =
  choice $ flip map (groupBy (ruleGroup `on` syntaxSymbols) rules) $ \rs ->
    case head rs of -- can never be []: groupBy yields non-empty groups
      Rule (symb:_) _ _ -> try $ do
        n <- extensionSymbol symb
        extension syn (n : ns) [Rule ss t ctx | (Rule (_:ss) t ctx) <- rs]
      -- If we have more than one Rule in this bucket, our grammar is
      -- nondeterministic.
      Rule [] ptm _ -> return (flatten (updateSynMatch (mapMaybe id ns) ptm))
  where
    -- Two rules belong in the same bucket iff their first symbols agree.
    ruleGroup [] [] = True
    ruleGroup (s1:_) (s2:_) = s1 == s2
    ruleGroup _ _ = False
    -- Consume one rule symbol; keyword/symbol matches are highlighted
    -- and bind nothing, the others bind the rule variable.
    extensionSymbol :: SSymbol -> IdrisParser (Maybe (Name, SynMatch))
    extensionSymbol (Keyword n) = do fc <- reservedFC (show n)
                                     highlightP fc AnnKeyword
                                     return Nothing
    extensionSymbol (Expr n) = do tm <- expr syn
                                  return $ Just (n, SynTm tm)
    extensionSymbol (SimpleExpr n) = do tm <- simpleExpr syn
                                        return $ Just (n, SynTm tm)
    extensionSymbol (Binding n) = do (b, fc) <- name
                                     return $ Just (n, SynBind fc b)
    extensionSymbol (Symbol s) = do fc <- symbolFC s
                                    highlightP fc AnnKeyword
                                    return Nothing
    flatten :: PTerm -> PTerm -- flatten application
    flatten (PApp fc (PApp _ f as) bs) = flatten (PApp fc f (as ++ bs))
    flatten t = t
    -- Substitute rule-variable matches throughout the template term.
    updateSynMatch = update
      where
        -- Replace a binder name if the rule bound it.
        updateB :: [(Name, SynMatch)] -> (Name, FC) -> (Name, FC)
        updateB ns (n, fc) = case lookup n ns of
                               Just (SynBind tfc t) -> (t, tfc)
                               _ -> (n, fc)
        -- Structural walk; binders drop their own name from the
        -- substitution ('dropn') before descending into the scope.
        update :: [(Name, SynMatch)] -> PTerm -> PTerm
        update ns (PRef fc hls n) = case lookup n ns of
                                      Just (SynTm t) -> t
                                      _ -> PRef fc hls n
        update ns (PPatvar fc n) = uncurry (flip PPatvar) $ updateB ns (n, fc)
        update ns (PLam fc n nfc ty sc)
          = let (n', nfc') = updateB ns (n, nfc)
            in PLam fc n' nfc' (update ns ty) (update (dropn n ns) sc)
        update ns (PPi p n fc ty sc)
          = let (n', nfc') = updateB ns (n, fc)
            in PPi (updTacImp ns p) n' nfc'
                   (update ns ty) (update (dropn n ns) sc)
        update ns (PLet fc n nfc ty val sc)
          = let (n', nfc') = updateB ns (n, nfc)
            in PLet fc n' nfc' (update ns ty)
                    (update ns val) (update (dropn n ns) sc)
        update ns (PApp fc t args)
          = PApp fc (update ns t) (map (fmap (update ns)) args)
        update ns (PAppBind fc t args)
          = PAppBind fc (update ns t) (map (fmap (update ns)) args)
        update ns (PMatchApp fc n) = let (n', nfc') = updateB ns (n, fc)
                                     in PMatchApp nfc' n'
        update ns (PIfThenElse fc c t f)
          = PIfThenElse fc (update ns c) (update ns t) (update ns f)
        update ns (PCase fc c opts)
          = PCase fc (update ns c) (map (pmap (update ns)) opts)
        update ns (PRewrite fc by eq tm mty)
          = PRewrite fc by (update ns eq) (update ns tm) (fmap (update ns) mty)
        update ns (PPair fc hls p l r) = PPair fc hls p (update ns l) (update ns r)
        update ns (PDPair fc hls p l t r)
          = PDPair fc hls p (update ns l) (update ns t) (update ns r)
        update ns (PAs fc n t) = PAs fc (fst $ updateB ns (n, NoFC)) (update ns t)
        update ns (PAlternative ms a as) = PAlternative ms a (map (update ns) as)
        update ns (PHidden t) = PHidden (update ns t)
        update ns (PGoal fc r n sc) = PGoal fc (update ns r) n (update ns sc)
        update ns (PDoBlock ds) = PDoBlock $ map (upd ns) ds
          where upd :: [(Name, SynMatch)] -> PDo -> PDo
                upd ns (DoExp fc t) = DoExp fc (update ns t)
                upd ns (DoBind fc n nfc t) = DoBind fc n nfc (update ns t)
                upd ns (DoLet fc n nfc ty t) = DoLet fc n nfc (update ns ty) (update ns t)
                upd ns (DoBindP fc i t ts)
                  = DoBindP fc (update ns i) (update ns t)
                            (map (\(l,r) -> (update ns l, update ns r)) ts)
                upd ns (DoLetP fc i t) = DoLetP fc (update ns i) (update ns t)
        update ns (PIdiom fc t) = PIdiom fc $ update ns t
        update ns (PMetavar fc n) = uncurry (flip PMetavar) $ updateB ns (n, fc)
        update ns (PProof tacs) = PProof $ map (updTactic ns) tacs
        update ns (PTactics tacs) = PTactics $ map (updTactic ns) tacs
        update ns (PDisamb nsps t) = PDisamb nsps $ update ns t
        update ns (PUnifyLog t) = PUnifyLog $ update ns t
        update ns (PNoImplicits t) = PNoImplicits $ update ns t
        update ns (PQuasiquote tm mty) = PQuasiquote (update ns tm) (fmap (update ns) mty)
        update ns (PUnquote t) = PUnquote $ update ns t
        update ns (PQuoteName n res fc) = let (n', fc') = (updateB ns (n, fc))
                                          in PQuoteName n' res fc'
        update ns (PRunElab fc t nsp) = PRunElab fc (update ns t) nsp
        update ns (PConstSugar fc t) = PConstSugar fc $ update ns t
        -- PConstSugar probably can't contain anything substitutable, but it's hard to track
        update ns t = t
        updTactic :: [(Name, SynMatch)] -> PTactic -> PTactic
        -- handle all the ones with Names explicitly, then use fmap for the rest with PTerms
        updTactic ns (Intro ns') = Intro $ map (fst . updateB ns . (, NoFC)) ns'
        updTactic ns (Focus n) = Focus . fst $ updateB ns (n, NoFC)
        updTactic ns (Refine n bs) = Refine (fst $ updateB ns (n, NoFC)) bs
        updTactic ns (Claim n t) = Claim (fst $ updateB ns (n, NoFC)) (update ns t)
        updTactic ns (MatchRefine n) = MatchRefine (fst $ updateB ns (n, NoFC))
        updTactic ns (LetTac n t) = LetTac (fst $ updateB ns (n, NoFC)) (update ns t)
        updTactic ns (LetTacTy n ty tm) = LetTacTy (fst $ updateB ns (n, NoFC)) (update ns ty) (update ns tm)
        updTactic ns (ProofSearch rec prover depth top psns hints) = ProofSearch rec prover depth
          (fmap (fst . updateB ns . (, NoFC)) top) (map (fst . updateB ns . (, NoFC)) psns) (map (fst . updateB ns . (, NoFC)) hints)
        updTactic ns (Try l r) = Try (updTactic ns l) (updTactic ns r)
        updTactic ns (TSeq l r) = TSeq (updTactic ns l) (updTactic ns r)
        updTactic ns (GoalType s tac) = GoalType s $ updTactic ns tac
        updTactic ns (TDocStr (Left n)) = TDocStr . Left . fst $ updateB ns (n, NoFC)
        updTactic ns t = fmap (update ns) t
        updTacImp ns (TacImp o st scr r) = TacImp o st (update ns scr) r
        updTacImp _ x = x
        -- Remove a shadowed rule variable from the substitution.
        dropn :: Name -> [(Name, a)] -> [(Name, a)]
        dropn n [] = []
        dropn n ((x,t) : xs) | n == x = xs
                             | otherwise = (x,t):dropn n xs
{- | Parses a (normal) built-in expression
@
InternalExpr ::=
  UnifyLog
  | RecordType
  | SimpleExpr
  | Lambda
  | QuoteGoal
  | Let
  | If
  | RewriteTerm
  | CaseExpr
  | DoBlock
  | App
  ;
@
-}
internalExpr :: SyntaxInfo -> IdrisParser PTerm
internalExpr syn =
  -- NB: alternative order encodes parse priority; 'app' is the most
  -- general form and therefore comes last.
         unifyLog syn
     <|> runElab syn
     <|> disamb syn
     <|> noImplicits syn
     <|> recordType syn
     <|> if_ syn
     <|> lambda syn
     <|> quoteGoal syn
     <|> let_ syn
     <|> rewriteTerm syn
     <|> doBlock syn
     <|> caseExpr syn
     <|> app syn
     <?> "expression"
{- | Parses the "impossible" keyword (and highlights it)
@
Impossible ::= 'impossible'
@
-}
impossible :: IdrisParser PTerm
impossible = do fc <- reservedFC "impossible"
                highlightP fc AnnKeyword
                return PImpossible
{- | Parses a case expression
@
CaseExpr ::=
  'case' Expr 'of' OpenBlock CaseOption+ CloseBlock;
@
-}
caseExpr :: SyntaxInfo -> IdrisParser PTerm
caseExpr syn = do kw1 <- reservedFC "case"; fc <- getFC
                  scr <- expr syn; kw2 <- reservedFC "of";
                  opts <- indentedBlock1 (caseOption syn)
                  -- highlight after the whole form parsed successfully
                  highlightP kw1 AnnKeyword
                  highlightP kw2 AnnKeyword
                  return (PCase fc scr opts)
               <?> "case expression"
{- | Parses a case in a case expression
@
CaseOption ::=
  Expr (Impossible | '=>' Expr) Terminator
  ;
@
-}
caseOption :: SyntaxInfo -> IdrisParser (PTerm, PTerm)
caseOption syn = do -- the left-hand side is parsed in pattern mode, with
                    -- implicit binders disallowed
                    lhs <- expr (disallowImp (syn { inPattern = True }))
                    r <- impossible <|> symbol "=>" *> expr syn
                    return (lhs, r)
                 <?> "case option"
-- | Emit a (suppressible) deprecation warning at the given location for
-- old-style tactic proofs.
warnTacticDeprecation :: FC -> IdrisParser ()
warnTacticDeprecation fc =
  parserWarning fc (Just NoOldTacticDeprecationWarnings) (Msg "This style of tactic proof is deprecated. See %runElab for the replacement.")
{- | Parses a proof block (deprecated tactic syntax; warns on use)
@
ProofExpr ::=
  'proof' OpenBlock Tactic'* CloseBlock
  ;
@
-}
proofExpr :: SyntaxInfo -> IdrisParser PTerm
proofExpr syn = do kw <- reservedFC "proof"
                   ts <- indentedBlock1 (tactic syn)
                   highlightP kw AnnKeyword
                   warnTacticDeprecation kw
                   return $ PProof ts
                <?> "proof block"
{- | Parses a tactics block (deprecated tactic syntax; warns on use)
@
TacticsExpr :=
  'tactics' OpenBlock Tactic'* CloseBlock
;
@
-}
tacticsExpr :: SyntaxInfo -> IdrisParser PTerm
tacticsExpr syn = do kw <- reservedFC "tactics"
                     ts <- indentedBlock1 (tactic syn)
                     highlightP kw AnnKeyword
                     warnTacticDeprecation kw
                     return $ PTactics ts
                  <?> "tactics block"
{- | Parses a simple expression
@
SimpleExpr ::=
    {- External (User-defined) Simple Expression -}
  | '?' Name
  | % 'implementation'
  | 'Refl' ('{' Expr '}')?
  | ProofExpr
  | TacticsExpr
  | FnName
  | Idiom
  | List
  | Alt
  | Bracketed
  | Constant
  | Type
  | 'Void'
  | Quasiquote
  | NameQuote
  | Unquote
  | '_'
  ;
@
-}
simpleExpr :: SyntaxInfo -> IdrisParser PTerm
simpleExpr syn =
        try (simpleExternalExpr syn)
        -- metavariable: span is widened one column left to cover the '?'
    <|> do (x, FC f (l, c) end) <- try (lchar '?' *> name)
           return (PMetavar (FC f (l, c-1) end) x)
    <|> do lchar '%'; fc <- getFC; reserved "implementation"; return (PResolveTC fc)
        -- %instance is accepted but warns in favour of %implementation
    <|> do lchar '%'; fc <- getFC; reserved "instance"
           parserWarning fc Nothing $ Msg "The use of %instance is deprecated, use %implementation instead."
           return (PResolveTC fc)
    <|> do reserved "elim_for"; fc <- getFC; t <- fst <$> fnName; return (PRef fc [] (SN $ ElimN t))
    <|> proofExpr syn
    <|> tacticsExpr syn
    <|> try (do fc <- reservedFC "Type*"; return $ PUniverse fc AllTypes)
    <|> do fc <- reservedFC "AnyType"; return $ PUniverse fc AllTypes
    <|> PType <$> reservedFC "Type"
    <|> do fc <- reservedFC "UniqueType"; return $ PUniverse fc UniqueType
    <|> do fc <- reservedFC "NullType"; return $ PUniverse fc NullType
        -- integer literals get fromInteger alternatives via modifyConst
    <|> do (c, cfc) <- constant
           fc <- getFC
           return (modifyConst syn fc (PConstant cfc c))
    <|> do symbol "'"; fc <- getFC; str <- fst <$> name
           return (PApp fc (PRef fc [] (sUN "Symbol_"))
                       [pexp (PConstant NoFC (Str (show str)))])
        -- a bare name; in pattern context it may be an @-pattern
    <|> do (x, fc) <- fnName
           if inPattern syn
              then option (PRef fc [fc] x)
                          (do reservedOp "@"
                              s <- simpleExpr syn
                              fcIn <- getFC
                              return (PAs fcIn x s))
              else return (PRef fc [fc] x)
    <|> idiom syn
    <|> listExpr syn
    <|> alt syn
    <|> do reservedOp "!"
           s <- simpleExpr syn
           fc <- getFC
           return (PAppBind fc s [])
    <|> bracketed (disallowImp syn)
    <|> quasiquote syn
    <|> namequote syn
    <|> unquote syn
    <|> do lchar '_'; return Placeholder
    <?> "expression"
{- |Parses an expression in parentheses
@
Bracketed ::= '(' Bracketed'
@
-}
bracketed :: SyntaxInfo -> IdrisParser PTerm
bracketed syn = do (FC fn (sl, sc) _) <- getFC
                   lchar '(' <?> "parenthesized expression"
                   -- pass the span of the opening paren down so the whole
                   -- bracketed term can be given an accurate source span
                   bracketed' (FC fn (sl, sc) (sl, sc+1)) (syn { withAppAllowed = True })
{- |Parses the rest of an expression in braces
@
Bracketed' ::=
  ')'
  | Expr ')'
  | ExprList ')'
  | DependentPair ')'
  | Operator Expr ')'
  | Expr Operator ')'
  ;
@
-}
bracketed' :: FC -> SyntaxInfo -> IdrisParser PTerm
bracketed' open syn =
            -- "()" is the unit type/value
            do (FC f start (l, c)) <- getFC
               lchar ')'
               return $ PTrue (spanFC open (FC f start (l, c+1))) TypeOrTerm
        <|> try (dependentPair TypeOrTerm [] open syn)
            -- left operator section "(op e)"
        <|> try (do fc <- getFC; o <- operator; e <- expr syn; lchar ')'
                    -- No prefix operators! (bit of a hack here...)
                    if (o == "-" || o == "!")
                       then fail "minus not allowed in section"
                       else return $ PLam fc (sMN 1000 "ARG") NoFC Placeholder
                                          (PApp fc (PRef fc [] (sUN o)) [pexp (PRef fc [] (sMN 1000 "ARG")),
                                                                         pexp e]))
            -- right operator section "(e op)" or just "(e ..."
        <|> try (do l <- simpleExpr syn
                    op <- option Nothing (do o <- operator
                                             lchar ')'
                                             return (Just o))
                    fc0 <- getFC
                    case op of
                         Nothing -> bracketedExpr syn open l
                         Just o -> return $ PLam fc0 (sMN 1000 "ARG") NoFC Placeholder
                                                (PApp fc0 (PRef fc0 [] (sUN o)) [pexp l,
                                                                                 pexp (PRef fc0 [] (sMN 1000 "ARG"))]))
        <|> do l <- expr (allowConstr syn)
               bracketedExpr (allowConstr syn) open l
{-| Parses the rest of a dependent pair after '(' or '(Expr **'.
@prev@ accumulates the binders parsed so far (term, optional typed
annotation, and the FC of the '**'); @pun@ tracks whether the pair has
committed to being a type, a term, or either. -}
dependentPair :: PunInfo -> [(PTerm, Maybe (FC, PTerm), FC)] -> FC -> SyntaxInfo -> IdrisParser PTerm
dependentPair pun prev openFC syn =
  if null prev then
      nametypePart <|> namePart
  else
      case pun of
        IsType -> nametypePart <|> namePart <|> exprPart True
        IsTerm -> exprPart False
        TypeOrTerm -> nametypePart <|> namePart <|> exprPart False
  where nametypePart = do
          -- "name : type **" commits the whole pair to being a type
          (ln, lnfc, colonFC) <- try $ do
            (ln, lnfc) <- name
            colonFC <- lcharFC ':'
            return (ln, lnfc, colonFC)
          lty <- expr' syn
          starsFC <- reservedOpFC "**"
          dependentPair IsType ((PRef lnfc [] ln, Just (colonFC, lty), starsFC):prev) openFC syn
        namePart = try $ do
          -- bare "name **" leaves the pun state unchanged
          (ln, lnfc) <- name
          starsFC <- reservedOpFC "**"
          dependentPair pun ((PRef lnfc [] ln, Nothing, starsFC):prev) openFC syn
        exprPart isEnd = do
          -- a general expression component; when isEnd, only ')' may follow
          e <- expr syn
          sepFCE <-
            let stars = (Left <$> reservedOpFC "**")
                ending = (Right <$> lcharFC ')')
            in if isEnd then ending else stars <|> ending
          case sepFCE of
            Left starsFC -> dependentPair IsTerm ((e, Nothing, starsFC):prev) openFC syn
            Right closeFC ->
              return (mkPDPairs pun openFC closeFC (reverse prev) e)
        -- Rebuild the accumulated binders into nested PDPair nodes; the
        -- outermost node carries all the highlighting FCs.
        mkPDPairs pun openFC closeFC ((e, cfclty, starsFC):bnds) r =
          (PDPair openFC ([openFC] ++ maybe [] ((: []) . fst) cfclty ++ [starsFC, closeFC] ++ (=<<) (\(_,cfclty,sfc) -> maybe [] ((: []) . fst) cfclty ++ [sfc]) bnds)
                  pun e (maybe Placeholder snd cfclty) (mergePDPairs pun starsFC bnds r))
        mergePDPairs pun starsFC' [] r = r
        mergePDPairs pun starsFC' ((e, cfclty, starsFC):bnds) r =
          PDPair starsFC' [] pun e (maybe Placeholder snd cfclty) (mergePDPairs pun starsFC bnds r)
-- | Parse the contents of parentheses, after an expression has been parsed.
bracketedExpr :: SyntaxInfo -> FC -> PTerm -> IdrisParser PTerm
bracketedExpr syn openParenFC e =
            do lchar ')'; return e
            -- tuple: one or more ",expr" components then ')'
        <|> do exprs <- some (do comma <- lcharFC ','
                                 r <- expr syn
                                 return (r, comma))
               closeParenFC <- lcharFC ')'
               let hilite = [openParenFC, closeParenFC] ++ map snd exprs
               return $ PPair openParenFC hilite TypeOrTerm e (mergePairs exprs)
            -- dependent pair: "e ** ..."
        <|> do starsFC <- reservedOpFC "**"
               dependentPair IsTerm [(e, Nothing, starsFC)] openParenFC syn
        <?> "end of bracketed expression"
  where -- Right-nest the tuple components.  No [] clause is needed:
        -- 'some' above guarantees a non-empty list.
        mergePairs :: [(PTerm, FC)] -> PTerm
        mergePairs [(t, fc)] = t
        mergePairs ((t, fc):rs) = PPair fc [] TypeOrTerm t (mergePairs rs)
-- bit of a hack here. If the integer doesn't fit in an Int, treat it as a
-- big integer, otherwise try fromInteger and the constants as alternatives.
-- a better solution would be to fix fromInteger to work with Integer, as the
-- name suggests, rather than Int
{-| Finds optimal type for integer constant: outside patterns, offer a
'fromInteger' application first, then the fixed-width constants; inside
patterns, only the constants. -}
modifyConst :: SyntaxInfo -> FC -> PTerm -> PTerm
modifyConst syn fc (PConstant inFC (BI x))
    | not (inPattern syn)
        = PConstSugar inFC $ -- wrap in original location for highlighting
          PAlternative [] FirstSuccess
            (PApp fc (PRef fc [] (sUN "fromInteger")) [pexp (PConstant NoFC (BI (fromInteger x)))]
             : consts)
    | otherwise = PConstSugar inFC $
                  PAlternative [] FirstSuccess consts
  where
    -- candidate constant types, tried in order
    consts = [ PConstant inFC (BI x)
             , PConstant inFC (I (fromInteger x))
             , PConstant inFC (B8 (fromInteger x))
             , PConstant inFC (B16 (fromInteger x))
             , PConstant inFC (B32 (fromInteger x))
             , PConstant inFC (B64 (fromInteger x))
             ]
modifyConst syn fc x = x
{- | Parses an alternative expression
@
  Alt ::= '(|' Expr_List '|)';

  Expr_List ::=
    Expr'
    | Expr' ',' Expr_List
  ;
@
-}
alt :: SyntaxInfo -> IdrisParser PTerm
alt syn = do symbol "(|"; alts <- sepBy1 (expr' (syn { withAppAllowed = False })) (lchar ','); symbol "|)"
             return (PAlternative [] FirstSuccess alts)
{- | Parses a possibly hidden simple expression: a leading dot marks the
expression as hidden.
@
HSimpleExpr ::=
  '.' SimpleExpr
  | SimpleExpr
  ;
@
-}
hsimpleExpr :: SyntaxInfo -> IdrisParser PTerm
hsimpleExpr syn =
      (PHidden <$> (lchar '.' *> simpleExpr syn))
  <|> simpleExpr syn
  <?> "expression"
{- | Parses a unification log expression
UnifyLog ::=
  '%' 'unifyLog' SimpleExpr
  ;
-}
unifyLog :: SyntaxInfo -> IdrisParser PTerm
unifyLog syn = do (FC fn (sl, sc) kwEnd) <- try (lchar '%' *> reservedFC "unifyLog")
                  tm <- simpleExpr syn
                  -- widen the highlight one column left to cover the '%'
                  highlightP (FC fn (sl, sc-1) kwEnd) AnnKeyword
                  return (PUnifyLog tm)
               <?> "unification log expression"
{- | Parses a new-style tactics expression
RunTactics ::=
  '%' 'runElab' SimpleExpr
  ;
-}
runElab :: SyntaxInfo -> IdrisParser PTerm
runElab syn = do (FC fn (sl, sc) kwEnd) <- try (lchar '%' *> reservedFC "runElab")
                 fc <- getFC
                 tm <- simpleExpr syn
                 -- widen the highlight one column left to cover the '%'
                 highlightP (FC fn (sl, sc-1) kwEnd) AnnKeyword
                 return $ PRunElab fc tm (syn_namespace syn)
              <?> "new-style tactics expression"
{- | Parses a disambiguation expression
Disamb ::=
  'with' NameList Expr
  ;
-}
disamb :: SyntaxInfo -> IdrisParser PTerm
disamb syn = do kw <- reservedFC "with"
                ns <- sepBy1 (fst <$> name) (lchar ',')
                tm <- expr' syn
                highlightP kw AnnKeyword
                return (PDisamb (map tons ns) tm)
             <?> "namespace disambiguation expression"
  where -- split a (possibly namespaced) name into its text components
        tons (NS n s) = txt (show n) : s
        tons n = [txt (show n)]
{- | Parses a no implicits expression
@
NoImplicits ::=
  '%' 'noImplicits' SimpleExpr
  ;
@
-}
noImplicits :: SyntaxInfo -> IdrisParser PTerm
noImplicits syn =
     do try (lchar '%' *> reserved "noImplicits")
        PNoImplicits <$> simpleExpr syn
  <?> "no implicits expression"
{- | Parses a function application expression
@
App ::=
  'mkForeign' Arg Arg*
  | MatchApp
  | SimpleExpr Arg*
  ;

MatchApp ::=
  SimpleExpr '<==' FnName
  ;
@
-}
app :: SyntaxInfo -> IdrisParser PTerm
app syn = do f <- simpleExpr syn
             (do try $ reservedOp "<=="
                 fc <- getFC
                 ff <- fst <$> fnName
                 -- "f <== g" desugars to a let-bound match application
                 return (PLet fc (sMN 0 "match") NoFC
                              f
                              (PMatchApp fc ff)
                              (PRef fc [] (sMN 0 "match")))
               <?> "matching application expression") <|>
             (do fc <- getFC
                 i <- get
                 args <- many (do notEndApp; arg syn)
                 -- "| e" with-application arguments, only outside patterns
                 wargs <- if withAppAllowed syn && not (inPattern syn)
                             then many (do notEndApp; reservedOp "|"; expr' syn)
                             else return []
                 case args of
                   [] -> return f
                   _ -> return (withApp fc (flattenFromInt fc f args) wargs))
       <?> "function application"
  where
    -- bit of a hack to deal with the situation where we're applying a
    -- literal to an argument, which we may want for obscure applications
    -- of fromInteger, and this will help disambiguate better.
    -- We know, at least, it won't be one of the constants!
    flattenFromInt fc (PAlternative _ x alts) args
      | Just i <- getFromInt alts
        = PApp fc (PRef fc [] (sUN "fromInteger")) (i : args)
    flattenFromInt fc f args = PApp fc f args
    -- fold the with-application arguments onto the head term
    withApp fc tm [] = tm
    withApp fc tm (a : as) = withApp fc (PWithApp fc tm a) as
    getFromInt ((PApp _ (PRef _ _ n) [a]) : _) | n == sUN "fromInteger" = Just a
    getFromInt (_ : xs) = getFromInt xs
    getFromInt _ = Nothing
{-| Parses a function argument
@
Arg ::=
  ImplicitArg
  | ConstraintArg
  | SimpleExpr
  ;
@
-}
arg :: SyntaxInfo -> IdrisParser PArg
arg syn = implicitArg syn
      <|> constraintArg syn
      <|> do e <- simpleExpr syn
             return (pexp e)
      <?> "function argument"
{-| Parses an implicit function argument
@
ImplicitArg ::=
  '{' Name ('=' Expr)? '}'
  ;
@
-}
implicitArg :: SyntaxInfo -> IdrisParser PArg
implicitArg syn = do lchar '{'
                     (n, nfc) <- name
                     fc <- getFC
                     -- "{n}" is shorthand for "{n = n}"
                     v <- option (PRef nfc [nfc] n) (do lchar '='
                                                        expr syn)
                     lchar '}'
                     return (pimp n v True)
                  <?> "implicit function argument"
{-| Parses a constraint argument (for selecting a named interface implementation)

> ConstraintArg ::=
>   '@{' Expr '}'
>   ;
-}
constraintArg :: SyntaxInfo -> IdrisParser PArg
constraintArg syn =
  (pconst <$> (symbol "@{" *> expr syn <* symbol "}"))
  <?> "constraint argument"
{-| Parses a quasiquote expression (for building reflected terms using the elaborator)

> Quasiquote ::= '`(' Expr ')'
-}
quasiquote :: SyntaxInfo -> IdrisParser PTerm
quasiquote syn = do startFC <- symbolFC "`("
                    -- track quote depth so '~' unquotes are only legal inside
                    e <- expr syn { syn_in_quasiquote = (syn_in_quasiquote syn) + 1 ,
                                    inPattern = False }
                    -- optional goal-type annotation "`(e : ty)"
                    g <- optional $
                           do fc <- symbolFC ":"
                              ty <- expr syn { inPattern = False } -- don't allow antiquotes
                              return (ty, fc)
                    endFC <- symbolFC ")"
                    mapM_ (uncurry highlightP) [(startFC, AnnKeyword), (endFC, AnnKeyword), (spanFC startFC endFC, AnnQuasiquote)]
                    case g of
                      Just (_, fc) -> highlightP fc AnnKeyword
                      _ -> return ()
                    return $ PQuasiquote e (fst <$> g)
                 <?> "quasiquotation"
{-| Parses an unquoting inside a quasiquotation (for building reflected terms using the elaborator)

> Unquote ::= ',' Expr
-}
unquote :: SyntaxInfo -> IdrisParser PTerm
unquote syn = do -- only legal inside at least one level of quasiquote
                 guard (syn_in_quasiquote syn > 0)
                 startFC <- symbolFC "~"
                 e <- simpleExpr syn { syn_in_quasiquote = syn_in_quasiquote syn - 1 }
                 endFC <- getFC
                 highlightP startFC AnnKeyword
                 highlightP (spanFC startFC endFC) AnnAntiquote
                 return $ PUnquote e
              <?> "unquotation"
{-| Parses a quotation of a name (for using the elaborator to resolve boring details)

> NameQuote ::= '`{' Name '}'
-}
namequote :: SyntaxInfo -> IdrisParser PTerm
namequote syn = do -- "`{{n}}" quotes without resolving; "`{n}" resolves
                   (startFC, res) <-
                     try (do fc <- symbolFC "`{{"
                             return (fc, False)) <|>
                     (do fc <- symbolFC "`{"
                         return (fc, True))
                   (n, nfc) <- fnName
                   endFC <- if res then symbolFC "}" else symbolFC "}}"
                   mapM_ (uncurry highlightP)
                         [ (startFC, AnnKeyword)
                         , (endFC, AnnKeyword)
                         , (spanFC startFC endFC, AnnQuasiquote)
                         ]
                   return $ PQuoteName n res nfc
                <?> "quoted name"
{-| Parses a record field setter expression
@
RecordType ::=
  'record' '{' FieldTypeList '}';
@
@
FieldTypeList ::=
  FieldType
  | FieldType ',' FieldTypeList
  ;
@
@
FieldType ::=
  FnName '=' Expr
  ;
@
-}
-- | One record-field operation: '=' replaces the field ('FieldSet'),
-- '$=' applies a function to it ('FieldUpdate').
data SetOrUpdate = FieldSet PTerm | FieldUpdate PTerm
-- | Parse a record update/access form: "record { ... }" optionally
-- applied to a record value.  Without a value it builds a lambda that
-- performs the update/access when applied.
recordType :: SyntaxInfo -> IdrisParser PTerm
recordType syn =
      do kw <- reservedFC "record"
         lchar '{'
         fgs <- fieldGetOrSet
         lchar '}'
         fc <- getFC
         rec <- optional (do notEndApp; simpleExpr syn)
         highlightP kw AnnKeyword
         case fgs of
              Left fields ->
                case rec of
                   Nothing ->
                       return (PLam fc (sMN 0 "fldx") NoFC Placeholder
                                   (applyAll fc fields (PRef fc [] (sMN 0 "fldx"))))
                   Just v -> return (applyAll fc fields v)
              Right fields ->
                case rec of
                   Nothing ->
                       return (PLam fc (sMN 0 "fldx") NoFC Placeholder
                                 (getAll fc (reverse fields)
                                     (PRef fc [] (sMN 0 "fldx"))))
                   Just v -> return (getAll fc (reverse fields) v)
       <?> "record setting expression"
   where -- one "path = e" or "path $= e" entry
         fieldSet :: IdrisParser ([Name], SetOrUpdate)
         fieldSet = do ns <- fieldGet
                       (do lchar '='
                           e <- expr syn
                           return (ns, FieldSet e))
                         <|> do symbol "$="
                                e <- expr syn
                                return (ns, FieldUpdate e)
                    <?> "field setter"
         -- a field access path, e.g. "a->b->c"
         fieldGet :: IdrisParser [Name]
         fieldGet = sepBy1 (fst <$> fnName) (symbol "->")
         -- either a list of setters/updaters or a bare accessor path
         fieldGetOrSet :: IdrisParser (Either [([Name], SetOrUpdate)] [Name])
         fieldGetOrSet = try (do fs <- sepBy1 fieldSet (lchar ',')
                                 return (Left fs))
                     <|> do f <- fieldGet
                            return (Right f)
         -- apply each field operation in turn to the record value
         applyAll :: FC -> [([Name], SetOrUpdate)] -> PTerm -> PTerm
         applyAll fc [] x = x
         applyAll fc ((ns, e) : es) x
            = applyAll fc es (doUpdate fc ns e x)
         -- "$=" desugars to reading the field then setting it to f field
         doUpdate fc ns (FieldUpdate e) get
              = let get' = getAll fc (reverse ns) get in
                    doUpdate fc ns (FieldSet (PApp fc e [pexp get'])) get
         doUpdate fc [n] (FieldSet e) get
              = PApp fc (PRef fc [] (mkType n)) [pexp e, pexp get]
         -- nested paths update the inner record and set it back
         doUpdate fc (n : ns) e get
              = PApp fc (PRef fc [] (mkType n))
                  [pexp (doUpdate fc ns e (PApp fc (PRef fc [] n) [pexp get])),
                   pexp get]
         -- compose nested field accessors
         getAll :: FC -> [Name] -> PTerm -> PTerm
         getAll fc [n] e = PApp fc (PRef fc [] n) [pexp e]
         getAll fc (n:ns) e = PApp fc (PRef fc [] n) [pexp (getAll fc ns e)]
         -- | Creates setters for record types on necessary functions.
         -- NOTE(review): partial -- no clause for other Name shapes;
         -- presumably only these constructors occur for field names.
         mkType :: Name -> Name
         mkType (UN n) = sUN ("set_" ++ str n)
         mkType (MN 0 n) = sMN 0 ("set_" ++ str n)
         mkType (NS n s) = NS (mkType n) s
{- | Parses a type signature
@
TypeSig ::=
  ':' Expr
  ;
@
@
TypeExpr ::= ConstraintList? Expr;
@
-}
typeExpr :: SyntaxInfo -> IdrisParser PTerm
typeExpr syn = do -- constraints may only prefix the type when implicits
                  -- are allowed in this context
                  cs <- if implicitAllowed syn then constraintList syn else return []
                  sc <- expr (allowConstr syn)
                  return (bindList (\r -> PPi (constraint { pcount = r })) cs sc)
               <?> "type signature"
{- | Parses a lambda expression
@
Lambda ::=
    '\\' TypeOptDeclList LambdaTail
  | '\\' SimpleExprList  LambdaTail
  ;
@
@
SimpleExprList ::=
  SimpleExpr
  | SimpleExpr ',' SimpleExprList
  ;
@
@
LambdaTail ::=
    Impossible
  | '=>' Expr
@
-}
lambda :: SyntaxInfo -> IdrisParser PTerm
lambda syn = do lchar '\\' <?> "lambda expression"
                -- first try name(:type) binders, then fall back to
                -- pattern binders which desugar to case expressions
                ((do xt <- try $ tyOptDeclList (disallowImp syn)
                     fc <- getFC
                     sc <- lambdaTail
                     return (bindList (\r -> PLam fc) xt sc))
                 <|>
                 (do ps <- sepBy (do fc <- getFC
                                     e <- simpleExpr (disallowImp (syn { inPattern = True }))
                                     return (fc, e))
                                 (lchar ',')
                     sc <- lambdaTail
                     return (pmList (zip [0..] ps) sc)))
             <?> "lambda expression"
  where pmList :: [(Int, (FC, PTerm))] -> PTerm -> PTerm
        -- each pattern becomes \lamp_i => case lamp_i of pat => ...
        pmList [] sc = sc
        pmList ((i, (fc, x)) : xs) sc
              = PLam fc (sMN i "lamp") NoFC Placeholder
                      (PCase fc (PRef fc [] (sMN i "lamp"))
                              [(x, pmList xs sc)])
        lambdaTail :: IdrisParser PTerm
        lambdaTail = impossible <|> symbol "=>" *> expr syn
{- | Parses a term rewrite expression
@
RewriteTerm ::=
  'rewrite' Expr ('==>' Expr)? 'in' Expr
  ;
@
-}
rewriteTerm :: SyntaxInfo -> IdrisParser PTerm
rewriteTerm syn = do kw <- reservedFC "rewrite"
                     fc <- getFC
                     prf <- expr syn
                     -- optional result type and rewriting lemma
                     giving <- optional (do symbol "==>"; expr' syn)
                     using <- optional (do reserved "using"
                                           (n, _) <- name
                                           return n)
                     kw' <- reservedFC "in"; sc <- expr syn
                     highlightP kw AnnKeyword
                     highlightP kw' AnnKeyword
                     return (PRewrite fc using prf sc giving)
                  <?> "term rewrite expression"
{- |Parses a let binding
@
Let ::=
  'let' Name TypeSig'? '=' Expr  'in' Expr
| 'let' Expr'          '=' Expr' 'in' Expr

TypeSig' ::=
  ':' Expr'
  ;
@
-}
let_ :: SyntaxInfo -> IdrisParser PTerm
let_ syn = try (do kw <- reservedFC "let"
                   ls <- indentedBlock (let_binding syn)
                   kw' <- reservedFC "in";  sc <- expr syn
                   highlightP kw AnnKeyword; highlightP kw' AnnKeyword
                   return (buildLets ls sc))
           <?> "let binding"
  where -- a plain variable binding becomes PLet; a pattern binding (or
        -- one with alternatives) becomes a case expression
        buildLets [] sc = sc
        buildLets ((fc, PRef nfc _ n, ty, v, []) : ls) sc
          = PLet fc n nfc ty v (buildLets ls sc)
        buildLets ((fc, pat, ty, v, alts) : ls) sc
          = PCase fc v ((pat, buildLets ls sc) : alts)
        let_binding syn = do fc <- getFC;
                             pat <- expr' (syn { inPattern = True })
                             ty <- option Placeholder (do lchar ':'; expr' syn)
                             lchar '='
                             v <- expr (syn { withAppAllowed = isVar pat })
                             -- "| alt" fallback alternatives for pattern lets
                             ts <- option [] (do lchar '|'
                                                 sepBy1 (do_alt syn) (lchar '|'))
                             return (fc,pat,ty,v,ts)
          where isVar (PRef _ _ _) = True
                isVar _ = False
{- | Parses a conditional expression
@
If ::= 'if' Expr 'then' Expr 'else' Expr
@
-}
if_ :: SyntaxInfo -> IdrisParser PTerm
if_ syn = (do kwIf <- reservedFC "if"
              fc <- getFC
              cond <- expr syn
              kwThen <- reservedFC "then"
              whenTrue <- expr syn
              kwElse <- reservedFC "else"
              whenFalse <- expr syn
              -- highlight all three keywords once the form has parsed
              mapM_ (flip highlightP AnnKeyword) [kwIf, kwThen, kwElse]
              return (PIfThenElse fc cond whenTrue whenFalse))
          <?> "conditional expression"
{- | Parses a quote goal
@
QuoteGoal ::=
  'quoteGoal' Name 'by' Expr 'in' Expr
  ;
@
-}
quoteGoal :: SyntaxInfo -> IdrisParser PTerm
quoteGoal syn = do kw1 <- reservedFC "quoteGoal"; n <- fst <$> name;
                   kw2 <- reservedFC "by"
                   r <- expr syn
                   kw3 <- reservedFC "in"
                   fc <- getFC
                   sc <- expr syn
                   mapM_ (flip highlightP AnnKeyword) [kw1, kw2, kw3]
                   return (PGoal fc r n sc)
                <?> "quote goal expression"
{- | Parses a dependent type signature
@
Pi ::= PiOpts Static? Pi'
@
@
Pi' ::=
    OpExpr ('->' Pi)?
  | '(' TypeDeclList ')' '->' Pi
  | '{' TypeDeclList '}' '->' Pi
  | '{' 'auto' TypeDeclList '}' '->' Pi
  | '{' 'default' SimpleExpr TypeDeclList '}' '->' Pi
  ;
@
-}
-- | Parse the '->' arrow of a binder and build the corresponding
-- explicit-argument Plicity (the 'syn' argument is currently unused).
bindsymbol opts st syn
     = do symbol "->"
          return (Exp opts st False RigW)
-- | Explicitly bound pi: "(x : t, ...) -> sc".
explicitPi opts st syn
   = do xt <- try (lchar '(' *> typeDeclList syn <* lchar ')')
        binder <- bindsymbol opts st syn
        sc <- expr (allowConstr syn)
        return (bindList (\r -> PPi (binder { pcount = r })) xt sc)
-- | Auto-implicit binder: "{auto x : t} -> sc"; the argument is solved
-- by a bounded proof search at elaboration time.
autoImplicit opts st syn
   = do kw <- reservedFC "auto"
        when (st == Static) $ fail "auto implicits can not be static"
        xt <- typeDeclList syn
        lchar '}'
        symbol "->"
        sc <- expr (allowConstr syn)
        highlightP kw AnnKeyword
        return (bindList (\r -> PPi
          (TacImp [] Dynamic (PTactics [ProofSearch True True 100 Nothing [] []]) r)) xt sc)
-- | Default-implicit binder: "{default script x : t} -> sc"; the given
-- (desugared) script supplies the argument when it is not given.
defaultImplicit opts st syn = do
   kw <- reservedFC "default"
   when (st == Static) $ fail "default implicits can not be static"
   ist <- get
   script' <- simpleExpr syn
   let script = debindApp syn . desugar syn ist $ script'
   xt <- typeDeclList syn
   lchar '}'
   symbol "->"
   sc <- expr (allowConstr syn)
   highlightP kw AnnKeyword
   return (bindList (\r -> PPi (TacImp [] Dynamic script r)) xt sc)
-- | Ordinary implicit binder: "{x : t} -> sc".  The chosen Impl flags
-- depend on whether implicits are allowed in the enclosing context.
normalImplicit opts st syn = do
   xt <- typeDeclList syn <* lchar '}'
   symbol "->"
   cs <- constraintList syn
   sc <- expr syn
   let (im,cl)
          = if implicitAllowed syn
               then (Imp opts st False (Just (Impl False True False)) True RigW,
                      constraint)
               else (Imp opts st False (Just (Impl False False False)) True RigW,
                      Imp opts st False (Just (Impl True False False)) True RigW)
   return (bindList (\r -> PPi (im { pcount = r })) xt
           (bindList (\r -> PPi (cl { pcount = r })) cs sc))
-- | Constraint-bound pi: "C => sc"; binds each constraint either as a
-- proper constraint or as an implicit, depending on context.
constraintPi opts st syn =
   do cs <- constraintList1 syn
      sc <- expr syn
      if implicitAllowed syn
         then return (bindList (\r -> PPi constraint { pcount = r }) cs sc)
         else return (bindList (\r -> PPi (Imp opts st False (Just (Impl True False False)) True r))
                               cs sc)
-- | Dispatch after '{': auto-implicit, default-implicit, then ordinary
-- implicit binder.  Order matters: the keyword forms must be tried first.
implicitPi opts st syn =
      autoImplicit opts st syn
        <|> defaultImplicit opts st syn
          <|> normalImplicit opts st syn
-- | Arrow type without a named binder: "t -> sc" (or just "t").
unboundPi opts st syn = do
       x <- opExpr syn
       (do binder <- bindsymbol opts st syn
           sc <- expr syn
           return (PPi binder (sUN "__pi_arg") NoFC x sc))
              <|> return x
-- This is used when we need to disambiguate from a constraint list
-- (the extra notFollowedBy checks reject a following "=>").
unboundPiNoConstraint opts st syn = do
       x <- opExpr syn
       (do binder <- bindsymbol opts st syn
           sc <- expr syn
           notFollowedBy $ reservedOp "=>"
           return (PPi binder (sUN "__pi_arg") NoFC x sc))
              <|> do notFollowedBy $ reservedOp "=>"
                     return x
-- | Parse a dependent type signature, dispatching on the leading token:
-- '(' for explicit binders, '{' for implicit forms, otherwise an unbound
-- arrow (with constraint-list disambiguation when allowed).
pi :: SyntaxInfo -> IdrisParser PTerm
pi syn =
     do opts <- piOpts syn
        st <- static
        explicitPi opts st syn
         <|> try (do lchar '{'; implicitPi opts st syn)
         <|> if constraintAllowed syn
                then try (unboundPiNoConstraint opts st syn)
                       <|> constraintPi opts st syn
                else unboundPi opts st syn
  <?> "dependent type signature"
{- | Parses Possible Options for Pi Expressions
@
PiOpts ::= '.'?
@
-}
piOpts :: SyntaxInfo -> IdrisParser [ArgOpt]
piOpts syn
  | implicitAllowed syn = (lchar '.' *> return [InaccessibleArg]) <|> return []
  | otherwise           = return []
{- | Parses a type constraint list
@
ConstraintList ::=
'(' Expr_List ')' '=>'
| Expr '=>'
;
@
-}
constraintList :: SyntaxInfo -> IdrisParser [(RigCount, Name, FC, PTerm)]
constraintList syn = option [] (try (constraintList1 syn))
-- | Parse a non-empty constraint list, terminated by "=>".
constraintList1 :: SyntaxInfo -> IdrisParser [(RigCount, Name, FC, PTerm)]
constraintList1 syn =
        try (do lchar '('
                cs <- sepBy1 nexpr (lchar ',')
                lchar ')'
                reservedOp "=>"
                return cs)
    <|> try (do c <- opExpr (disallowImp syn)
                reservedOp "=>"
                return [(RigW, defname, NoFC, c)])
    <?> "type constraint list"
  where
    -- A single constraint, either named ("n : C") or anonymous.
    nexpr = try (do (n, nfc) <- name
                    lchar ':'
                    c <- expr (disallowImp syn)
                    return (RigW, n, nfc, c))
        <|> do c <- expr (disallowImp syn)
               return (RigW, defname, NoFC, c)
    -- Machine name used for anonymous constraints.
    defname = sMN 0 "constraint"
{- | Parses a type declaration list
@
TypeDeclList ::=
FunctionSignatureList
| NameList TypeSig
;
@
@
FunctionSignatureList ::=
Name TypeSig
| Name TypeSig ',' FunctionSignatureList
;
@
-}
typeDeclList :: SyntaxInfo -> IdrisParser [(RigCount, Name, FC, PTerm)]
typeDeclList syn =
        try (sepBy1 single (lchar ','))
    <|> shared
    <?> "type declaration list"
  where
    -- One "name : type" signature, optionally preceded by a
    -- multiplicity annotation.
    single = do rig <- option RigW rigCount
                (n, nfc) <- fnName
                lchar ':'
                ty <- typeExpr (disallowImp syn)
                return (rig, n, nfc, ty)
    -- Several names sharing one type: "x, y, z : t".
    shared = do ns <- sepBy1 name (lchar ',')
                lchar ':'
                ty <- typeExpr (disallowImp syn)
                return (map (\(n, nfc) -> (RigW, n, nfc, ty)) ns)
    -- Multiplicities: '1' for linear, '0' for erased.
    rigCount = (lchar '1' >> return Rig1) <|> (lchar '0' >> return Rig0)
{- | Parses a type declaration list with optional parameters
@
TypeOptDeclList ::=
NameOrPlaceholder TypeSig?
| NameOrPlaceholder TypeSig? ',' TypeOptDeclList
;
@
@
NameOrPlaceHolder ::= Name | '_';
@
-}
tyOptDeclList :: SyntaxInfo -> IdrisParser [(RigCount, Name, FC, PTerm)]
tyOptDeclList syn =
    sepBy1 declaration (lchar ',') <?> "type declaration list"
  where
    -- One declaration; a missing ": type" defaults to Placeholder.
    declaration = do
      (n, nfc) <- nameOrPlaceholder
      ty <- option Placeholder (lchar ':' *> expr syn)
      return (RigW, n, nfc, ty)
    nameOrPlaceholder :: IdrisParser (Name, FC)
    nameOrPlaceholder =
          fnName
      <|> do symbol "_"
             return (sMN 0 "underscore", NoFC)
      <?> "name or placeholder"
{- | Parses a list literal expression e.g. [1,2,3] or a comprehension [ (x, y) | x <- xs , y <- ys ]
@
ListExpr ::=
'[' ']'
| '[' Expr '|' DoList ']'
| '[' ExprList ']'
;
@
@
DoList ::=
Do
| Do ',' DoList
;
@
@
ExprList ::=
Expr
| Expr ',' ExprList
;
@
-}
listExpr :: SyntaxInfo -> IdrisParser PTerm
listExpr syn = do (FC f (l, c) _) <- getFC
                  lchar '['; fc <- getFC;
                  -- Empty list: the very next token is the closing bracket.
                  (try . token $ do (char ']' <?> "end of list expression")
                                    (FC _ _ (l', c')) <- getFC
                                    return (mkNil (FC f (l, c) (l', c'))))
                   <|> (do x <- expr (syn { withAppAllowed = False }) <?> "expression"
                           -- Comprehension "[ e | quals ]": desugared to a
                           -- do-block of guards ending in "pure e".
                           (do try (lchar '|') <?> "list comprehension"
                               qs <- sepBy1 (do_ syn) (lchar ',')
                               lchar ']'
                               return (PDoBlock (map addGuard qs ++
                                  [DoExp fc (PApp fc (PRef fc [] (sUN "pure"))
                                               [pexp x])]))) <|>
                            -- Plain literal: remaining comma-separated
                            -- elements, each paired with its comma's span.
                            (do xs <- many (do (FC fn (sl, sc) _) <- getFC
                                               lchar ',' <?> "list element"
                                               let commaFC = FC fn (sl, sc) (sl, sc + 1)
                                               elt <- expr syn
                                               return (elt, commaFC))
                                (FC fn (sl, sc) _) <- getFC
                                lchar ']' <?> "end of list expression"
                                let rbrackFC = FC fn (sl, sc) (sl, sc+1)
                                return (mkList fc rbrackFC ((x, (FC f (l, c) (l, c+1))) : xs))))
                   <?> "list expression"
  where
    -- An empty list is a bare reference to Nil.
    mkNil :: FC -> PTerm
    mkNil fc = PRef fc [fc] (sUN "Nil")
    -- Right-fold the elements into nested applications of "::".
    mkList :: FC -> FC -> [(PTerm, FC)] -> PTerm
    mkList errFC nilFC [] = PRef nilFC [nilFC] (sUN "Nil")
    mkList errFC nilFC ((x, fc) : xs) = PApp errFC (PRef fc [fc] (sUN "::")) [pexp x, pexp (mkList errFC nilFC xs)]
    -- Comprehension qualifiers that are bare expressions become guards.
    addGuard :: PDo -> PDo
    addGuard (DoExp fc e) = DoExp fc (PApp fc (PRef fc [] (sUN "guard"))
                                        [pexp e])
    addGuard x = x
{- | Parses a do-block
@
Do' ::= Do KeepTerminator;
@
@
DoBlock ::=
'do' OpenBlock Do'+ CloseBlock
;
@
-}
doBlock :: SyntaxInfo -> IdrisParser PTerm
doBlock syn =
    (do kw <- reservedFC "do"
        stmts <- indentedBlock1 (do_ syn)
        highlightP kw AnnKeyword
        return (PDoBlock stmts))
    <?> "do block"
{- | Parses an expression inside a do block
@
Do ::=
'let' Name TypeSig'? '=' Expr
| 'let' Expr' '=' Expr
| Name '<-' Expr
| Expr' '<-' Expr
| Expr
;
@
-}
do_ :: SyntaxInfo -> IdrisParser PDo
do_ syn
     -- "let n : t = e" / "let n = e" (named let, optional annotation)
     = try (do kw <- reservedFC "let"
               (i, ifc) <- name
               ty <- option Placeholder (do lchar ':'
                                            expr' syn)
               reservedOp "="
               fc <- getFC
               e <- expr syn
               highlightP kw AnnKeyword
               return (DoLet fc i ifc ty e))
     -- "let pat = e" (pattern-matching let)
   <|> try (do kw <- reservedFC "let"
               i <- expr' syn
               reservedOp "="
               fc <- getFC
               sc <- expr syn
               highlightP kw AnnKeyword
               return (DoLetP fc i sc))
     -- "n <- e" with optional "| alt => rhs" alternatives
   <|> try (do (i, ifc) <- name
               symbol "<-"
               fc <- getFC
               e <- expr (syn { withAppAllowed = False });
               option (DoBind fc i ifc e)
                      (do lchar '|'
                          ts <- sepBy1 (do_alt (syn { withAppAllowed = False })) (lchar '|')
                          return (DoBindP fc (PRef ifc [ifc] i) e ts)))
     -- "pat <- e" with optional alternatives
   <|> try (do i <- expr' syn
               symbol "<-"
               fc <- getFC
               e <- expr (syn { withAppAllowed = False });
               option (DoBindP fc i e [])
                      (do lchar '|'
                          ts <- sepBy1 (do_alt (syn { withAppAllowed = False })) (lchar '|')
                          return (DoBindP fc i e ts)))
     -- Bare expression statement
   <|> do e <- expr syn
          fc <- getFC
          return (DoExp fc e)
   <?> "do block expression"
-- | Parse one "lhs => rhs" alternative of a pattern bind; a bare
-- expression stands for (Placeholder, expression).
do_alt syn = do
    lhs <- expr' syn
    option (Placeholder, lhs) $ do
      symbol "=>"
      rhs <- expr' syn
      return (lhs, rhs)
{- | Parses an expression in idiom brackets
@
Idiom ::= '[|' Expr '|]';
@
-}
idiom :: SyntaxInfo -> IdrisParser PTerm
idiom syn =
    (do symbol "[|"
        fc <- getFC
        body <- expr (syn { withAppAllowed = False })
        symbol "|]"
        return (PIdiom fc body))
    <?> "expression in idiom brackets"
{- |Parses a constant or literal expression
@
Constant ::=
'Integer'
| 'Int'
| 'Char'
| 'Double'
| 'String'
| 'Bits8'
| 'Bits16'
| 'Bits32'
| 'Bits64'
| Float_t
| Natural_t
| VerbatimString_t
| String_t
| Char_t
;
@
-}
constants :: [(String, Idris.Core.TT.Const)]
constants =
    -- Keyword table mapping type-literal names to core constants.
    [ ("Integer",         AType (ATInt ITBig))
    , ("Int",             AType (ATInt ITNative))
    , ("Char",            AType (ATInt ITChar))
    , ("Double",          AType ATFloat)
    , ("String",          StrType)
    , ("prim__WorldType", WorldType)
    , ("prim__TheWorld",  TheWorld)
    , ("Bits8",           AType (ATInt (ITFixed IT8)))
    , ("Bits16",          AType (ATInt (ITFixed IT16)))
    , ("Bits32",          AType (ATInt (ITFixed IT32)))
    , ("Bits64",          AType (ATInt (ITFixed IT64)))
    ]
-- | Parse a constant and its source span
constant :: IdrisParser (Idris.Core.TT.Const, FC)
constant = choice [ do fc <- reservedFC name; return (ty, fc)
| (name, ty) <- constants
]
<|> do (f, fc) <- try float; return (Fl f, fc)
<|> do (i, fc) <- natural; return (BI i, fc)
<|> do (s, fc) <- verbatimStringLiteral; return (Str s, fc)
<|> do (s, fc) <- stringLiteral; return (Str s, fc)
<|> do (c, fc) <- try charLiteral; return (Ch c, fc) --Currently ambigous with symbols
<?> "constant or literal"
{- | Parses a verbatim multi-line string literal (triple-quoted)
@
VerbatimString_t ::=
'\"\"\"' ~'\"\"\"' '\"\"\"'
;
@
-}
verbatimStringLiteral :: MonadicParsing m => m (String, FC)
verbatimStringLiteral = token $ do
    (FC f start _) <- getFC
    try $ string "\"\"\""
    contents <- manyTill anyChar $ try (string "\"\"\"")
    (FC _ _ end) <- getFC
    return (contents, FC f start end)
{- | Parses a static modifier
@
Static ::=
'%static'
;
@
-}
static :: IdrisParser Static
static = do reserved "%static"; return Static
<|> do reserved "[static]"
fc <- getFC
parserWarning fc Nothing (Msg "The use of [static] is deprecated, use %static instead.")
return Static
<|> return Dynamic
<?> "static modifier"
{- | Parses a tactic script
@
Tactic ::= 'intro' NameList?
| 'intros'
| 'refine' Name Imp+
| 'mrefine' Name
| 'rewrite' Expr
| 'induction' Expr
| 'equiv' Expr
| 'let' Name ':' Expr' '=' Expr
| 'let' Name '=' Expr
| 'focus' Name
| 'exact' Expr
| 'applyTactic' Expr
| 'reflect' Expr
| 'fill' Expr
| 'try' Tactic '|' Tactic
| '{' TacticSeq '}'
| 'compute'
| 'trivial'
| 'solve'
| 'attack'
| 'state'
| 'term'
| 'undo'
| 'qed'
| 'abandon'
| ':' 'q'
;
Imp ::= '?' | '_';
TacticSeq ::=
Tactic ';' Tactic
| Tactic ';' TacticSeq
;
@
-}
-- | A specification of the arguments that tactics can take
data TacticArg = NameTArg -- ^ Names: n1, n2, n3, ... n
               | ExprTArg      -- ^ A single expression argument
               | AltsTArg      -- ^ Tactic alternatives, separated by '|'
               | StringLitTArg -- ^ A string-literal argument (e.g. for "fail")
-- The FIXMEs are Issue #1766 in the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1766
-- | A list of available tactics and their argument requirements
tactics :: [([String], Maybe TacticArg, SyntaxInfo -> IdrisParser PTactic)]
tactics =
  [ (["intro"], Nothing, const $ -- FIXME syntax for intro (fresh name)
     do ns <- sepBy (spaced (fst <$> name)) (lchar ','); return $ Intro ns)
  , noArgs ["intros"] Intros
  , noArgs ["unfocus"] Unfocus
  , (["refine"], Just ExprTArg, const $
     do n <- spaced (fst <$> fnName)
        imps <- many imp
        return $ Refine n imps)
  , (["claim"], Nothing, \syn ->
     do n <- indentPropHolds gtProp *> (fst <$> name)
        goal <- indentPropHolds gtProp *> expr syn
        return $ Claim n goal)
  , (["mrefine"], Just ExprTArg, const $
     do n <- spaced (fst <$> fnName)
        return $ MatchRefine n)
  , expressionTactic ["rewrite"] Rewrite
  , expressionTactic ["case"] CaseTac
  , expressionTactic ["induction"] Induction
  , expressionTactic ["equiv"] Equiv
  , (["let"], Nothing, \syn -> -- FIXME syntax for let
     do n <- (indentPropHolds gtProp *> (fst <$> name))
        -- Either "let n : ty = t" or "let n = t".
        (do indentPropHolds gtProp *> lchar ':'
            ty <- indentPropHolds gtProp *> expr' syn
            indentPropHolds gtProp *> lchar '='
            t <- indentPropHolds gtProp *> expr syn
            i <- get
            return $ LetTacTy n (desugar syn i ty) (desugar syn i t))
          <|> (do indentPropHolds gtProp *> lchar '='
                  t <- indentPropHolds gtProp *> expr syn
                  i <- get
                  return $ LetTac n (desugar syn i t)))
  , (["focus"], Just ExprTArg, const $
     do n <- spaced (fst <$> name)
        return $ Focus n)
  , expressionTactic ["exact"] Exact
  , expressionTactic ["applyTactic"] ApplyTactic
  , expressionTactic ["byReflection"] ByReflection
  , expressionTactic ["reflect"] Reflect
  , expressionTactic ["fill"] Fill
  , (["try"], Just AltsTArg, \syn ->
     do t <- spaced (tactic syn)
        lchar '|'
        t1 <- spaced (tactic syn)
        return $ Try t t1)
  , noArgs ["compute"] Compute
  , noArgs ["trivial"] Trivial
  , noArgs ["unify"] DoUnify
  , (["search"], Nothing, const $
     -- Depth defaults to 10 when no argument is given.
     do depth <- option 10 $ fst <$> natural
        return (ProofSearch True True (fromInteger depth) Nothing [] []))
  , noArgs ["implementation"] TCImplementation
  , noArgs ["solve"] Solve
  , noArgs ["attack"] Attack
  , noArgs ["state", ":state"] ProofState
  , noArgs ["term", ":term"] ProofTerm
  , noArgs ["undo", ":undo"] Undo
  , noArgs ["qed", ":qed"] Qed
  , noArgs ["abandon", ":q"] Abandon
  , noArgs ["skip"] Skip
  , noArgs ["sourceLocation"] SourceFC
  , expressionTactic [":e", ":eval"] TEval
  , expressionTactic [":t", ":type"] TCheck
  , expressionTactic [":search"] TSearch
  , (["fail"], Just StringLitTArg, const $
     do msg <- fst <$> stringLiteral
        return $ TFail [Idris.Core.TT.TextPart msg])
  , ([":doc"], Just ExprTArg, const $
     do whiteSpace
        doc <- (Right . fst <$> constant) <|> (Left . fst <$> fnName)
        eof
        return (TDocStr doc))
  ]
  where
  -- Tactics taking a single expression: parse it, desugar against the
  -- current IState and wrap in the given constructor.
  expressionTactic names tactic = (names, Just ExprTArg, \syn ->
     do t <- spaced (expr syn)
        i <- get
        return $ tactic (desugar syn i t))
  -- Tactics taking no arguments at all.
  noArgs names tactic = (names, Nothing, const (return tactic))
  -- Run a parser only after the indentation property holds.
  spaced parser = indentPropHolds gtProp *> parser
  -- Argument markers for refine: '?' yields False, '_' yields True.
  imp :: IdrisParser Bool
  imp = do lchar '?'; return False
    <|> do lchar '_'; return True
tactic :: SyntaxInfo -> IdrisParser PTactic
tactic syn = choice [ do choice (map reserved names); parser syn
                    | (names, _, parser) <- tactics ]
          <|> do lchar '{'
                 t <- tactic syn;
                 lchar ';';
                 ts <- sepBy1 (tactic syn) (lchar ';')
                 lchar '}'
                 return $ TSeq t (mergeSeq ts)
          <|> ((lchar ':' >> empty) <?> "prover command")
          <?> "tactic"
  where
    -- Fold a list of tactics into right-nested TSeq applications.
    -- NOTE(review): partial on [] -- safe here because the only caller
    -- builds the list with sepBy1, which guarantees non-emptiness.
    mergeSeq :: [PTactic] -> PTactic
    mergeSeq [t] = t
    mergeSeq (t:ts) = TSeq t (mergeSeq ts)
-- | Parses a tactic as a whole
fullTactic :: SyntaxInfo -> IdrisParser PTactic
fullTactic syn = tactic syn <* eof
| jmitchell/Idris-dev | src/Idris/Parser/Expr.hs | bsd-3-clause | 57,438 | 1 | 38 | 20,942 | 17,600 | 8,679 | 8,921 | 1,020 | 51 |
{-|
Module : Idris.IBC
Description : Core representations and code to generate IBC files.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.IBC (loadIBC, loadPkgIndex,
writeIBC, writePkgIndex,
hasValidIBCVersion, IBCPhase(..)) where
import Idris.AbsSyntax
import Idris.Core.Binary
import Idris.Core.CaseTree
import Idris.Core.Evaluate
import Idris.Core.TT
import Idris.DeepSeq
import Idris.Delaborate
import Idris.Docstrings (Docstring)
import qualified Idris.Docstrings as D
import Idris.Error
import Idris.Imports
import Idris.Output
import IRTS.System (getIdrisLibDir)
import Paths_idris
import qualified Cheapskate.Types as CT
import Codec.Archive.Zip
import Control.DeepSeq
import Control.Monad
import Control.Monad.State.Strict hiding (get, put)
import qualified Control.Monad.State.Strict as ST
import Data.Binary
import Data.ByteString.Lazy as B hiding (elem, length, map)
import Data.Functor
import Data.List as L
import Data.Maybe (catMaybes)
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Vector.Binary
import Debug.Trace
import System.Directory
import System.FilePath
-- | Version stamp of the IBC format; 'process' refuses archives whose
-- stored "ver" entry differs from this value.
ibcVersion :: Word16
ibcVersion = 160
-- | When IBC is being loaded - we'll load different things (and omit
-- different structures/definitions) depending on which phase we're in.
data IBCPhase = IBC_Building  -- ^ when building the module tree
              | IBC_REPL Bool -- ^ when loading modules for the REPL; the Bool is True for the top-level module
              deriving (Show, Eq)
-- | In-memory image of one IBC archive.  Each field is serialised to a
-- zip entry of the same name by 'entries' and read back by the
-- corresponding process* function.
data IBCFile = IBCFile {
    ver :: Word16                          -- ^ format version; must equal 'ibcVersion'
  , sourcefile :: FilePath                 -- ^ the source file this IBC was built from
  , ibc_reachablenames :: ![Name]
  , ibc_imports :: ![(Bool, FilePath)]
  , ibc_importdirs :: ![FilePath]
  , ibc_sourcedirs :: ![FilePath]
  , ibc_implicits :: ![(Name, [PArg])]
  , ibc_fixes :: ![FixDecl]
  , ibc_statics :: ![(Name, [Bool])]
  , ibc_interfaces :: ![(Name, InterfaceInfo)]
  , ibc_records :: ![(Name, RecordInfo)]
  , ibc_implementations :: ![(Bool, Bool, Name, Name)]
  , ibc_dsls :: ![(Name, DSL)]
  , ibc_datatypes :: ![(Name, TypeInfo)]
  , ibc_optimise :: ![(Name, OptInfo)]
  , ibc_syntax :: ![Syntax]
  , ibc_keywords :: ![String]
  , ibc_objs :: ![(Codegen, FilePath)]
  , ibc_libs :: ![(Codegen, String)]
  , ibc_cgflags :: ![(Codegen, String)]
  , ibc_dynamic_libs :: ![String]
  , ibc_hdrs :: ![(Codegen, String)]
  , ibc_totcheckfail :: ![(FC, String)]
  , ibc_flags :: ![(Name, [FnOpt])]
  , ibc_fninfo :: ![(Name, FnInfo)]
  , ibc_cg :: ![(Name, CGInfo)]
  , ibc_docstrings :: ![(Name, (Docstring D.DocTerm, [(Name, Docstring D.DocTerm)]))]
  , ibc_moduledocs :: ![(Name, Docstring D.DocTerm)]
  , ibc_transforms :: ![(Name, (Term, Term))]
  , ibc_errRev :: ![(Term, Term)]
  , ibc_coercions :: ![Name]
  , ibc_lineapps :: ![(FilePath, Int, PTerm)]
  , ibc_namehints :: ![(Name, Name)]
  , ibc_metainformation :: ![(Name, MetaInformation)]
  , ibc_errorhandlers :: ![Name]
  , ibc_function_errorhandlers :: ![(Name, Name, Name)] -- fn, arg, handler
  , ibc_metavars :: ![(Name, (Maybe Name, Int, [Name], Bool, Bool))]
  , ibc_patdefs :: ![(Name, ([([(Name, Term)], Term, Term)], [PTerm]))]
  , ibc_postulates :: ![Name]
  , ibc_externs :: ![(Name, Int)]
  , ibc_parsedSpan :: !(Maybe FC)          -- ^ span of the parsed source, if recorded
  , ibc_usage :: ![(Name, Int)]
  , ibc_exports :: ![Name]
  , ibc_autohints :: ![(Name, Name)]
  , ibc_deprecated :: ![(Name, String)]
  , ibc_defs :: ![(Name, Def)]
  , ibc_total :: ![(Name, Totality)]
  , ibc_injective :: ![(Name, Injectivity)]
  , ibc_access :: ![(Name, Accessibility)]
  , ibc_fragile :: ![(Name, String)]
  , ibc_constraints :: ![(FC, UConstraint)]
  }
  deriving Show
{-!
deriving instance Binary IBCFile
!-}
-- | An empty IBC image stamped with the current 'ibcVersion'.
--
-- Built with record syntax rather than a 51-argument positional
-- application: the positional form silently breaks whenever a field is
-- added to or reordered in 'IBCFile', whereas named fields are checked
-- by the compiler.
initIBC :: IBCFile
initIBC = IBCFile
  { ver = ibcVersion
  , sourcefile = ""
  , ibc_reachablenames = []
  , ibc_imports = []
  , ibc_importdirs = []
  , ibc_sourcedirs = []
  , ibc_implicits = []
  , ibc_fixes = []
  , ibc_statics = []
  , ibc_interfaces = []
  , ibc_records = []
  , ibc_implementations = []
  , ibc_dsls = []
  , ibc_datatypes = []
  , ibc_optimise = []
  , ibc_syntax = []
  , ibc_keywords = []
  , ibc_objs = []
  , ibc_libs = []
  , ibc_cgflags = []
  , ibc_dynamic_libs = []
  , ibc_hdrs = []
  , ibc_totcheckfail = []
  , ibc_flags = []
  , ibc_fninfo = []
  , ibc_cg = []
  , ibc_docstrings = []
  , ibc_moduledocs = []
  , ibc_transforms = []
  , ibc_errRev = []
  , ibc_coercions = []
  , ibc_lineapps = []
  , ibc_namehints = []
  , ibc_metainformation = []
  , ibc_errorhandlers = []
  , ibc_function_errorhandlers = []
  , ibc_metavars = []
  , ibc_patdefs = []
  , ibc_postulates = []
  , ibc_externs = []
  , ibc_parsedSpan = Nothing
  , ibc_usage = []
  , ibc_exports = []
  , ibc_autohints = []
  , ibc_deprecated = []
  , ibc_defs = []
  , ibc_total = []
  , ibc_injective = []
  , ibc_access = []
  , ibc_fragile = []
  , ibc_constraints = []
  }
-- | Check whether the file is a readable IBC archive whose stored
-- "ver" entry matches the current 'ibcVersion'.
hasValidIBCVersion :: FilePath -> Idris Bool
hasValidIBCVersion fp = do
  bytes <- runIO $ B.readFile fp
  case toArchiveOrFail bytes of
    Left _        -> return False
    Right archive -> do
      storedVer <- getEntry 0 "ver" archive
      return (storedVer == ibcVersion)
-- | Load an IBC archive, unless it was already loaded with at least
-- the requested visibility.
loadIBC :: Bool -- ^ True = reexport, False = make everything private
        -> IBCPhase
        -> FilePath -> Idris ()
loadIBC reexport phase fp
  = do imps <- getImported
       case lookup fp imps of
         -- Never seen: do a full load.
         Nothing -> load True
         -- Loaded privately before but now reexported: only re-run the
         -- visibility passes ('unhide'); otherwise nothing to do.
         Just p -> if (not p && reexport) then load False else return ()
 where
   load fullLoad = do
     logIBC 1 $ "Loading ibc " ++ fp ++ " " ++ show reexport
     archiveFile <- runIO $ B.readFile fp
     case toArchiveOrFail archiveFile of
       Left _ -> do
         ifail $ fp ++ " isn't loadable, it may have an old ibc format.\n"
                    ++ "Please clean and rebuild it."
       Right archive -> do
         if fullLoad
           then process reexport phase archive fp
           else unhide phase archive
         addImported reexport fp
-- | Load an entire package from its index file
loadPkgIndex :: String -> Idris ()
loadPkgIndex pkg = do
  libDir <- runIO getIdrisLibDir
  -- Make the package's install directory searchable first.
  addImportDir (libDir </> pkg)
  findPkgIndex pkg >>= loadIBC True IBC_Building
-- | Encode a list as a named archive entry; empty lists are dropped so
-- they occupy no space in the archive.
makeEntry :: (Binary b) => String -> [b] -> Maybe Entry
makeEntry name val
  | L.null val = Nothing
  | otherwise  = Just $ toEntry name 0 (encode val)
-- | Flatten an IBCFile into zip entries.  Each field's name doubles as
-- its entry path; empty fields are omitted via 'makeEntry'.
-- NOTE(review): ibc_reachablenames is never serialised here -- confirm
-- that is intentional.
entries :: IBCFile -> [Entry]
entries i = catMaybes [Just $ toEntry "ver" 0 (encode $ ver i),
                       makeEntry "sourcefile"  (sourcefile i),
                       makeEntry "ibc_imports"  (ibc_imports i),
                       makeEntry "ibc_importdirs"  (ibc_importdirs i),
                       makeEntry "ibc_sourcedirs"  (ibc_sourcedirs i),
                       makeEntry "ibc_implicits"  (ibc_implicits i),
                       makeEntry "ibc_fixes"  (ibc_fixes i),
                       makeEntry "ibc_statics"  (ibc_statics i),
                       makeEntry "ibc_interfaces"  (ibc_interfaces i),
                       makeEntry "ibc_records"  (ibc_records i),
                       makeEntry "ibc_implementations"  (ibc_implementations i),
                       makeEntry "ibc_dsls"  (ibc_dsls i),
                       makeEntry "ibc_datatypes"  (ibc_datatypes i),
                       makeEntry "ibc_optimise"  (ibc_optimise i),
                       makeEntry "ibc_syntax"  (ibc_syntax i),
                       makeEntry "ibc_keywords"  (ibc_keywords i),
                       makeEntry "ibc_objs"  (ibc_objs i),
                       makeEntry "ibc_libs"  (ibc_libs i),
                       makeEntry "ibc_cgflags"  (ibc_cgflags i),
                       makeEntry "ibc_dynamic_libs"  (ibc_dynamic_libs i),
                       makeEntry "ibc_hdrs"  (ibc_hdrs i),
                       makeEntry "ibc_totcheckfail"  (ibc_totcheckfail i),
                       makeEntry "ibc_flags"  (ibc_flags i),
                       makeEntry "ibc_fninfo"  (ibc_fninfo i),
                       makeEntry "ibc_cg"  (ibc_cg i),
                       makeEntry "ibc_docstrings"  (ibc_docstrings i),
                       makeEntry "ibc_moduledocs"  (ibc_moduledocs i),
                       makeEntry "ibc_transforms"  (ibc_transforms i),
                       makeEntry "ibc_errRev"  (ibc_errRev i),
                       makeEntry "ibc_coercions"  (ibc_coercions i),
                       makeEntry "ibc_lineapps"  (ibc_lineapps i),
                       makeEntry "ibc_namehints"  (ibc_namehints i),
                       makeEntry "ibc_metainformation"  (ibc_metainformation i),
                       makeEntry "ibc_errorhandlers"  (ibc_errorhandlers i),
                       makeEntry "ibc_function_errorhandlers"  (ibc_function_errorhandlers i),
                       makeEntry "ibc_metavars"  (ibc_metavars i),
                       makeEntry "ibc_patdefs"  (ibc_patdefs i),
                       makeEntry "ibc_postulates"  (ibc_postulates i),
                       makeEntry "ibc_externs"  (ibc_externs i),
                       -- parsedSpan is a Maybe, not a list, so it is
                       -- encoded via fmap rather than makeEntry.
                       toEntry "ibc_parsedSpan" 0 . encode <$> ibc_parsedSpan i,
                       makeEntry "ibc_usage"  (ibc_usage i),
                       makeEntry "ibc_exports"  (ibc_exports i),
                       makeEntry "ibc_autohints"  (ibc_autohints i),
                       makeEntry "ibc_deprecated"  (ibc_deprecated i),
                       makeEntry "ibc_defs"  (ibc_defs i),
                       makeEntry "ibc_total"  (ibc_total i),
                       makeEntry "ibc_injective"  (ibc_injective i),
                       makeEntry "ibc_access"  (ibc_access i),
                       makeEntry "ibc_fragile" (ibc_fragile i)]
-- TODO: Put this back in shortly after minimising/pruning constraints
--                       makeEntry "ibc_constraints" (ibc_constraints i)]
-- | Serialise an IBCFile to disk as a zip archive.
writeArchive :: FilePath -> IBCFile -> Idris ()
writeArchive fp i = do
  -- foldl' rather than foldl: forces the archive as it is built instead
  -- of accumulating a thunk per entry over the whole entry list.
  let a = L.foldl' (flip addEntryToArchive) emptyArchive (entries i)
  runIO $ B.writeFile fp (fromArchive a)
-- | Build the IBC image from the pending writes recorded in the IState
-- and write it for source file @src@ to path @f@.  Failures are logged
-- rather than rethrown.
writeIBC :: FilePath -> FilePath -> Idris ()
writeIBC src f
    = do
         logIBC 1 $ "Writing IBC for: " ++ show f
         iReport 2 $ "Writing IBC for: " ++ show f
         i <- getIState
         -- case (Data.List.map fst (idris_metavars i)) \\ primDefs of
         --       (_:_) -> ifail "Can't write ibc when there are unsolved metavariables"
         --       [] -> return ()
         resetNameIdx
         ibcf <- mkIBC (ibc_write i) (initIBC { sourcefile = src })
         idrisCatch (do runIO $ createDirectoryIfMissing True (dropFileName f)
                        writeArchive f ibcf
                        logIBC 1 "Written")
                    (\c -> do logIBC 1 $ "Failed " ++ pshow i c)
         return ()
-- | Write a package index containing all the imports in the current
-- IState Used for ':search' of an entire package, to ensure
-- everything is loaded.
writePkgIndex :: FilePath -> Idris ()
writePkgIndex f = do
  ist <- getIState
  -- Every imported module is re-exported (True) from the index.
  let imps = [ (True, fp) | (fp, _) <- idris_imported ist ]
  logIBC 1 $ "Writing package index " ++ show f ++ " including\n" ++
             show (map snd imps)
  resetNameIdx
  let ibcf = initIBC { ibc_imports = imps }
  idrisCatch (do runIO $ createDirectoryIfMissing True (dropFileName f)
                 writeArchive f ibcf
                 logIBC 1 "Written")
             (\c -> do logIBC 1 $ "Failed " ++ pshow ist c)
  return ()
-- | Fold the pending writes into an IBC image, one at a time.
mkIBC :: [IBCWrite] -> IBCFile -> Idris IBCFile
mkIBC [] acc = return acc
mkIBC (w:ws) acc = do
  ist <- getIState
  logIBC 5 $ show w ++ " " ++ show (L.length ws)
  acc' <- ibc ist w acc
  mkIBC ws acc'
-- | Record a single pending write in the IBC image, looking the payload
-- up in the IState where necessary.  A lookup that unexpectedly fails
-- aborts with "IBC write failed".
ibc :: IState -> IBCWrite -> IBCFile -> Idris IBCFile
ibc i (IBCFix d) f = return f { ibc_fixes = d : ibc_fixes f }
ibc i (IBCImp n) f = case lookupCtxtExact n (idris_implicits i) of
                        Just v -> return f { ibc_implicits = (n,v): ibc_implicits f }
                        _ -> ifail "IBC write failed"
ibc i (IBCStatic n) f
                   = case lookupCtxtExact n (idris_statics i) of
                        Just v -> return f { ibc_statics = (n,v): ibc_statics f }
                        _ -> ifail "IBC write failed"
ibc i (IBCInterface n) f
                   = case lookupCtxtExact n (idris_interfaces i) of
                        Just v -> return f { ibc_interfaces = (n,v): ibc_interfaces f }
                        _ -> ifail "IBC write failed"
ibc i (IBCRecord n) f
                   = case lookupCtxtExact n (idris_records i) of
                        Just v -> return f { ibc_records = (n,v): ibc_records f }
                        _ -> ifail "IBC write failed"
ibc i (IBCImplementation int res n ins) f
                   = return f { ibc_implementations = (int, res, n, ins) : ibc_implementations f }
ibc i (IBCDSL n) f
                   = case lookupCtxtExact n (idris_dsls i) of
                        Just v -> return f { ibc_dsls = (n,v): ibc_dsls f }
                        _ -> ifail "IBC write failed"
ibc i (IBCData n) f
                   = case lookupCtxtExact n (idris_datatypes i) of
                        Just v -> return f { ibc_datatypes = (n,v): ibc_datatypes f }
                        _ -> ifail "IBC write failed"
ibc i (IBCOpt n) f = case lookupCtxtExact n (idris_optimisation i) of
                        Just v -> return f { ibc_optimise = (n,v): ibc_optimise f }
                        _ -> ifail "IBC write failed"
ibc i (IBCSyntax n) f = return f { ibc_syntax = n : ibc_syntax f }
ibc i (IBCKeyword n) f = return f { ibc_keywords = n : ibc_keywords f }
ibc i (IBCImport n) f = return f { ibc_imports = n : ibc_imports f }
ibc i (IBCImportDir n) f = return f { ibc_importdirs = n : ibc_importdirs f }
ibc i (IBCSourceDir n) f = return f { ibc_sourcedirs = n : ibc_sourcedirs f }
ibc i (IBCObj tgt n) f = return f { ibc_objs = (tgt, n) : ibc_objs f }
ibc i (IBCLib tgt n) f = return f { ibc_libs = (tgt, n) : ibc_libs f }
ibc i (IBCCGFlag tgt n) f = return f { ibc_cgflags = (tgt, n) : ibc_cgflags f }
ibc i (IBCDyLib n) f = return f {ibc_dynamic_libs = n : ibc_dynamic_libs f }
ibc i (IBCHeader tgt n) f = return f { ibc_hdrs = (tgt, n) : ibc_hdrs f }
-- A definition write also records its pattern clauses when present.
ibc i (IBCDef n) f
   = do f' <- case lookupDefExact n (tt_ctxt i) of
                 Just v -> return f { ibc_defs = (n,v) : ibc_defs f }
                 _ -> ifail "IBC write failed"
        case lookupCtxtExact n (idris_patdefs i) of
                 -- (ibc_patdefs f == ibc_patdefs f' here; f' only
                 -- differs in ibc_defs)
                 Just v -> return f' { ibc_patdefs = (n,v) : ibc_patdefs f }
                 _ -> return f' -- Not a pattern definition
ibc i (IBCDoc n) f = case lookupCtxtExact n (idris_docstrings i) of
                        Just v -> return f { ibc_docstrings = (n,v) : ibc_docstrings f }
                        _ -> ifail "IBC write failed"
ibc i (IBCCG n) f = case lookupCtxtExact n (idris_callgraph i) of
                        Just v -> return f { ibc_cg = (n,v) : ibc_cg f }
                        _ -> ifail "IBC write failed"
ibc i (IBCCoercion n) f = return f { ibc_coercions = n : ibc_coercions f }
ibc i (IBCAccess n a) f = return f { ibc_access = (n,a) : ibc_access f }
ibc i (IBCFlags n) f
   = case lookupCtxtExact n (idris_flags i) of
        Just a -> return f { ibc_flags = (n,a): ibc_flags f }
        _ -> ifail "IBC write failed"
ibc i (IBCFnInfo n a) f = return f { ibc_fninfo = (n,a) : ibc_fninfo f }
ibc i (IBCTotal n a) f = return f { ibc_total = (n,a) : ibc_total f }
ibc i (IBCInjective n a) f = return f { ibc_injective = (n,a) : ibc_injective f }
ibc i (IBCTrans n t) f = return f { ibc_transforms = (n, t) : ibc_transforms f }
ibc i (IBCErrRev t) f = return f { ibc_errRev = t : ibc_errRev f }
ibc i (IBCLineApp fp l t) f
     = return f { ibc_lineapps = (fp,l,t) : ibc_lineapps f }
ibc i (IBCNameHint (n, ty)) f
     = return f { ibc_namehints = (n, ty) : ibc_namehints f }
ibc i (IBCMetaInformation n m) f = return f { ibc_metainformation = (n,m) : ibc_metainformation f }
ibc i (IBCErrorHandler n) f = return f { ibc_errorhandlers = n : ibc_errorhandlers f }
ibc i (IBCFunctionErrorHandler fn a n) f =
   return f { ibc_function_errorhandlers = (fn, a, n) : ibc_function_errorhandlers f }
-- Unknown metavariables are skipped silently rather than failing.
ibc i (IBCMetavar n) f =
     case lookup n (idris_metavars i) of
          Nothing -> return f
          Just t -> return f { ibc_metavars = (n, t) : ibc_metavars f }
ibc i (IBCPostulate n) f = return f { ibc_postulates = n : ibc_postulates f }
ibc i (IBCExtern n) f = return f { ibc_externs = n : ibc_externs f }
ibc i (IBCTotCheckErr fc err) f = return f { ibc_totcheckfail = (fc, err) : ibc_totcheckfail f }
ibc i (IBCParsedRegion fc) f = return f { ibc_parsedSpan = Just fc }
ibc i (IBCModDocs n) f = case lookupCtxtExact n (idris_moduledocs i) of
                           Just v -> return f { ibc_moduledocs = (n,v) : ibc_moduledocs f }
                           _ -> ifail "IBC write failed"
ibc i (IBCUsage n) f = return f { ibc_usage = n : ibc_usage f }
ibc i (IBCExport n) f = return f { ibc_exports = n : ibc_exports f }
ibc i (IBCAutoHint n h) f = return f { ibc_autohints = (n, h) : ibc_autohints f }
ibc i (IBCDeprecate n r) f = return f { ibc_deprecated = (n, r) : ibc_deprecated f }
ibc i (IBCFragile n r) f = return f { ibc_fragile = (n,r) : ibc_fragile f }
ibc i (IBCConstraint fc u) f = return f { ibc_constraints = (fc, u) : ibc_constraints f }
-- | Read and decode a named archive entry, fully forcing the decoded
-- value; returns the fallback when the entry is absent.
getEntry :: (Binary b, NFData b) => b -> FilePath -> Archive -> Idris b
getEntry fallback path ar =
  case findEntryByPath path ar of
    Nothing -> return fallback
    Just e  -> return $! force (decode (fromEntry e))
-- | Re-run only the import and accessibility passes, making an already
-- loaded archive's public names visible without a full reload.
unhide :: IBCPhase -> Archive -> Idris ()
unhide phase ar =
  processImports True phase ar >> processAccess True phase ar
-- | Load every section of an IBC archive into the IState.  Fails early
-- on a version mismatch or when the source file is newer than the IBC.
process :: Bool -- ^ Reexporting
           -> IBCPhase
           -> Archive -> FilePath -> Idris ()
process reexp phase archive fn = do
                ver <- getEntry 0 "ver" archive
                -- Version gate: build a helpful message pointing at the
                -- offending module before failing.
                when (ver /= ibcVersion) $ do
                    logIBC 1 "ibc out of date"
                    let e = if ver < ibcVersion
                            then "an earlier" else "a later"
                    ldir <- runIO $ getIdrisLibDir
                    let start = if ldir `L.isPrefixOf` fn
                                  then "This external module"
                                  else "This module"
                    let end = case L.stripPrefix ldir fn of
                                Nothing -> "Please clean and rebuild."
                                -- NOTE(review): L.head is partial; this relies
                                -- on ploc being non-empty -- confirm fn can
                                -- never equal ldir exactly here.
                                Just ploc -> unwords ["Please reinstall:", L.head $ splitDirectories ploc]
                    ifail $ unlines [ unwords ["Incompatible ibc version for:", show fn]
                                    , unwords [start
                                              , "was built with"
                                              , e
                                              , "version of Idris."]
                                    , end
                                    ]
                source <- getEntry "" "sourcefile" archive
                srcok <- runIO $ doesFileExist source
                when srcok $ timestampOlder source fn
                -- Load the individual sections.  NOTE(review): the order of
                -- these passes appears deliberate (e.g. definitions before
                -- totality/accessibility) -- preserve it when editing.
                processImportDirs archive
                processSourceDirs archive
                processImports reexp phase archive
                processImplicits archive
                processInfix archive
                processStatics archive
                processInterfaces archive
                processRecords archive
                processImplementations archive
                processDSLs archive
                processDatatypes archive
                processOptimise archive
                processSyntax archive
                processKeywords archive
                processObjectFiles archive
                processLibs archive
                processCodegenFlags archive
                processDynamicLibs archive
                processHeaders archive
                processPatternDefs archive
                processFlags archive
                processFnInfo archive
                processTotalityCheckError archive
                processCallgraph archive
                processDocs archive
                processModuleDocs archive
                processCoercions archive
                processTransforms archive
                processErrRev archive
                processLineApps archive
                processNameHints archive
                processMetaInformation archive
                processErrorHandlers archive
                processFunctionErrorHandlers archive
                processMetaVars archive
                processPostulates archive
                processExterns archive
                processParsedSpan archive
                processUsage archive
                processExports archive
                processAutoHints archive
                processDeprecate archive
                processDefs archive
                processTotal archive
                processInjective archive
                processAccess reexp phase archive
                processFragile archive
                processConstraints archive
-- | Fail (forcing the caller to reload from source) when the source
-- file has been modified more recently than the IBC built from it.
timestampOlder :: FilePath -> FilePath -> Idris ()
timestampOlder src ibc = do
  srct <- runIO $ getModificationTime src
  ibct <- runIO $ getModificationTime ibc
  -- 'when' replaces the original if/then/else-return-() boilerplate.
  when (srct > ibct) $
    ifail $ unlines [ "Module needs reloading:"
                    , unwords ["\tSRC :", show src]
                    , unwords ["\tModified at:", show srct]
                    , unwords ["\tIBC :", show ibc]
                    , unwords ["\tModified at:", show ibct]
                    ]
-- | Merge the archive's postulates into the IState's postulate set.
processPostulates :: Archive -> Idris ()
processPostulates ar =
  getEntry [] "ibc_postulates" ar >>= \ns ->
    updateIState (\i -> i { idris_postulates = idris_postulates i `S.union` S.fromList ns })
-- | Merge the archive's extern declarations into the IState.
processExterns :: Archive -> Idris ()
processExterns ar =
  getEntry [] "ibc_externs" ar >>= \ns ->
    updateIState (\i -> i { idris_externs = idris_externs i `S.union` S.fromList ns })
-- | Install the recorded source span (if any) into the IState.
processParsedSpan :: Archive -> Idris ()
processParsedSpan ar =
  getEntry Nothing "ibc_parsedSpan" ar >>= \fc ->
    updateIState (\i -> i { idris_parsedSpan = fc })
-- | Prepend the archive's erasure-usage information to the IState.
processUsage :: Archive -> Idris ()
processUsage ar =
  getEntry [] "ibc_usage" ar >>= \ns ->
    updateIState (\i -> i { idris_erasureUsed = ns ++ idris_erasureUsed i })
-- | Prepend the archive's export list to the IState.
processExports :: Archive -> Idris ()
processExports ar =
  getEntry [] "ibc_exports" ar >>= \ns ->
    updateIState (\i -> i { idris_exports = ns ++ idris_exports i })
-- | Register every (name, hint) pair from the archive.
processAutoHints :: Archive -> Idris ()
processAutoHints ar = do
  ns <- getEntry [] "ibc_autohints" ar
  -- uncurry keeps this consistent with processLibs/processCodegenFlags.
  mapM_ (uncurry addAutoHint) ns
-- | Register every (name, reason) deprecation from the archive.
processDeprecate :: Archive -> Idris ()
processDeprecate ar = do
  ns <- getEntry [] "ibc_deprecated" ar
  -- uncurry keeps this consistent with processLibs/processCodegenFlags.
  mapM_ (uncurry addDeprecated) ns
-- | Register every (name, reason) fragility annotation from the archive.
processFragile :: Archive -> Idris ()
processFragile ar = do
  ns <- getEntry [] "ibc_fragile" ar
  -- uncurry keeps this consistent with processLibs/processCodegenFlags.
  mapM_ (uncurry addFragile) ns
-- | Re-add every universe constraint recorded in the archive.
processConstraints :: Archive -> Idris ()
processConstraints ar = do
  cs <- getEntry [] "ibc_constraints" ar
  forM_ cs $ \(fc, c) -> addConstraints fc (0, [c])
-- | Add each recorded import directory to the search path.
processImportDirs :: Archive -> Idris ()
processImportDirs ar =
  mapM_ addImportDir =<< getEntry [] "ibc_importdirs" ar
-- | Add each recorded source directory to the search path.
processSourceDirs :: Archive -> Idris ()
processSourceDirs ar =
  mapM_ addSourceDir =<< getEntry [] "ibc_sourcedirs" ar
-- | Recursively load every import recorded in the archive.  Only IBC
-- files may be imported; finding a source file instead is an error.
processImports :: Bool -> IBCPhase -> Archive -> Idris ()
processImports reexp phase ar = do
  fs <- getEntry [] "ibc_imports" ar
  mapM_ (\(re, f) -> do
    i <- getIState
    ibcsd <- valIBCSubDir i
    ids <- allImportDirs
    fp <- findImport ids ibcsd f
    -- if (f `elem` imported i)
    --  then logLvl 1 $ "Already read " ++ f
    putIState (i { imported = f : imported i })
    -- Anything imported transitively is no longer the top-level module.
    let phase' = case phase of
                      IBC_REPL _ -> IBC_REPL False
                      p -> p
    case fp of
        LIDR fn -> do
                      logIBC 1 $ "Failed at " ++ fn
                      ifail "Must be an ibc"
        IDR fn -> do
                     logIBC 1 $ "Failed at " ++ fn
                     ifail "Must be an ibc"
        -- Re-export only when both this archive and the import are
        -- marked for re-export.
        IBC fn src -> loadIBC (reexp && re) phase' fn) fs
-- | Load implicit-argument info, skipping names that are currently
-- Hidden or Private in the context.
processImplicits :: Archive -> Idris ()
processImplicits ar = do
  imps <- getEntry [] "ibc_implicits" ar
  mapM_ (\ (n, imp) -> do
    i <- getIState
    case lookupDefAccExact n False (tt_ctxt i) of
        Just (n, Hidden) -> return ()
        Just (n, Private) -> return ()
        _ -> putIState (i { idris_implicits = addDef n imp (idris_implicits i) })) imps
-- | Merge and re-sort the fixity declarations from the archive.
processInfix :: Archive -> Idris ()
processInfix ar = do
  fixes <- getEntry [] "ibc_fixes" ar
  updateIState $ \i -> i { idris_infixes = sort (fixes ++ idris_infixes i) }
-- | Install each static-argument annotation from the archive.
processStatics :: Archive -> Idris ()
processStatics ar = do
  ss <- getEntry [] "ibc_statics" ar
  forM_ ss $ \(n, s) ->
    updateIState (\i -> i { idris_statics = addDef n s (idris_statics i) })
-- | Load interface definitions, merging implementation lists with any
-- already-known ones so that nothing is lost across multiple IBCs.
processInterfaces :: Archive -> Idris ()
processInterfaces ar = do
  cs <- getEntry [] "ibc_interfaces" ar
  mapM_ (\ (n, c) -> do
    i <- getIState
    -- Don't lose implementations from previous IBCs, which
    -- could have loaded in any order
    let is = case lookupCtxtExact n (idris_interfaces i) of
                  Just ci -> interface_implementations ci
                  _ -> []
    let c' = c { interface_implementations = interface_implementations c ++ is }
    putIState (i { idris_interfaces = addDef n c' (idris_interfaces i) })) cs
-- | Restore record declarations.
processRecords :: Archive -> Idris ()
processRecords ar = getEntry [] "ibc_records" ar >>= mapM_ install
  where
    install (n, r) = updateIState $ \i -> i { idris_records = addDef n r (idris_records i) }

-- | Re-register interface implementations (name, resolution flag, ...).
processImplementations :: Archive -> Idris ()
processImplementations ar =
  getEntry [] "ibc_implementations" ar >>= mapM_ (\(i, res, n, ins) -> addImplementation i res n ins)

-- | Restore DSL (syntax overloading) definitions.
processDSLs :: Archive -> Idris ()
processDSLs ar = getEntry [] "ibc_dsls" ar >>= mapM_ install
  where
    install (n, c) = updateIState $ \i -> i { idris_dsls = addDef n c (idris_dsls i) }

-- | Restore datatype metadata.
processDatatypes :: Archive -> Idris ()
processDatatypes ar = getEntry [] "ibc_datatypes" ar >>= mapM_ install
  where
    install (n, c) = updateIState $ \i -> i { idris_datatypes = addDef n c (idris_datatypes i) }

-- | Restore per-name optimisation settings.
processOptimise :: Archive -> Idris ()
processOptimise ar = getEntry [] "ibc_optimise" ar >>= mapM_ install
  where
    install (n, c) = updateIState $ \i -> i { idris_optimisation = addDef n c (idris_optimisation i) }

-- | Merge the archive's syntax rules into the active rule set.
processSyntax :: Archive -> Idris ()
processSyntax ar = do
  s <- getEntry [] "ibc_syntax" ar
  updateIState $ \i -> i { syntax_rules = updateSyntaxRules s (syntax_rules i) }

-- | Prepend the archive's extra syntax keywords.
processKeywords :: Archive -> Idris ()
processKeywords ar = do
  k <- getEntry [] "ibc_keywords" ar
  updateIState $ \i -> i { syntax_keywords = k ++ syntax_keywords i }

-- | Locate each recorded object file on the import path and register it
-- with its code generator.
processObjectFiles :: Archive -> Idris ()
processObjectFiles ar = getEntry [] "ibc_objs" ar >>= mapM_ load
  where
    load (cg, obj) = do
      dirs <- allImportDirs
      path <- runIO $ findInPath dirs obj
      addObjectFile cg path

-- | Register linker libraries per code generator.
processLibs :: Archive -> Idris ()
processLibs ar = getEntry [] "ibc_libs" ar >>= mapM_ (uncurry addLib)

-- | Register extra code-generator flags.
processCodegenFlags :: Archive -> Idris ()
processCodegenFlags ar = getEntry [] "ibc_cgflags" ar >>= mapM_ (uncurry addFlag)

-- | Load the recorded dynamic libraries, failing on any load error.
processDynamicLibs :: Archive -> Idris ()
processDynamicLibs ar = do
  libs <- getEntry [] "ibc_dynamic_libs" ar
  results <- mapM (addDyLib . return) libs
  mapM_ report results
  where
    -- Left is a successful load; Right carries the loader's error message.
    report (Left _)    = return ()
    report (Right err) = ifail err

-- | Register C header files per code generator.
processHeaders :: Archive -> Idris ()
processHeaders ar = getEntry [] "ibc_hdrs" ar >>= mapM_ (uncurry addHdr)

-- | Restore pattern definitions, forcing them to avoid retaining the archive.
processPatternDefs :: Archive -> Idris ()
processPatternDefs ar = getEntry [] "ibc_patdefs" ar >>= mapM_ install
  where
    install (n, d) = updateIState $ \i -> i { idris_patdefs = addDef n (force d) (idris_patdefs i) }
-- | Restore elaborated definitions into the typechecking context.  Names in
-- terms are re-interned via 'getSymbol', and repeated subterms are shared via
-- 'addTT' to reduce memory use.  Loading a non-TyDecl definition also marks
-- any matching forward declaration as solved.
processDefs :: Archive -> Idris ()
processDefs ar = do
  ds <- getEntry [] "ibc_defs" ar
  mapM_ (\(n, d) -> do
           d' <- updateDef d
           case d' of
             TyDecl _ _ -> return ()
             _ -> do
               -- A full definition has arrived for this name.
               logIBC 1 $ "SOLVING " ++ show n
               solveDeferred emptyFC n
           updateIState (\i -> i { tt_ctxt = addCtxtDef n d' (tt_ctxt i) })) ds
  where
    -- Rewrite the bodies of a case-tree definition; other kinds of
    -- definition are left untouched.
    updateDef (CaseOp c t args o s cd) = do
      o' <- mapM updateOrig o
      cd' <- updateCD cd
      return $ CaseOp c t args o' s cd'
    updateDef t = return t

    updateOrig (Left t) = liftM Left (update t)
    updateOrig (Right (l, r)) = do
      l' <- update l
      r' <- update r
      return $ Right (l', r')

    updateCD (CaseDefs (cs, c) (rs, r)) = do
      c' <- updateSC c
      r' <- updateSC r
      return $ CaseDefs (cs, c') (rs, r')

    -- Walk a case tree, updating the terms at its leaves.
    updateSC (Case t n alts) = do
      alts' <- mapM updateAlt alts
      return (Case t n alts')
    updateSC (ProjCase t alts) = do
      alts' <- mapM updateAlt alts
      return (ProjCase t alts')
    updateSC (STerm t) = do
      t' <- update t
      return (STerm t')
    updateSC c = return c

    updateAlt (ConCase n i ns t) = do
      t' <- updateSC t
      return (ConCase n i ns t')
    updateAlt (FnCase n ns t) = do
      t' <- updateSC t
      return (FnCase n ns t')
    updateAlt (ConstCase c t) = do
      t' <- updateSC t
      return (ConstCase c t')
    updateAlt (SucCase n t) = do
      t' <- updateSC t
      return (SucCase n t')
    updateAlt (DefaultCase t) = do
      t' <- updateSC t
      return (DefaultCase t')

    -- We get a lot of repetition in sub terms and can save a fair chunk
    -- of memory if we make sure they're shared. addTT looks for a term
    -- and returns it if it exists already, while also keeping stats of
    -- how many times a subterm is repeated.
    update t = do
      tm <- addTT t
      case tm of
        Nothing -> update' t
        Just t' -> return t'

    update' (P t n ty) = do
      n' <- getSymbol n
      return $ P t n' ty
    -- NOTE(review): App/Proj children recurse via update' rather than update,
    -- so they bypass the addTT sharing table -- presumably intentional;
    -- confirm before changing.
    update' (App s f a) = liftM2 (App s) (update' f) (update' a)
    update' (Bind n b sc) = do
      b' <- updateB b
      sc' <- update sc
      return $ Bind n b' sc'
      where
        updateB (Let t v) = liftM2 Let (update' t) (update' v)
        updateB b = do
          ty' <- update' (binderTy b)
          return (b { binderTy = ty' })
    update' (Proj t i) = do
      t' <- update' t
      return $ Proj t' i
    update' t = return t
-- | Attach docstrings and argument docstrings to their names.
processDocs :: Archive -> Idris ()
processDocs ar = getEntry [] "ibc_docstrings" ar >>= mapM_ install
  where
    install (n, a) = addDocStr n (fst a) (snd a)

-- | Restore module-level documentation.
processModuleDocs :: Archive -> Idris ()
processModuleDocs ar = getEntry [] "ibc_moduledocs" ar >>= mapM_ install
  where
    install (n, d) = updateIState $ \i -> i { idris_moduledocs = addDef n d (idris_moduledocs i) }
-- | Restore accessibility (visibility) settings for each name.  When not
-- re-exporting, everything from this IBC is hidden from our importers; when
-- loading for the REPL's own module, publicly imported names stay Public so
-- they remain usable interactively.
processAccess :: Bool -- ^ Reexporting?
              -> IBCPhase
              -> Archive -> Idris ()
processAccess reexp phase ar = do
  ds <- getEntry [] "ibc_access" ar
  mapM_ (\(n, a_in) -> do
           -- Non-reexported imports are demoted to Hidden.
           let a = if reexp then a_in else Hidden
           logIBC 3 $ "Setting " ++ show (a, n) ++ " to " ++ show a
           updateIState (\i -> i { tt_ctxt = setAccess n a (tt_ctxt i) })
           if (not reexp)
             then do logIBC 1 $ "Not exporting " ++ show n
                     setAccessibility n Hidden
             else logIBC 1 $ "Exporting " ++ show n
           -- Everything should be available at the REPL from
           -- things imported publicly
           when (phase == IBC_REPL True) $ setAccessibility n Public) ds
-- | Restore per-function elaboration flags.
processFlags :: Archive -> Idris ()
processFlags ar = getEntry [] "ibc_flags" ar >>= mapM_ (uncurry setFlags)

-- | Restore per-function code-generation info.
processFnInfo :: Archive -> Idris ()
processFnInfo ar = getEntry [] "ibc_fninfo" ar >>= mapM_ (uncurry setFnInfo)

-- | Restore recorded totality results.
processTotal :: Archive -> Idris ()
processTotal ar = getEntry [] "ibc_total" ar >>= mapM_ install
  where
    install (n, t) = updateIState $ \i -> i { tt_ctxt = setTotal n t (tt_ctxt i) }

-- | Restore recorded injectivity results.
processInjective :: Archive -> Idris ()
processInjective ar = getEntry [] "ibc_injective" ar >>= mapM_ install
  where
    install (n, inj) = updateIState $ \i -> i { tt_ctxt = setInjective n inj (tt_ctxt i) }

-- | Append the names whose totality check failed (with their errors).
processTotalityCheckError :: Archive -> Idris ()
processTotalityCheckError ar = do
  es <- getEntry [] "ibc_totcheckfail" ar
  updateIState $ \i -> i { idris_totcheckfail = idris_totcheckfail i ++ es }

-- | Restore call-graph entries.
processCallgraph :: Archive -> Idris ()
processCallgraph ar = getEntry [] "ibc_cg" ar >>= mapM_ (uncurry addToCG)

-- | Re-register coercion functions.
processCoercions :: Archive -> Idris ()
processCoercions ar = getEntry [] "ibc_coercions" ar >>= mapM_ addCoercion

-- | Re-register term transformation rules.
processTransforms :: Archive -> Idris ()
processTransforms ar = getEntry [] "ibc_transforms" ar >>= mapM_ (uncurry addTrans)

-- | Re-register error-reversal rules.
processErrRev :: Archive -> Idris ()
processErrRev ar = getEntry [] "ibc_errRev" ar >>= mapM_ addErrRev

-- | Restore source-location info for internal applications.
processLineApps :: Archive -> Idris ()
processLineApps ar =
  getEntry [] "ibc_lineapps" ar >>= mapM_ (\(f, i, t) -> addInternalApp f i t)

-- | Restore proof-search name hints.
processNameHints :: Archive -> Idris ()
processNameHints ar = getEntry [] "ibc_namehints" ar >>= mapM_ (uncurry addNameHint)

-- | Restore per-name meta-information in the context.
processMetaInformation :: Archive -> Idris ()
processMetaInformation ar = getEntry [] "ibc_metainformation" ar >>= mapM_ install
  where
    install (n, m) = updateIState $ \i -> i { tt_ctxt = setMetaInformation n m (tt_ctxt i) }

-- | Append the archive's registered error handlers.
processErrorHandlers :: Archive -> Idris ()
processErrorHandlers ar = do
  ns <- getEntry [] "ibc_errorhandlers" ar
  updateIState $ \i -> i { idris_errorhandlers = idris_errorhandlers i ++ ns }

-- | Restore per-function, per-argument error handlers.
processFunctionErrorHandlers :: Archive -> Idris ()
processFunctionErrorHandlers ar =
  getEntry [] "ibc_function_errorhandlers" ar
    >>= mapM_ (\(fn, arg, handler) -> addFunctionErrorHandlers fn arg [handler])

-- | Restore unsolved metavariables (stored reversed; restore their order).
processMetaVars :: Archive -> Idris ()
processMetaVars ar = do
  ns <- getEntry [] "ibc_metavars" ar
  updateIState $ \i -> i { idris_metavars = L.reverse ns ++ idris_metavars i }
----- For Cheapskate and docstrings
-- Serialisers for Cheapskate documents and docstrings.  The tag bytes below
-- are part of the on-disk IBC format and must not change.
instance Binary a => Binary (D.Docstring a) where
  put (D.DocString opts content) = put opts >> put content
  get = D.DocString <$> get <*> get

instance Binary CT.Options where
  put (CT.Options x1 x2 x3 x4) = put x1 >> put x2 >> put x3 >> put x4
  get = CT.Options <$> get <*> get <*> get <*> get

instance Binary D.DocTerm where
  put D.Unchecked   = putWord8 0
  put (D.Checked t) = putWord8 1 >> put t
  put (D.Example t) = putWord8 2 >> put t
  put (D.Failing e) = putWord8 3 >> put e
  get = getWord8 >>= \tag -> case tag of
    0 -> return D.Unchecked
    1 -> fmap D.Checked get
    2 -> fmap D.Example get
    3 -> fmap D.Failing get
    _ -> error "Corrupted binary data for DocTerm"

instance Binary a => Binary (D.Block a) where
  put (D.Para ls)                = putWord8 0 >> put ls
  put (D.Header i ls)            = putWord8 1 >> put i >> put ls
  put (D.Blockquote bs)          = putWord8 2 >> put bs
  put (D.List b t xs)            = putWord8 3 >> put b >> put t >> put xs
  put (D.CodeBlock attr txt src) = putWord8 4 >> put attr >> put txt >> put src
  put (D.HtmlBlock txt)          = putWord8 5 >> put txt
  put D.HRule                    = putWord8 6
  get = getWord8 >>= \tag -> case tag of
    0 -> D.Para <$> get
    1 -> D.Header <$> get <*> get
    2 -> D.Blockquote <$> get
    3 -> D.List <$> get <*> get <*> get
    4 -> D.CodeBlock <$> get <*> get <*> get
    5 -> D.HtmlBlock <$> get
    6 -> return D.HRule
    _ -> error "Corrupted binary data for Block"

instance Binary a => Binary (D.Inline a) where
  put (D.Str txt)     = putWord8 0 >> put txt
  put D.Space         = putWord8 1
  put D.SoftBreak     = putWord8 2
  put D.LineBreak     = putWord8 3
  put (D.Emph xs)     = putWord8 4 >> put xs
  put (D.Strong xs)   = putWord8 5 >> put xs
  put (D.Code xs tm)  = putWord8 6 >> put xs >> put tm
  put (D.Link a b c)  = putWord8 7 >> put a >> put b >> put c
  put (D.Image a b c) = putWord8 8 >> put a >> put b >> put c
  put (D.Entity a)    = putWord8 9 >> put a
  put (D.RawHtml x)   = putWord8 10 >> put x
  get = getWord8 >>= \tag -> case tag of
    0 -> D.Str <$> get
    1 -> return D.Space
    2 -> return D.SoftBreak
    3 -> return D.LineBreak
    4 -> D.Emph <$> get
    5 -> D.Strong <$> get
    6 -> D.Code <$> get <*> get
    7 -> D.Link <$> get <*> get <*> get
    8 -> D.Image <$> get <*> get <*> get
    9 -> D.Entity <$> get
    10 -> D.RawHtml <$> get
    _ -> error "Corrupted binary data for Inline"

instance Binary CT.ListType where
  put (CT.Bullet c)      = putWord8 0 >> put c
  put (CT.Numbered nw i) = putWord8 1 >> put nw >> put i
  get = getWord8 >>= \tag -> case tag of
    0 -> CT.Bullet <$> get
    1 -> CT.Numbered <$> get <*> get
    _ -> error "Corrupted binary data for ListType"

instance Binary CT.CodeAttr where
  put (CT.CodeAttr a b) = put a >> put b
  get = CT.CodeAttr <$> get <*> get

instance Binary CT.NumWrapper where
  put CT.PeriodFollowing = putWord8 0
  put CT.ParenFollowing  = putWord8 1
  get = getWord8 >>= \tag -> case tag of
    0 -> return CT.PeriodFollowing
    1 -> return CT.ParenFollowing
    _ -> error "Corrupted binary data for NumWrapper"
----- Generated by 'derive'
instance Binary SizeChange where
  put Smaller = putWord8 0
  put Same    = putWord8 1
  put Bigger  = putWord8 2
  put Unknown = putWord8 3
  get = getWord8 >>= \tag -> case tag of
    0 -> return Smaller
    1 -> return Same
    2 -> return Bigger
    3 -> return Unknown
    _ -> error "Corrupted binary data for SizeChange"

instance Binary CGInfo where
  -- The third field (SCG info) is deliberately not serialised: it has
  -- already been used for the totality check, so it is restored as [].
  put (CGInfo x1 x2 _ x4) = put x1 >> put x2 >> put x4
  get = (\x1 x2 x4 -> CGInfo x1 x2 [] x4) <$> get <*> get <*> get

instance Binary CaseType where
  put Updatable = putWord8 0
  put Shared    = putWord8 1
  get = getWord8 >>= \tag -> case tag of
    0 -> return Updatable
    1 -> return Shared
    _ -> error "Corrupted binary data for CaseType"
instance Binary SC where
  put sc = case sc of
    Case x1 x2 x3    -> putWord8 0 >> put x1 >> put x2 >> put x3
    ProjCase x1 x2   -> putWord8 1 >> put x1 >> put x2
    STerm x1         -> putWord8 2 >> put x1
    UnmatchedCase x1 -> putWord8 3 >> put x1
    ImpossibleCase   -> putWord8 4
  get = getWord8 >>= \tag -> case tag of
    0 -> Case <$> get <*> get <*> get
    1 -> ProjCase <$> get <*> get
    2 -> STerm <$> get
    3 -> UnmatchedCase <$> get
    4 -> return ImpossibleCase
    _ -> error "Corrupted binary data for SC"

instance Binary CaseAlt where
  put alt = {-# SCC "putCaseAlt" #-}
    case alt of
      ConCase x1 x2 x3 x4 -> putWord8 0 >> put x1 >> put x2 >> put x3 >> put x4
      ConstCase x1 x2     -> putWord8 1 >> put x1 >> put x2
      DefaultCase x1      -> putWord8 2 >> put x1
      FnCase x1 x2 x3     -> putWord8 3 >> put x1 >> put x2 >> put x3
      SucCase x1 x2       -> putWord8 4 >> put x1 >> put x2
  get = getWord8 >>= \tag -> case tag of
    0 -> ConCase <$> get <*> get <*> get <*> get
    1 -> ConstCase <$> get <*> get
    2 -> DefaultCase <$> get
    3 -> FnCase <$> get <*> get <*> get
    4 -> SucCase <$> get <*> get
    _ -> error "Corrupted binary data for CaseAlt"
instance Binary CaseDefs where
  put (CaseDefs x1 x2) = put x1 >> put x2
  get = CaseDefs <$> get <*> get

instance Binary CaseInfo where
  put (CaseInfo x1 x2 x3) = put x1 >> put x2 >> put x3
  get = CaseInfo <$> get <*> get <*> get
instance Binary Def where
  put d = {-# SCC "putDef" #-}
    case d of
      Function x1 x2 -> putWord8 0 >> put x1 >> put x2
      TyDecl x1 x2   -> putWord8 1 >> put x1 >> put x2
      -- All primitives are re-added at startup, so Operator is never
      -- written (and tag 2 is never read back).
      Operator _ _ _ -> return ()
      -- The original pattern clauses (4th/5th fields) are only needed while
      -- totality checking, so they are dropped here and restored as [].
      CaseOp x1 x2 x3 _ _ x4 -> putWord8 3 >> put x1 >> put x2 >> put x3 >> put x4
  get = getWord8 >>= \tag -> case tag of
    0 -> Function <$> get <*> get
    1 -> TyDecl <$> get <*> get
    -- Operator isn't written, so tag 2 doesn't appear here.
    3 -> (\x1 x2 x3 cd -> CaseOp x1 x2 x3 [] [] cd) <$> get <*> get <*> get <*> get
    _ -> error "Corrupted binary data for Def"
instance Binary Accessibility where
  put Public  = putWord8 0
  put Frozen  = putWord8 1
  put Private = putWord8 2
  put Hidden  = putWord8 3
  get = getWord8 >>= \tag -> case tag of
    0 -> return Public
    1 -> return Frozen
    2 -> return Private
    3 -> return Hidden
    _ -> error "Corrupted binary data for Accessibility"
-- | A bounds-checked 'toEnum': converts a deserialised integer to an
-- enumeration value, failing with @label@ if it falls outside the type's
-- Bounded range (which indicates a corrupted IBC).
safeToEnum :: (Enum a, Bounded a, Integral int) => String -> int -> a
safeToEnum label raw
  | i < fromEnum (minBound `asTypeOf` out) ||
    i > fromEnum (maxBound `asTypeOf` out)
  = error $ label ++ ": corrupted binary representation in IBC"
  | otherwise = out
  where
    i   = fromIntegral raw
    -- asTypeOf above only pins the type; it never forces this thunk,
    -- so the error branch stays safe.
    out = toEnum i
instance Binary PReason where
  put r = case r of
    Other x1      -> putWord8 0 >> put x1
    Itself        -> putWord8 1
    NotCovering   -> putWord8 2
    NotPositive   -> putWord8 3
    Mutual x1     -> putWord8 4 >> put x1
    NotProductive -> putWord8 5
    BelieveMe     -> putWord8 6
    UseUndef x1   -> putWord8 7 >> put x1
    ExternalIO    -> putWord8 8
  get = getWord8 >>= \tag -> case tag of
    0 -> Other <$> get
    1 -> return Itself
    2 -> return NotCovering
    3 -> return NotPositive
    4 -> Mutual <$> get
    5 -> return NotProductive
    6 -> return BelieveMe
    7 -> UseUndef <$> get
    8 -> return ExternalIO
    _ -> error "Corrupted binary data for PReason"
instance Binary Totality where
  put t = case t of
    Total x1   -> putWord8 0 >> put x1
    Partial x1 -> putWord8 1 >> put x1
    Unchecked  -> putWord8 2
    Productive -> putWord8 3
    Generated  -> putWord8 4
  get = getWord8 >>= \tag -> case tag of
    0 -> Total <$> get
    1 -> Partial <$> get
    2 -> return Unchecked
    3 -> return Productive
    4 -> return Generated
    _ -> error "Corrupted binary data for Totality"

instance Binary MetaInformation where
  put EmptyMI     = putWord8 0
  put (DataMI x1) = putWord8 1 >> put x1
  get = getWord8 >>= \tag -> case tag of
    0 -> return EmptyMI
    1 -> DataMI <$> get
    _ -> error "Corrupted binary data for MetaInformation"
instance Binary DataOpt where
  put Codata            = putWord8 0
  put DefaultEliminator = putWord8 1
  put DataErrRev        = putWord8 2
  put DefaultCaseFun    = putWord8 3
  get = getWord8 >>= \tag -> case tag of
    0 -> return Codata
    1 -> return DefaultEliminator
    2 -> return DataErrRev
    3 -> return DefaultCaseFun
    _ -> error "Corrupted binary data for DataOpt"
instance Binary FnOpt where
  put o = case o of
    Inlinable             -> putWord8 0
    TotalFn               -> putWord8 1
    Dictionary            -> putWord8 2
    AssertTotal           -> putWord8 3
    Specialise x          -> putWord8 4 >> put x
    AllGuarded            -> putWord8 5
    PartialFn             -> putWord8 6
    Implicit              -> putWord8 7
    Reflection            -> putWord8 8
    ErrorHandler          -> putWord8 9
    ErrorReverse          -> putWord8 10
    CoveringFn            -> putWord8 11
    NoImplicit            -> putWord8 12
    Constructor           -> putWord8 13
    CExport x1            -> putWord8 14 >> put x1
    AutoHint              -> putWord8 15
    PEGenerated           -> putWord8 16
    StaticFn              -> putWord8 17
    OverlappingDictionary -> putWord8 18
    UnfoldIface x ns      -> putWord8 19 >> put x >> put ns
  get = getWord8 >>= \tag -> case tag of
    0 -> return Inlinable
    1 -> return TotalFn
    2 -> return Dictionary
    3 -> return AssertTotal
    4 -> Specialise <$> get
    5 -> return AllGuarded
    6 -> return PartialFn
    7 -> return Implicit
    8 -> return Reflection
    9 -> return ErrorHandler
    10 -> return ErrorReverse
    11 -> return CoveringFn
    12 -> return NoImplicit
    13 -> return Constructor
    14 -> CExport <$> get
    15 -> return AutoHint
    16 -> return PEGenerated
    17 -> return StaticFn
    18 -> return OverlappingDictionary
    19 -> UnfoldIface <$> get <*> get
    _ -> error "Corrupted binary data for FnOpt"
instance Binary Fixity where
  put (Infixl x1)  = putWord8 0 >> put x1
  put (Infixr x1)  = putWord8 1 >> put x1
  put (InfixN x1)  = putWord8 2 >> put x1
  put (PrefixN x1) = putWord8 3 >> put x1
  get = getWord8 >>= \tag -> case tag of
    0 -> Infixl <$> get
    1 -> Infixr <$> get
    2 -> InfixN <$> get
    3 -> PrefixN <$> get
    _ -> error "Corrupted binary data for Fixity"

instance Binary FixDecl where
  put (Fix x1 x2) = put x1 >> put x2
  get = Fix <$> get <*> get
instance Binary ArgOpt where
  -- | Argument-display options, one tag byte each.
  put x
    = case x of
        HideDisplay -> putWord8 0
        InaccessibleArg -> putWord8 1
        AlwaysShow -> putWord8 2
        UnknownImp -> putWord8 3
  get
    = do i <- getWord8
         case i of
           0 -> return HideDisplay
           1 -> return InaccessibleArg
           2 -> return AlwaysShow
           3 -> return UnknownImp
           -- Fixed: this previously said "for Static" (copy-pasted from the
           -- neighbouring Static instance), misidentifying which entry of
           -- the IBC was corrupt.
           _ -> error "Corrupted binary data for ArgOpt"
instance Binary Static where
  put Static  = putWord8 0
  put Dynamic = putWord8 1
  get = getWord8 >>= \tag -> case tag of
    0 -> return Static
    1 -> return Dynamic
    _ -> error "Corrupted binary data for Static"
instance Binary Plicity where
  put p = case p of
    -- The fifth Imp field (a machine-inserted flag) is not serialised;
    -- it is restored as False on read.
    Imp x1 x2 x3 x4 _ x5 -> putWord8 0 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5
    Exp x1 x2 x3 x4      -> putWord8 1 >> put x1 >> put x2 >> put x3 >> put x4
    Constraint x1 x2 x3  -> putWord8 2 >> put x1 >> put x2 >> put x3
    TacImp x1 x2 x3 x4   -> putWord8 3 >> put x1 >> put x2 >> put x3 >> put x4
  get = getWord8 >>= \tag -> case tag of
    0 -> (\x1 x2 x3 x4 x5 -> Imp x1 x2 x3 x4 False x5)
           <$> get <*> get <*> get <*> get <*> get
    1 -> Exp <$> get <*> get <*> get <*> get
    2 -> Constraint <$> get <*> get <*> get
    3 -> TacImp <$> get <*> get <*> get <*> get
    _ -> error "Corrupted binary data for Plicity"
instance (Binary t) => Binary (PDecl' t) where
  -- Tag bytes are part of the on-disk format.  Tag 14 (PDirective) is never
  -- written: directives cannot be serialised, so both directions fail loudly.
  put decl = case decl of
    PFix x1 x2 x3 -> putWord8 0 >> put x1 >> put x2 >> put x3
    PTy x1 x2 x3 x4 x5 x6 x7 x8 ->
      putWord8 1 >> put x1 >> put x2 >> put x3 >> put x4
                 >> put x5 >> put x6 >> put x7 >> put x8
    PClauses x1 x2 x3 x4 -> putWord8 2 >> put x1 >> put x2 >> put x3 >> put x4
    PData x1 x2 x3 x4 x5 x6 ->
      putWord8 3 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6
    PParams x1 x2 x3    -> putWord8 4 >> put x1 >> put x2 >> put x3
    PNamespace x1 x2 x3 -> putWord8 5 >> put x1 >> put x2 >> put x3
    PRecord x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 ->
      putWord8 6 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6
                 >> put x7 >> put x8 >> put x9 >> put x10 >> put x11 >> put x12
    PInterface x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 ->
      putWord8 7 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6
                 >> put x7 >> put x8 >> put x9 >> put x10 >> put x11 >> put x12
    PImplementation x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 ->
      putWord8 8 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5
                 >> put x6 >> put x7 >> put x8 >> put x9 >> put x10
                 >> put x11 >> put x12 >> put x13 >> put x14 >> put x15
    PDSL x1 x2      -> putWord8 9 >> put x1 >> put x2
    PCAF x1 x2 x3   -> putWord8 10 >> put x1 >> put x2 >> put x3
    PMutual x1 x2   -> putWord8 11 >> put x1 >> put x2
    PPostulate x1 x2 x3 x4 x5 x6 x7 x8 ->
      putWord8 12 >> put x1 >> put x2 >> put x3 >> put x4
                  >> put x5 >> put x6 >> put x7 >> put x8
    PSyntax x1 x2   -> putWord8 13 >> put x1 >> put x2
    PDirective _    -> error "Cannot serialize PDirective"
    PProvider x1 x2 x3 x4 x5 x6 ->
      putWord8 15 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6
    PTransform x1 x2 x3 x4 -> putWord8 16 >> put x1 >> put x2 >> put x3 >> put x4
    PRunElabDecl x1 x2 x3 -> putWord8 17 >> put x1 >> put x2 >> put x3
    POpenInterfaces x1 x2 x3 -> putWord8 18 >> put x1 >> put x2 >> put x3
  get = getWord8 >>= \tag -> case tag of
    0 -> PFix <$> get <*> get <*> get
    1 -> PTy <$> get <*> get <*> get <*> get <*> get <*> get <*> get <*> get
    2 -> PClauses <$> get <*> get <*> get <*> get
    3 -> PData <$> get <*> get <*> get <*> get <*> get <*> get
    4 -> PParams <$> get <*> get <*> get
    5 -> PNamespace <$> get <*> get <*> get
    6 -> PRecord <$> get <*> get <*> get <*> get <*> get <*> get
                 <*> get <*> get <*> get <*> get <*> get <*> get
    7 -> PInterface <$> get <*> get <*> get <*> get <*> get <*> get
                    <*> get <*> get <*> get <*> get <*> get <*> get
    8 -> PImplementation <$> get <*> get <*> get <*> get <*> get
                         <*> get <*> get <*> get <*> get <*> get
                         <*> get <*> get <*> get <*> get <*> get
    9 -> PDSL <$> get <*> get
    10 -> PCAF <$> get <*> get <*> get
    11 -> PMutual <$> get <*> get
    12 -> PPostulate <$> get <*> get <*> get <*> get <*> get <*> get <*> get <*> get
    13 -> PSyntax <$> get <*> get
    14 -> error "Cannot deserialize PDirective"
    15 -> PProvider <$> get <*> get <*> get <*> get <*> get <*> get
    16 -> PTransform <$> get <*> get <*> get <*> get
    17 -> PRunElabDecl <$> get <*> get <*> get
    18 -> POpenInterfaces <$> get <*> get <*> get
    _ -> error "Corrupted binary data for PDecl'"
instance Binary t => Binary (ProvideWhat' t) where
  put (ProvTerm x1 x2)   = putWord8 0 >> put x1 >> put x2
  put (ProvPostulate x1) = putWord8 1 >> put x1
  get = getWord8 >>= \tag -> case tag of
    0 -> ProvTerm <$> get <*> get
    1 -> ProvPostulate <$> get
    _ -> error "Corrupted binary data for ProvideWhat"

instance Binary Using where
  put (UImplicit x1 x2)   = putWord8 0 >> put x1 >> put x2
  put (UConstraint x1 x2) = putWord8 1 >> put x1 >> put x2
  get = getWord8 >>= \tag -> case tag of
    0 -> UImplicit <$> get <*> get
    1 -> UConstraint <$> get <*> get
    _ -> error "Corrupted binary data for Using"
instance Binary SyntaxInfo where
  -- Only 8 of the 15 Syn fields are serialised; the rest are
  -- elaboration-context state and are reset to defaults on load.
  put (Syn x1 x2 x3 x4 _ _ x5 x6 x7 _ _ x8 _ _ _) =
    put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6 >> put x7 >> put x8
  get = (\x1 x2 x3 x4 x5 x6 x7 x8 ->
           Syn x1 x2 x3 x4 [] id x5 x6 x7 Nothing 0 x8 0 True True)
          <$> get <*> get <*> get <*> get <*> get <*> get <*> get <*> get
instance (Binary t) => Binary (PClause' t) where
  put cl = case cl of
    PClause x1 x2 x3 x4 x5 x6 ->
      putWord8 0 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6
    PWith x1 x2 x3 x4 x5 x6 x7 ->
      putWord8 1 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6 >> put x7
    PClauseR x1 x2 x3 x4 ->
      putWord8 2 >> put x1 >> put x2 >> put x3 >> put x4
    PWithR x1 x2 x3 x4 x5 ->
      putWord8 3 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5
  get = getWord8 >>= \tag -> case tag of
    0 -> PClause <$> get <*> get <*> get <*> get <*> get <*> get
    1 -> PWith <$> get <*> get <*> get <*> get <*> get <*> get <*> get
    2 -> PClauseR <$> get <*> get <*> get <*> get
    3 -> PWithR <$> get <*> get <*> get <*> get <*> get
    _ -> error "Corrupted binary data for PClause'"
instance (Binary t) => Binary (PData' t) where
  put (PDatadecl x1 x2 x3 x4) = putWord8 0 >> put x1 >> put x2 >> put x3 >> put x4
  put (PLaterdecl x1 x2 x3)   = putWord8 1 >> put x1 >> put x2 >> put x3
  get = getWord8 >>= \tag -> case tag of
    0 -> PDatadecl <$> get <*> get <*> get <*> get
    1 -> PLaterdecl <$> get <*> get <*> get
    _ -> error "Corrupted binary data for PData'"

instance Binary PunInfo where
  put TypeOrTerm = putWord8 0
  put IsType     = putWord8 1
  put IsTerm     = putWord8 2
  get = getWord8 >>= \tag -> case tag of
    0 -> return TypeOrTerm
    1 -> return IsType
    2 -> return IsTerm
    _ -> error "Corrupted binary data for PunInfo"
instance Binary PTerm where
  -- Tag bytes are part of the on-disk IBC format; the gaps in the numbering
  -- (13, 14, 16, 28, 32, ...) belong to retired constructors and must stay
  -- unused.  PAppImpl is an internal elaboration artefact and must never
  -- appear in a final term, so writing it is an error.
  put term = case term of
    PQuote x1               -> putWord8 0 >> put x1
    PRef x1 x2 x3           -> putWord8 1 >> put x1 >> put x2 >> put x3
    PInferRef x1 x2 x3      -> putWord8 2 >> put x1 >> put x2 >> put x3
    PPatvar x1 x2           -> putWord8 3 >> put x1 >> put x2
    PLam x1 x2 x3 x4 x5     -> putWord8 4 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5
    PPi x1 x2 x3 x4 x5      -> putWord8 5 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5
    PLet x1 x2 x3 x4 x5 x6  -> putWord8 6 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6
    PTyped x1 x2            -> putWord8 7 >> put x1 >> put x2
    PAppImpl _ _            -> error "PAppImpl in final term"
    PApp x1 x2 x3           -> putWord8 8 >> put x1 >> put x2 >> put x3
    PAppBind x1 x2 x3       -> putWord8 9 >> put x1 >> put x2 >> put x3
    PMatchApp x1 x2         -> putWord8 10 >> put x1 >> put x2
    PCase x1 x2 x3          -> putWord8 11 >> put x1 >> put x2 >> put x3
    PTrue x1 x2             -> putWord8 12 >> put x1 >> put x2
    PResolveTC x1           -> putWord8 15 >> put x1
    PRewrite x1 x2 x3 x4 x5 -> putWord8 17 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5
    PPair x1 x2 x3 x4 x5    -> putWord8 18 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5
    PDPair x1 x2 x3 x4 x5 x6 -> putWord8 19 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6
    PAlternative x1 x2 x3   -> putWord8 20 >> put x1 >> put x2 >> put x3
    PHidden x1              -> putWord8 21 >> put x1
    PType x1                -> putWord8 22 >> put x1
    PGoal x1 x2 x3 x4       -> putWord8 23 >> put x1 >> put x2 >> put x3 >> put x4
    PConstant x1 x2         -> putWord8 24 >> put x1 >> put x2
    Placeholder             -> putWord8 25
    PDoBlock x1             -> putWord8 26 >> put x1
    PIdiom x1 x2            -> putWord8 27 >> put x1 >> put x2
    PMetavar x1 x2          -> putWord8 29 >> put x1 >> put x2
    PProof x1               -> putWord8 30 >> put x1
    PTactics x1             -> putWord8 31 >> put x1
    PImpossible             -> putWord8 33
    PCoerced x1             -> putWord8 34 >> put x1
    PUnifyLog x1            -> putWord8 35 >> put x1
    PNoImplicits x1         -> putWord8 36 >> put x1
    PDisamb x1 x2           -> putWord8 37 >> put x1 >> put x2
    PUniverse x1 x2         -> putWord8 38 >> put x1 >> put x2
    PRunElab x1 x2 x3       -> putWord8 39 >> put x1 >> put x2 >> put x3
    PAs x1 x2 x3            -> putWord8 40 >> put x1 >> put x2 >> put x3
    PElabError x1           -> putWord8 41 >> put x1
    PQuasiquote x1 x2       -> putWord8 42 >> put x1 >> put x2
    PUnquote x1             -> putWord8 43 >> put x1
    PQuoteName x1 x2 x3     -> putWord8 44 >> put x1 >> put x2 >> put x3
    PIfThenElse x1 x2 x3 x4 -> putWord8 45 >> put x1 >> put x2 >> put x3 >> put x4
    PConstSugar x1 x2       -> putWord8 46 >> put x1 >> put x2
    PWithApp x1 x2 x3       -> putWord8 47 >> put x1 >> put x2 >> put x3
  get = getWord8 >>= \tag -> case tag of
    0 -> PQuote <$> get
    1 -> PRef <$> get <*> get <*> get
    2 -> PInferRef <$> get <*> get <*> get
    3 -> PPatvar <$> get <*> get
    4 -> PLam <$> get <*> get <*> get <*> get <*> get
    5 -> PPi <$> get <*> get <*> get <*> get <*> get
    6 -> PLet <$> get <*> get <*> get <*> get <*> get <*> get
    7 -> PTyped <$> get <*> get
    8 -> PApp <$> get <*> get <*> get
    9 -> PAppBind <$> get <*> get <*> get
    10 -> PMatchApp <$> get <*> get
    11 -> PCase <$> get <*> get <*> get
    12 -> PTrue <$> get <*> get
    15 -> PResolveTC <$> get
    17 -> PRewrite <$> get <*> get <*> get <*> get <*> get
    18 -> PPair <$> get <*> get <*> get <*> get <*> get
    19 -> PDPair <$> get <*> get <*> get <*> get <*> get <*> get
    20 -> PAlternative <$> get <*> get <*> get
    21 -> PHidden <$> get
    22 -> PType <$> get
    23 -> PGoal <$> get <*> get <*> get <*> get
    24 -> PConstant <$> get <*> get
    25 -> return Placeholder
    26 -> PDoBlock <$> get
    27 -> PIdiom <$> get <*> get
    29 -> PMetavar <$> get <*> get
    30 -> PProof <$> get
    31 -> PTactics <$> get
    33 -> return PImpossible
    34 -> PCoerced <$> get
    35 -> PUnifyLog <$> get
    36 -> PNoImplicits <$> get
    37 -> PDisamb <$> get <*> get
    38 -> PUniverse <$> get <*> get
    39 -> PRunElab <$> get <*> get <*> get
    40 -> PAs <$> get <*> get <*> get
    41 -> PElabError <$> get
    42 -> PQuasiquote <$> get <*> get
    43 -> PUnquote <$> get
    44 -> PQuoteName <$> get <*> get <*> get
    45 -> PIfThenElse <$> get <*> get <*> get <*> get
    46 -> PConstSugar <$> get <*> get
    47 -> PWithApp <$> get <*> get <*> get
    _ -> error "Corrupted binary data for PTerm"
instance Binary PAltType where
  put (ExactlyOne x1) = putWord8 0 >> put x1
  put FirstSuccess    = putWord8 1
  put TryImplicit     = putWord8 2
  get = getWord8 >>= \tag -> case tag of
    0 -> ExactlyOne <$> get
    1 -> return FirstSuccess
    2 -> return TryImplicit
    _ -> error "Corrupted binary data for PAltType"
instance (Binary t) => Binary (PTactic' t) where
  put tac = case tac of
    Intro x1          -> putWord8 0 >> put x1
    Focus x1          -> putWord8 1 >> put x1
    Refine x1 x2      -> putWord8 2 >> put x1 >> put x2
    Rewrite x1        -> putWord8 3 >> put x1
    LetTac x1 x2      -> putWord8 4 >> put x1 >> put x2
    Exact x1          -> putWord8 5 >> put x1
    Compute           -> putWord8 6
    Trivial           -> putWord8 7
    Solve             -> putWord8 8
    Attack            -> putWord8 9
    ProofState        -> putWord8 10
    ProofTerm         -> putWord8 11
    Undo              -> putWord8 12
    Try x1 x2         -> putWord8 13 >> put x1 >> put x2
    TSeq x1 x2        -> putWord8 14 >> put x1 >> put x2
    Qed               -> putWord8 15
    ApplyTactic x1    -> putWord8 16 >> put x1
    Reflect x1        -> putWord8 17 >> put x1
    Fill x1           -> putWord8 18 >> put x1
    Induction x1      -> putWord8 19 >> put x1
    ByReflection x1   -> putWord8 20 >> put x1
    ProofSearch x1 x2 x3 x4 x5 x6 ->
      putWord8 21 >> put x1 >> put x2 >> put x3 >> put x4 >> put x5 >> put x6
    DoUnify           -> putWord8 22
    CaseTac x1        -> putWord8 23 >> put x1
    SourceFC          -> putWord8 24
    Intros            -> putWord8 25
    Equiv x1          -> putWord8 26 >> put x1
    Claim x1 x2       -> putWord8 27 >> put x1 >> put x2
    Unfocus           -> putWord8 28
    MatchRefine x1    -> putWord8 29 >> put x1
    LetTacTy x1 x2 x3 -> putWord8 30 >> put x1 >> put x2 >> put x3
    TCImplementation  -> putWord8 31
    GoalType x1 x2    -> putWord8 32 >> put x1 >> put x2
    TCheck x1         -> putWord8 33 >> put x1
    TEval x1          -> putWord8 34 >> put x1
    TDocStr x1        -> putWord8 35 >> put x1
    TSearch x1        -> putWord8 36 >> put x1
    Skip              -> putWord8 37
    TFail x1          -> putWord8 38 >> put x1
    Abandon           -> putWord8 39
  get = getWord8 >>= \tag -> case tag of
    0 -> Intro <$> get
    1 -> Focus <$> get
    2 -> Refine <$> get <*> get
    3 -> Rewrite <$> get
    4 -> LetTac <$> get <*> get
    5 -> Exact <$> get
    6 -> return Compute
    7 -> return Trivial
    8 -> return Solve
    9 -> return Attack
    10 -> return ProofState
    11 -> return ProofTerm
    12 -> return Undo
    13 -> Try <$> get <*> get
    14 -> TSeq <$> get <*> get
    15 -> return Qed
    16 -> ApplyTactic <$> get
    17 -> Reflect <$> get
    18 -> Fill <$> get
    19 -> Induction <$> get
    20 -> ByReflection <$> get
    21 -> ProofSearch <$> get <*> get <*> get <*> get <*> get <*> get
    22 -> return DoUnify
    23 -> CaseTac <$> get
    24 -> return SourceFC
    25 -> return Intros
    26 -> Equiv <$> get
    27 -> Claim <$> get <*> get
    28 -> return Unfocus
    29 -> MatchRefine <$> get
    30 -> LetTacTy <$> get <*> get <*> get
    31 -> return TCImplementation
    32 -> GoalType <$> get <*> get
    33 -> TCheck <$> get
    34 -> TEval <$> get
    35 -> TDocStr <$> get
    36 -> TSearch <$> get
    37 -> return Skip
    38 -> TFail <$> get
    39 -> return Abandon
    _ -> error "Corrupted binary data for PTactic'"
instance (Binary t) => Binary (PDo' t) where
  -- One tag byte per constructor, then the fields in order; the tag
  -- values are part of the .ibc format and must not be renumbered.
  put (DoExp a b) = putWord8 0 >> put a >> put b
  put (DoBind a b c d) = putWord8 1 >> put a >> put b >> put c >> put d
  put (DoBindP a b c d) = putWord8 2 >> put a >> put b >> put c >> put d
  put (DoLet a b c d e) = putWord8 3 >> put a >> put b >> put c >> put d >> put e
  put (DoLetP a b c) = putWord8 4 >> put a >> put b >> put c
  get = getWord8 >>= \tag -> case tag of
    0 -> do { a <- get; b <- get; return (DoExp a b) }
    1 -> do { a <- get; b <- get; c <- get; d <- get
            ; return (DoBind a b c d) }
    2 -> do { a <- get; b <- get; c <- get; d <- get
            ; return (DoBindP a b c d) }
    3 -> do { a <- get; b <- get; c <- get; d <- get; e <- get
            ; return (DoLet a b c d e) }
    4 -> do { a <- get; b <- get; c <- get; return (DoLetP a b c) }
    _ -> error "Corrupted binary data for PDo'"
instance (Binary t) => Binary (PArg' t) where
  -- One tag byte per constructor, then the fields in order; the tag
  -- values are part of the .ibc format and must not be renumbered.
  put (PImp a b c d e) =
    putWord8 0 >> put a >> put b >> put c >> put d >> put e
  put (PExp a b c d) =
    putWord8 1 >> put a >> put b >> put c >> put d
  put (PConstraint a b c d) =
    putWord8 2 >> put a >> put b >> put c >> put d
  put (PTacImplicit a b c d e) =
    putWord8 3 >> put a >> put b >> put c >> put d >> put e
  get = getWord8 >>= \tag -> case tag of
    0 -> do { a <- get; b <- get; c <- get; d <- get; e <- get
            ; return (PImp a b c d e) }
    1 -> do { a <- get; b <- get; c <- get; d <- get
            ; return (PExp a b c d) }
    2 -> do { a <- get; b <- get; c <- get; d <- get
            ; return (PConstraint a b c d) }
    3 -> do { a <- get; b <- get; c <- get; d <- get; e <- get
            ; return (PTacImplicit a b c d e) }
    _ -> error "Corrupted binary data for PArg'"
instance Binary InterfaceInfo where
  -- The eighth field of CI is deliberately NOT serialised; on load it
  -- is restored as the empty list.
  put (CI a b c d e f g _ h) =
    put a >> put b >> put c >> put d >> put e >> put f >> put g >> put h
  get = do
    a <- get; b <- get; c <- get; d <- get
    e <- get; f <- get; g <- get; h <- get
    return (CI a b c d e f g [] h)
instance Binary RecordInfo where
  -- Plain field-by-field serialisation; no tag byte (single constructor).
  put (RI a b c) = put a >> put b >> put c
  get = get >>= \a -> get >>= \b -> fmap (RI a b) get
instance Binary OptInfo where
  -- Plain field-by-field serialisation; no tag byte (single constructor).
  put (Optimise a b c) = put a >> put b >> put c
  get = get >>= \a -> get >>= \b -> fmap (Optimise a b) get
instance Binary FnInfo where
  -- FnInfo is a single-field wrapper, so it serialises transparently.
  put (FnInfo a) = put a
  get = fmap FnInfo get
instance Binary TypeInfo where
  -- Plain field-by-field serialisation; no tag byte (single constructor).
  put (TI a b c d e f) =
    put a >> put b >> put c >> put d >> put e >> put f
  get = do
    a <- get; b <- get; c <- get
    d <- get; e <- get; f <- get
    return (TI a b c d e f)
instance Binary SynContext where
  -- Nullary constructors are encoded as bare tag bytes 0..2.
  put PatternSyntax = putWord8 0
  put TermSyntax = putWord8 1
  put AnySyntax = putWord8 2
  get = getWord8 >>= \tag -> case tag of
    0 -> return PatternSyntax
    1 -> return TermSyntax
    2 -> return AnySyntax
    _ -> error "Corrupted binary data for SynContext"
instance Binary Syntax where
  -- One tag byte per constructor, then the fields in order.
  put (Rule a b c) = putWord8 0 >> put a >> put b >> put c
  put (DeclRule a b) = putWord8 1 >> put a >> put b
  get = getWord8 >>= \tag -> case tag of
    0 -> do { a <- get; b <- get; c <- get; return (Rule a b c) }
    1 -> do { a <- get; b <- get; return (DeclRule a b) }
    _ -> error "Corrupted binary data for Syntax"
instance (Binary t) => Binary (DSL' t) where
  -- Plain field-by-field serialisation; no tag byte (single constructor).
  put (DSL a b c d e f g h i) =
    put a >> put b >> put c >> put d >> put e >>
    put f >> put g >> put h >> put i
  get = do
    a <- get; b <- get; c <- get
    d <- get; e <- get; f <- get
    g <- get; h <- get; i <- get
    return (DSL a b c d e f g h i)
instance Binary SSymbol where
  -- One tag byte per constructor, then the single field.
  put (Keyword a) = putWord8 0 >> put a
  put (Symbol a) = putWord8 1 >> put a
  put (Expr a) = putWord8 2 >> put a
  put (SimpleExpr a) = putWord8 3 >> put a
  put (Binding a) = putWord8 4 >> put a
  get = getWord8 >>= \tag -> case tag of
    0 -> fmap Keyword get
    1 -> fmap Symbol get
    2 -> fmap Expr get
    3 -> fmap SimpleExpr get
    4 -> fmap Binding get
    _ -> error "Corrupted binary data for SSymbol"
instance Binary Codegen where
  -- Tag 0 carries the IR format and backend name; tag 1 is bare Bytecode.
  put (Via ir str) = putWord8 0 >> put ir >> put str
  put Bytecode = putWord8 1
  get = getWord8 >>= \tag -> case tag of
    0 -> do { ir <- get; str <- get; return (Via ir str) }
    1 -> return Bytecode
    _ -> error "Corrupted binary data for Codegen"
instance Binary IRFormat where
  -- Nullary constructors are encoded as bare tag bytes 0/1.
  put IBCFormat = putWord8 0
  put JSONFormat = putWord8 1
  get = getWord8 >>= \tag -> case tag of
    0 -> return IBCFormat
    1 -> return JSONFormat
    _ -> error "Corrupted binary data for IRFormat"
| bravit/Idris-dev | src/Idris/IBC.hs | bsd-3-clause | 102,501 | 0 | 21 | 56,836 | 27,999 | 12,852 | 15,147 | 2,455 | 17 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Turtlesim.TeleportAbsoluteRequest where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.SrvInfo
import Foreign.Storable (Storable(..))
import qualified Ros.Internal.Util.StorableMonad as SM
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
-- | Request half of the @turtlesim/TeleportAbsolute@ service: the absolute
-- pose to teleport the turtle to.  ROS @float32@ fields map to 'P.Float'.
data TeleportAbsoluteRequest = TeleportAbsoluteRequest { _x :: P.Float -- ^ target x position
                                                       , _y :: P.Float -- ^ target y position
                                                       , _theta :: P.Float -- ^ target heading (presumably radians -- confirm against turtlesim docs)
                                                       } deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
-- Generate lenses (x, y, theta) for the record fields above.
$(makeLenses ''TeleportAbsoluteRequest)
-- ROS wire (de)serialisation: fields are written and read in declaration
-- order (x, y, theta), matching the message definition.
instance RosBinary TeleportAbsoluteRequest where
  put obj' = put (_x obj') *> put (_y obj') *> put (_theta obj')
  get = TeleportAbsoluteRequest <$> get <*> get <*> get
-- Storable instance for raw in-memory marshalling of the three floats.
instance Storable TeleportAbsoluteRequest where
  sizeOf _ = sizeOf (P.undefined::P.Float) +
             sizeOf (P.undefined::P.Float) +
             sizeOf (P.undefined::P.Float)
  alignment _ = 8 -- NOTE(review): stricter than a Float's natural alignment; presumably deliberate -- confirm
  peek = SM.runStorable (TeleportAbsoluteRequest <$> SM.peek <*> SM.peek <*> SM.peek)
  poke ptr' obj' = SM.runStorable store' ptr'
    where store' = SM.poke (_x obj') *> SM.poke (_y obj') *> SM.poke (_theta obj')
-- ROS metadata: MD5 of the message definition and its fully-qualified name.
instance MsgInfo TeleportAbsoluteRequest where
  sourceMD5 _ = "a130bc60ee6513855dc62ea83fcc5b20"
  msgTypeName _ = "turtlesim/TeleportAbsoluteRequest"
-- Generic-derived default value (all fields zeroed).
instance D.Default TeleportAbsoluteRequest
-- Service metadata: note the service name drops the "Request" suffix.
instance SrvInfo TeleportAbsoluteRequest where
  srvMD5 _ = "a130bc60ee6513855dc62ea83fcc5b20"
  srvTypeName _ = "turtlesim/TeleportAbsolute"
| acowley/roshask | msgs/Turtlesim/Ros/Turtlesim/TeleportAbsoluteRequest.hs | bsd-3-clause | 1,940 | 1 | 12 | 419 | 505 | 284 | 221 | 41 | 0 |
{-# LANGUAGE TypeSynonymInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Network.BufferType
-- Description : Abstract representation of request and response buffer types.
-- Copyright : See LICENSE file
-- License : BSD
--
-- Maintainer : Ganesh Sittampalam <http@projects.haskell.org>
-- Stability : experimental
-- Portability : non-portable (not tested)
--
-- In order to give the user freedom in how request and response content
-- is represented, a sufficiently abstract representation is needed of
-- these internally. The "Network.BufferType" module provides this, defining
-- the 'BufferType' class and its ad-hoc representation of buffer operations
-- via the 'BufferOp' record.
--
-- This module provides definitions for the standard buffer types that the
-- package supports, i.e., for @String@ and @ByteString@ (strict and lazy.)
--
-----------------------------------------------------------------------------
module Network.BufferType
(
BufferType(..)
, BufferOp(..)
, strictBufferOp
, lazyBufferOp
, stringBufferOp
) where
import qualified Data.ByteString as Strict hiding ( unpack, pack, span )
import qualified Data.ByteString.Char8 as Strict ( unpack, pack, span )
import qualified Data.ByteString.Lazy as Lazy hiding ( pack, unpack,span )
import qualified Data.ByteString.Lazy.Char8 as Lazy ( pack, unpack, span )
import System.IO ( Handle )
import Data.Word ( Word8 )
import Network.HTTP.Utils ( crlf, lf )
-- | The @BufferType@ class encodes, in a mixed-mode way, the interface
-- that the library requires to operate over data embedded in HTTP
-- requests and responses. That is, we use explicit dictionaries
-- for the operations, but overload the name of the dicts themselves.
--
class BufferType bufType where
   bufferOps :: BufferOp bufType
   -- ^ the operations dictionary for this buffer representation
-- The three supported representations simply name their dictionaries:
instance BufferType Lazy.ByteString where
   bufferOps = lazyBufferOp
instance BufferType Strict.ByteString where
   bufferOps = strictBufferOp
instance BufferType String where
   bufferOps = stringBufferOp
-- | @BufferOp@ encodes the I/O operations of the underlying buffer over
-- a Handle in an (explicit) dictionary type. May not be needed, but gives
-- us flexibility in explicit overriding and wrapping up of these methods.
--
-- Along with IO operations is an ad-hoc collection of functions for working
-- with these abstract buffers, as needed by the internals of the code
-- that processes requests and responses.
--
-- We supply three default @BufferOp@ values, for @String@ along with the
-- strict and lazy versions of @ByteString@. To add others, provide @BufferOp@
-- definitions for the buffer type in question.
data BufferOp a
 = BufferOp
    { buf_hGet :: Handle -> Int -> IO a -- ^ read exactly @n@ bytes
    , buf_hGetContents :: Handle -> IO a -- ^ read until EOF
    , buf_hPut :: Handle -> a -> IO () -- ^ write a buffer
    , buf_hGetLine :: Handle -> IO a -- ^ read a single line
    , buf_empty :: a -- ^ the empty buffer
    , buf_append :: a -> a -> a -- ^ concatenate two buffers
    , buf_concat :: [a] -> a -- ^ concatenate a list of buffers
    , buf_fromStr :: String -> a -- ^ convert from 'String'
    , buf_toStr :: a -> String -- ^ convert to 'String'
    , buf_snoc :: a -> Word8 -> a -- ^ append one byte at the end
    , buf_splitAt :: Int -> a -> (a,a) -- ^ split at the given index
    , buf_span :: (Char -> Bool) -> a -> (a,a) -- ^ longest matching prefix and the rest
    , buf_isLineTerm :: a -> Bool -- ^ is this buffer a line terminator (CRLF or LF)?
    , buf_isEmpty :: a -> Bool -- ^ is the buffer empty?
    }
-- NOTE: deliberately trivial -- a 'BufferOp' is a record of functions,
-- which cannot be compared, so (==) is constantly 'False'.  This
-- instance is NOT reflexive; do not use it for real equality checks.
instance Eq (BufferOp a) where
   _ == _ = False
-- | @strictBufferOp@ is the 'BufferOp' definition over @ByteString@s,
-- the non-lazy kind.
strictBufferOp :: BufferOp Strict.ByteString
strictBufferOp = BufferOp
    { buf_hGet         = Strict.hGet
    , buf_hGetContents = Strict.hGetContents
    , buf_hPut         = Strict.hPut
    , buf_hGetLine     = Strict.hGetLine
    , buf_empty        = Strict.empty
    , buf_append       = Strict.append
    , buf_concat       = Strict.concat
    , buf_fromStr      = Strict.pack
    , buf_toStr        = Strict.unpack
    , buf_snoc         = Strict.snoc
    , buf_splitAt      = Strict.splitAt
    , buf_span         = Strict.span
      -- A line terminator is either CRLF or a bare LF; direct equality
      -- against the packed terminators subsumes the length checks.
    , buf_isLineTerm   = \ term -> term == packedCRLF || term == packedLF
    , buf_isEmpty      = Strict.null
    }
  where
    packedCRLF = Strict.pack crlf
    packedLF   = Strict.pack lf
-- | @lazyBufferOp@ is the 'BufferOp' definition over @ByteString@s,
-- the non-strict kind.
lazyBufferOp :: BufferOp Lazy.ByteString
lazyBufferOp = BufferOp
    { buf_hGet         = Lazy.hGet
    , buf_hGetContents = Lazy.hGetContents
    , buf_hPut         = Lazy.hPut
      -- Read a strict line, then lift it into a one-chunk lazy ByteString.
    , buf_hGetLine     = \ h -> fmap (\ l -> Lazy.fromChunks [l]) (Strict.hGetLine h)
    , buf_empty        = Lazy.empty
    , buf_append       = Lazy.append
    , buf_concat       = Lazy.concat
    , buf_fromStr      = Lazy.pack
    , buf_toStr        = Lazy.unpack
    , buf_snoc         = Lazy.snoc
      -- 'Lazy.splitAt' takes an Int64, hence the 'fromIntegral'.
    , buf_splitAt      = \ n -> Lazy.splitAt (fromIntegral n)
    , buf_span         = Lazy.span
      -- A line terminator is either CRLF or a bare LF; direct equality
      -- against the packed terminators subsumes the length checks.
    , buf_isLineTerm   = \ term -> term == packedCRLF || term == packedLF
    , buf_isEmpty      = Lazy.null
    }
  where
    packedCRLF = Lazy.pack crlf
    packedLF   = Lazy.pack lf
-- | @stringBufferOp@ is the 'BufferOp' definition over @String@s.
-- It is defined in terms of @strictBufferOp@ operations,
-- unpacking/converting to @String@ when needed.
-- Fix: replaced the 'm >>= return . f' anti-idiom with 'fmap f m'
-- (behaviour unchanged), and restored the missing space after '='.
stringBufferOp :: BufferOp String
stringBufferOp = BufferOp
    { buf_hGet         = \ h n -> fmap Strict.unpack (buf_hGet strictBufferOp h n)
    , buf_hGetContents = \ h -> fmap Strict.unpack (buf_hGetContents strictBufferOp h)
    , buf_hPut         = \ h s -> buf_hPut strictBufferOp h (Strict.pack s)
    , buf_hGetLine     = \ h -> fmap Strict.unpack (buf_hGetLine strictBufferOp h)
    , buf_append       = (++)
    , buf_concat       = concat
    , buf_fromStr      = id
    , buf_toStr        = id
      -- Append one byte, converted to a Char via its code point.
    , buf_snoc         = \ a x -> a ++ [toEnum (fromIntegral x)]
    , buf_splitAt      = splitAt
      -- Delegate to the strict-ByteString span, converting at the edges.
    , buf_span         = \ p a ->
       case Strict.span p (Strict.pack a) of
         (x,y) -> (Strict.unpack x, Strict.unpack y)
    , buf_empty        = []
    , buf_isLineTerm   = \ b -> b == crlf || b == lf
    , buf_isEmpty      = null
    }
| beni55/HTTP | Network/BufferType.hs | bsd-3-clause | 6,368 | 0 | 16 | 1,803 | 1,199 | 711 | 488 | 99 | 1 |
{-# LANGUAGE MultiParamTypeClasses, ScopedTypeVariables
, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : $Header$
Description : CspCASL instance of type class logic
Copyright : (c) Markus Roggenbach, Till Mossakowski and Uni Bremen 2003
License : GPLv2 or higher, see LICENSE.txt
Maintainer : M.Roggenbach@swansea.ac.uk
Stability : experimental
Portability : non-portable(import Logic.Logic)
Here is the place where the class Logic is instantiated for CspCASL. A
CspCASL signature is a CASL signature with a set of named channels and
processes. Every process has a profile. Morphisms are supposed to allow
renaming of channels and processes, too. Also sublogics (as a superset of some
CASL sublogics) are still missing.
-}
module CspCASL.Logic_CspCASL
( GenCspCASL (..)
, CspCASLSemantics
, CspCASL
, cspCASL
, Trace (..)
, traceCspCASL
, Failure (..)
, failureCspCASL
) where
import Logic.Logic
import Logic.Prover
import CASL.Logic_CASL
import CASL.Parse_AS_Basic
import CASL.Morphism
import CASL.Sign
import CASL.ToDoc
import qualified CASL.MapSentence as MapSen
import qualified CASL.SimplifySen as SimpSen
import CspCASL.ATC_CspCASL ()
import CspCASL.CspCASL_Keywords
import CspCASL.Morphism as CspCASL_Morphism
import CspCASL.Parse_CspCASL ()
import CspCASL.Print_CspCASL ()
import qualified CspCASL.SignCSP as SignCSP
import qualified CspCASL.SimplifySen as SimplifySen
import CspCASL.StatAnaCSP
import CspCASL.SymbItems
import CspCASL.Symbol
import CspCASL.SymMapAna
import CspCASLProver.CspCASLProver (cspCASLProver)
-- | a generic logic id for CspCASL with different semantics
data GenCspCASL a = GenCspCASL a deriving Show
-- | The default CspCASL logic id (unit semantics parameter).
cspCASL :: GenCspCASL ()
cspCASL = GenCspCASL ()
-- | The top-level logic with the loosest semantics (and without provers)
type CspCASL = GenCspCASL ()
instance Show a => Language (GenCspCASL a) where
    -- The language name is "CspCASL" with the semantics tag appended
    -- (e.g. "CspCASL_Trace"); a plain () parameter adds no suffix.
    language_name (GenCspCASL a) = "CspCASL" ++ suffix
      where suffix = case show a of
                       "()" -> ""
                       s -> '_' : s
    description _ =
        "CspCASL - see\n\n" ++
        "http://www.cs.swan.ac.uk/~csmarkus/ProcessesAndData/"
-- | Instance of Sentences for CspCASL
instance Show a => Sentences (GenCspCASL a)
        -- sentence
        SignCSP.CspCASLSen
        -- signature
        SignCSP.CspCASLSign
        -- morphism
        CspCASL_Morphism.CspCASLMorphism
        -- symbol
        CspSymbol
        where
        -- translate a sentence along a morphism (via MapSen.mapSen)
        map_sen (GenCspCASL _) m = return . MapSen.mapSen mapSen m
        sym_name (GenCspCASL _) = cspSymName
        symmap_of (GenCspCASL _) = cspMorphismToCspSymbMap
        -- simplification delegates to the CspCASL-specific simplifier
        simplify_sen (GenCspCASL _) =
          SimpSen.simplifySen (const return) SimplifySen.simplifySen
        sym_of (GenCspCASL _) = symSets
        print_named (GenCspCASL _) = printTheoryFormula
-- | Syntax of CspCASL
instance Show a => Syntax (GenCspCASL a)
    CspBasicSpec -- basic_spec
    CspSymbol
    CspSymbItems
    CspSymbMapItems
    where
  -- All three parsers are available; basic specs start with the
  -- CspCASL keyword set.
  parse_symb_items (GenCspCASL _) = Just cspSymbItems
  parse_symb_map_items (GenCspCASL _) = Just cspSymbMapItems
  parse_basic_spec (GenCspCASL _) = Just $ basicSpec startCspKeywords
-- lattices (for sublogics) missing
-- | Each semantics variant plugs in here and may supply its own provers;
-- the default is an empty prover list.
class Show a => CspCASLSemantics a where
  cspProvers :: a
    -> [Prover SignCSP.CspCASLSign SignCSP.CspCASLSen
        CspCASL_Morphism.CspCASLMorphism () ()]
  cspProvers _ = []
{- further dummy types for the trace of the failure semantics can be added
and made an instance of CspCASLSemantics.
"identity" Comorphisms between these different logics still need to be
defined.
-}
instance CspCASLSemantics ()
-- | Tag type selecting the trace semantics.
data Trace = Trace deriving Show
-- | Tag type selecting the failure semantics.
data Failure = Failure deriving Show
traceCspCASL :: GenCspCASL Trace
traceCspCASL = GenCspCASL Trace
failureCspCASL :: GenCspCASL Failure
failureCspCASL = GenCspCASL Failure
-- Only the trace semantics currently has a prover (CspCASLProver).
instance CspCASLSemantics Trace where
    cspProvers _ = [cspCASLProver]
instance CspCASLSemantics Failure
-- | Instance of Logic for CspCASL
instance CspCASLSemantics a => Logic (GenCspCASL a)
    -- Sublogics (missing)
    ()
    -- basic_spec
    CspBasicSpec
    -- sentence
    SignCSP.CspCASLSen
    -- symb_items
    CspSymbItems
    -- symb_map_items
    CspSymbMapItems
    -- signature
    SignCSP.CspCASLSign
    -- morphism
    CspCASL_Morphism.CspCASLMorphism
    CspSymbol
    CspRawSymbol
    -- proof_tree (missing)
    ()
    where
      stability (GenCspCASL _) = Experimental
      data_logic (GenCspCASL _) = Just (Logic CASL)
      empty_proof_tree _ = ()
      -- ScopedTypeVariables: 'a' here is the semantics tag of the instance
      provers (GenCspCASL _) = cspProvers (undefined :: a)
-- | Static Analysis for CspCASL
instance Show a => StaticAnalysis (GenCspCASL a)
    -- basic_spec
    CspBasicSpec
    -- sentence
    SignCSP.CspCASLSen
    -- symb_items
    CspSymbItems
    -- symb_map_items
    CspSymbMapItems
    -- signature
    SignCSP.CspCASLSign
    -- morphism
    CspCASL_Morphism.CspCASLMorphism
    CspSymbol
    CspRawSymbol
    where
     -- All analysis operations delegate to the CspCASL-specific
     -- implementations in StatAnaCSP / SymMapAna / SignCSP.
     basic_analysis (GenCspCASL _) = Just basicAnalysisCspCASL
     stat_symb_items (GenCspCASL _) = cspStatSymbItems
     stat_symb_map_items (GenCspCASL _) = cspStatSymbMapItems
     id_to_raw (GenCspCASL _) = idToCspRaw
     symbol_to_raw (GenCspCASL _) = ACspSymbol
     matches (GenCspCASL _) = cspMatches
     empty_signature (GenCspCASL _) = SignCSP.emptyCspCASLSign
     is_subsig (GenCspCASL _) = SignCSP.isCspCASLSubSig
     subsig_inclusion (GenCspCASL _) = cspSubsigInclusion
     signature_union (GenCspCASL _) = SignCSP.unionCspCASLSign
     signatureDiff (GenCspCASL _) s = return . diffSig SignCSP.diffCspSig s
     morphism_union (GenCspCASL _) =
       morphismUnion CspCASL_Morphism.cspAddMorphismUnion
       SignCSP.cspSignUnion
     induced_from_morphism (GenCspCASL _) = cspInducedFromMorphism
     induced_from_to_morphism (GenCspCASL _) = cspInducedFromToMorphism
     cogenerated_sign (GenCspCASL _) = cspCogeneratedSign
     generated_sign (GenCspCASL _) = cspGeneratedSign
| keithodulaigh/Hets | CspCASL/Logic_CspCASL.hs | gpl-2.0 | 5,912 | 0 | 11 | 1,205 | 1,127 | 602 | 525 | 119 | 1 |
{-# language MultiParamTypeClasses #-}
{-# language FlexibleInstances #-}
module DPLL.Pattern where
import DPLL.Data
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Reporter
import Control.Applicative ( (<$>) )
import System.Random
-- | A pattern over values of type @v@: 'Any' matches anything,
-- @'This' x@ matches exactly @x@.
data Pattern v = Any | This v
instance ToDoc v => ToDoc (Pattern v) where
    -- The wildcard prints as "*", mirroring the Reader instance below.
    toDoc Any = text "*"
    toDoc (This x) = toDoc x
-- Parse "*" as the wildcard, otherwise fall back to the value parser.
instance Reader v => Reader (Pattern v) where
    reader = const Any <$> my_symbol "*"
         <|> This <$> reader
-- | @matches p d@ succeeds in the 'Reporter' monad iff pattern @p@
-- accepts object @d@; otherwise it rejects with an explanation.
class Matches p d where
    matches :: p -> d -> Reporter ()
-- Base case: a concrete value only matches an equal value.
-- (User-facing rejection messages are intentionally in German.)
instance (Eq d, ToDoc d) => Matches d d where
  matches x y = when (x /= y) $ reject $ vcat
    [ text "Objekt" <+> toDoc y
    , text "paßt nicht zum Muster" <+> toDoc x
    ]
instance (Eq d, ToDoc d, Matches p d) => Matches (Pattern p) d where
  -- 'Any' accepts everything; 'This' defers to the inner pattern.
  matches Any _ = return ()
  matches (This inner) y = matches inner y
-- Lists match elementwise; length mismatches are rejected with a
-- message naming the offending side.  (Messages are in German.)
instance (ToDoc d, ToDoc p, Matches p d) => Matches [p] [d] where
  matches [] [] = return ()
  matches (p:ps) [] = reject $ vcat
    [ text "leere Liste"
    , text "paßt nicht zu nicht-leerem Muster" <+> toDoc (p:ps)
    ]
  matches [] (d:ds) = reject $ vcat
    [ text "Objekt" <+> toDoc (d:ds)
    , text "paßt nicht zu leerem Muster"
    ]
  matches (p:ps) (d:ds) = do
    matches p d
    matches ps ds
-- | @embed@ lifts an object into pattern space unchanged; @punch@
-- replaces some part of it with a wildcard (randomly chosen for
-- compound instances such as lists).
class Punch d p where
  embed :: d -> p
  punch :: d -> IO p
-- | Intended to introduce @k@ wildcards into @d@.
-- NOTE(review): this is an unimplemented stub -- it ignores both
-- arguments and diverges via 'undefined' when forced.  Do not call
-- it until it has been implemented.
punches :: Punch d p => Int -> d -> IO p
punches k d = undefined
-- | introduce exactly one pattern
instance Punch d (Pattern d) where
  embed d = This d
  -- punching a single value always yields the wildcard
  punch d = return Any
-- Patterns embed into themselves; punching again still gives 'Any'.
instance Punch (Pattern d) (Pattern d) where
  embed d = d
  punch d = return Any
-- Punch a randomly chosen element of the list, embedding the rest.
-- NOTE(review): partial on the empty list -- 'randomRIO (0, -1)' and
-- the '(pre, this : post)' pattern both fail for @xs == []@; confirm
-- that callers never pass an empty list.
instance Punch d p => Punch [d] [p] where
  embed = map embed
  punch xs = do
    i <- randomRIO (0, length xs - 1)
    let (pre, this : post) = splitAt i xs
    that <- punch this
    return $ map embed pre ++ that : map embed post
| marcellussiegburg/autotool | collection/src/FD/Pattern.hs | gpl-2.0 | 1,974 | 0 | 12 | 608 | 818 | 407 | 411 | 56 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
A ``lint'' pass to check for Core correctness
-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fprof-auto #-}
module CoreLint (
lintCoreBindings, lintUnfolding,
lintPassResult, lintInteractiveExpr, lintExpr,
lintAnnots,
-- ** Debug output
CoreLint.showPass, showPassIO, endPass, endPassIO,
dumpPassResult,
CoreLint.dumpIfSet,
) where
#include "HsVersions.h"
import CoreSyn
import CoreFVs
import CoreUtils
import CoreMonad
import Bag
import Literal
import DataCon
import TysWiredIn
import TysPrim
import Var
import VarEnv
import VarSet
import Name
import Id
import PprCore
import ErrUtils
import Coercion
import SrcLoc
import Kind
import Type
import TypeRep
import TyCon
import CoAxiom
import BasicTypes
import ErrUtils as Err
import StaticFlags
import ListSetOps
import PrelNames
import Outputable
import FastString
import Util
import InstEnv ( instanceDFunId )
import OptCoercion ( checkAxInstCo )
import UniqSupply
import CoreArity ( typeArity )
import Demand ( splitStrictSig, isBotRes )
import HscTypes
import DynFlags
import Control.Monad
import MonadUtils
import Data.Maybe
import Pair
{-
Note [GHC Formalism]
~~~~~~~~~~~~~~~~~~~~
This file implements the type-checking algorithm for System FC, the "official"
name of the Core language. Type safety of FC is heart of the claim that
executables produced by GHC do not have segmentation faults. Thus, it is
useful to be able to reason about System FC independently of reading the code.
To this purpose, there is a document ghc.pdf built in docs/core-spec that
contains a formalism of the types and functions dealt with here. If you change
just about anything in this file or you change other types/functions throughout
the Core language (all signposted to this note), you should update that
formalism. See docs/core-spec/README for more info about how to do so.
Summary of checks
~~~~~~~~~~~~~~~~~
Checks that a set of core bindings is well-formed. The PprStyle and String
just control what we print in the event of an error. The Bool value
indicates whether we have done any specialisation yet (in which case we do
some extra checks).
We check for
(a) type errors
(b) Out-of-scope type variables
(c) Out-of-scope local variables
(d) Ill-kinded types
(e) Incorrect unsafe coercions
If we have done specialisation then we check that there are
(a) No top-level bindings of primitive (unboxed type)
Outstanding issues:
-- Things are *not* OK if:
--
-- * Unsaturated type app before specialisation has been done;
--
-- * Oversaturated type app after specialisation (eta reduction
-- may well be happening...);
Note [Linting type lets]
~~~~~~~~~~~~~~~~~~~~~~~~
In the desugarer, it's very very convenient to be able to say (in effect)
let a = Type Int in <body>
That is, use a type let. See Note [Type let] in CoreSyn.
However, when linting <body> we need to remember that a=Int, else we might
reject a correct program. So we carry a type substitution (in this example
[a -> Int]) and apply this substitution before comparing types. The function
lintInTy :: Type -> LintM Type
returns a substituted type; that's the only reason it returns anything.
When we encounter a binder (like x::a) we must apply the substitution
to the type of the binding variable. lintBinders does this.
For Ids, the type-substituted Id is added to the in_scope set (which
itself is part of the TvSubst we are carrying down), and when we
find an occurrence of an Id, we fetch it from the in-scope set.
Note [Bad unsafe coercion]
~~~~~~~~~~~~~~~~~~~~~~~~~~
For discussion see https://ghc.haskell.org/trac/ghc/wiki/BadUnsafeCoercions
Linter introduces additional rules that checks improper coercion between
different types, called bad coercions. Following coercions are forbidden:
(a) coercions between boxed and unboxed values;
(b) coercions between unlifted values of the different sizes, here
active size is checked, i.e. size of the actual value but not
the space allocated for value;
(c) coercions between floating and integral boxed values, this check
is not yet supported for unboxed tuples, as no semantics were
specified for that;
(d) coercions from / to vector type
(e) If types are unboxed tuples then tuple (# A_1,..,A_n #) can be
coerced to (# B_1,..,B_m #) if n=m and for each pair A_i, B_i rules
(a-e) holds.
************************************************************************
* *
Beginning and ending passes
* *
************************************************************************
These functions are not CoreM monad stuff, but they probably ought to
be, and this makes a convenient place for them. They print out
stuff before and after core passes, and do Core Lint when necessary.
-}
-- | Print a banner for a core-to-core pass ('CoreM' wrapper around
-- 'showPassIO'; fetches the DynFlags from the monad).
showPass :: CoreToDo -> CoreM ()
showPass pass = do { dflags <- getDynFlags
                   ; liftIO $ showPassIO dflags pass }
-- | IO-level worker for 'showPass': renders the pass name via 'showPpr'
-- and defers to 'Err.showPass' for the actual trace output.
showPassIO :: DynFlags -> CoreToDo -> IO ()
showPassIO dflags pass = Err.showPass dflags (showPpr dflags pass)
-- | Wrap up a core-to-core pass: dump the result (when the relevant
-- dump flag is on) and run Core Lint over it.  'CoreM' wrapper around
-- 'endPassIO'.
endPass :: CoreToDo -> CoreProgram -> [CoreRule] -> CoreM ()
endPass pass binds rules
  = do { hsc_env <- getHscEnv
       ; print_unqual <- getPrintUnqualified
       ; liftIO $ endPassIO hsc_env print_unqual pass binds rules }
-- | IO-level worker for 'endPass': dump the bindings and rules, then lint.
endPassIO :: HscEnv -> PrintUnqualified
          -> CoreToDo -> CoreProgram -> [CoreRule] -> IO ()
-- Used by the IO-based CorePrep too
endPassIO hsc_env print_unqual pass binds rules
  = do { dumpPassResult dflags print_unqual mb_flag
                        (ppr pass) (pprPassDetails pass) binds rules
       ; lintPassResult hsc_env pass binds }
  where
    dflags = hsc_dflags hsc_env
    -- Dump details when the pass's own flag is set, or when verbose
    -- core2core dumping is requested.
    mb_flag = case coreDumpFlag pass of
                Just flag | dopt flag dflags -> Just flag
                          | dopt Opt_D_verbose_core2core dflags -> Just flag
                _ -> Nothing
-- | Flag-gated dump with the pass name in the header; thin wrapper
-- around 'Err.dumpIfSet'.
dumpIfSet :: DynFlags -> Bool -> CoreToDo -> SDoc -> SDoc -> IO ()
dumpIfSet dflags dump_me pass extra_info doc
  = Err.dumpIfSet dflags dump_me (showSDoc dflags (ppr pass <+> extra_info)) doc
-- | Dump the result of a pass: full details when a dump flag is given,
-- otherwise just the result-size line at verbosity >= 2.
dumpPassResult :: DynFlags
               -> PrintUnqualified
               -> Maybe DumpFlag -- Just df => show details in a file whose
                                 -- name is specified by df
               -> SDoc -- Header
               -> SDoc -- Extra info to appear after header
               -> CoreProgram -> [CoreRule]
               -> IO ()
dumpPassResult dflags unqual mb_flag hdr extra_info binds rules
  | Just flag <- mb_flag
  = Err.dumpSDoc dflags unqual flag (showSDoc dflags hdr) dump_doc
  | otherwise
  = Err.debugTraceMsg dflags 2 size_doc
          -- Report result size
          -- This has the side effect of forcing the intermediate to be evaluated
  where
    size_doc = sep [text "Result size of" <+> hdr, nest 2 (equals <+> ppr (coreBindsStats binds))]
    dump_doc = vcat [ nest 2 extra_info
                    , size_doc
                    , blankLine
                    , pprCoreBindings binds
                    , ppUnless (null rules) pp_rules ]
    pp_rules = vcat [ blankLine
                    , ptext (sLit "------ Local rules for imported ids --------")
                    , pprRules rules ]
-- | The dump flag (if any) that governs printing each pass's result;
-- Nothing means the pass's result is never dumped on its own.
coreDumpFlag :: CoreToDo -> Maybe DumpFlag
coreDumpFlag (CoreDoSimplify {}) = Just Opt_D_verbose_core2core
coreDumpFlag (CoreDoPluginPass {}) = Just Opt_D_verbose_core2core
coreDumpFlag CoreDoFloatInwards = Just Opt_D_verbose_core2core
coreDumpFlag (CoreDoFloatOutwards {}) = Just Opt_D_verbose_core2core
coreDumpFlag CoreLiberateCase = Just Opt_D_verbose_core2core
coreDumpFlag CoreDoStaticArgs = Just Opt_D_verbose_core2core
coreDumpFlag CoreDoCallArity = Just Opt_D_dump_call_arity
coreDumpFlag CoreDoStrictness = Just Opt_D_dump_stranal
coreDumpFlag CoreDoWorkerWrapper = Just Opt_D_dump_worker_wrapper
coreDumpFlag CoreDoSpecialising = Just Opt_D_dump_spec
coreDumpFlag CoreDoSpecConstr = Just Opt_D_dump_spec
coreDumpFlag CoreCSE = Just Opt_D_dump_cse
coreDumpFlag CoreDoVectorisation = Just Opt_D_dump_vect
coreDumpFlag CoreDesugar = Just Opt_D_dump_ds
coreDumpFlag CoreDesugarOpt = Just Opt_D_dump_ds
coreDumpFlag CoreTidy = Just Opt_D_dump_simpl
coreDumpFlag CorePrep = Just Opt_D_dump_prep
coreDumpFlag CoreDoPrintCore = Nothing
coreDumpFlag (CoreDoRuleCheck {}) = Nothing
coreDumpFlag CoreDoNothing = Nothing
coreDumpFlag (CoreDoPasses {}) = Nothing
{-
************************************************************************
* *
Top-level interfaces
* *
************************************************************************
-}
-- | Lint the result of a pass, but only when -dcore-lint is on;
-- 'displayLintResults' prints any findings and aborts on errors.
lintPassResult :: HscEnv -> CoreToDo -> CoreProgram -> IO ()
lintPassResult hsc_env pass binds
  | not (gopt Opt_DoCoreLinting dflags)
  = return ()
  | otherwise
  = do { let (warns, errs) = lintCoreBindings dflags pass (interactiveInScope hsc_env) binds
       ; Err.showPass dflags ("Core Linted result of " ++ showPpr dflags pass)
       ; displayLintResults dflags pass warns errs binds }
  where
    dflags = hsc_dflags hsc_env
-- | Print Lint warnings/errors.  Errors dump the offending program and
-- exit the compiler with code 1; warnings are only shown when
-- 'showLintWarnings' permits and debug output is not suppressed.
displayLintResults :: DynFlags -> CoreToDo
                   -> Bag Err.MsgDoc -> Bag Err.MsgDoc -> CoreProgram
                   -> IO ()
displayLintResults dflags pass warns errs binds
  | not (isEmptyBag errs)
  = do { log_action dflags dflags Err.SevDump noSrcSpan defaultDumpStyle
           (vcat [ lint_banner "errors" (ppr pass), Err.pprMessageBag errs
                 , ptext (sLit "*** Offending Program ***")
                 , pprCoreBindings binds
                 , ptext (sLit "*** End of Offense ***") ])
       ; Err.ghcExit dflags 1 }
  | not (isEmptyBag warns)
  , not opt_NoDebugOutput
  , showLintWarnings pass
  = log_action dflags dflags Err.SevDump noSrcSpan defaultDumpStyle
        (lint_banner "warnings" (ppr pass) $$ Err.pprMessageBag warns)
  | otherwise = return ()
  where
    -- NB: the 'pass' argument here shadows the outer binding; it is
    -- the already-rendered pass description (an SDoc).
    lint_banner :: String -> SDoc -> SDoc
    lint_banner string pass = ptext (sLit "*** Core Lint") <+> text string
                              <+> ptext (sLit ": in result of") <+> pass
                              <+> ptext (sLit "***")
showLintWarnings :: CoreToDo -> Bool
-- Disable Lint warnings on the first simplifier pass, because
-- there may be some INLINE knots still tied, which is tiresomely noisy
showLintWarnings todo = case todo of
  CoreDoSimplify _ (SimplMode { sm_phase = InitialPhase }) -> False
  _ -> True
-- | Lint a single expression from the interactive (GHCi) context; on a
-- lint failure the error and the offending expression are dumped and
-- the compiler exits with code 1.  No-op unless -dcore-lint is on.
lintInteractiveExpr :: String -> HscEnv -> CoreExpr -> IO ()
lintInteractiveExpr what hsc_env expr
  | not (gopt Opt_DoCoreLinting dflags)
  = return ()
  | Just err <- lintExpr dflags (interactiveInScope hsc_env) expr
  = do { display_lint_err err
       ; Err.ghcExit dflags 1 }
  | otherwise
  = return ()
  where
    dflags = hsc_dflags hsc_env
    display_lint_err err
      = do { log_action dflags dflags Err.SevDump noSrcSpan defaultDumpStyle
               (vcat [ lint_banner "errors" (text what)
                     , err
                     , ptext (sLit "*** Offending Program ***")
                     , pprCoreExpr expr
                     , ptext (sLit "*** End of Offense ***") ])
           ; Err.ghcExit dflags 1 }
interactiveInScope :: HscEnv -> [Var]
-- In GHCi we may lint expressions, or bindings arising from 'deriving'
-- clauses, that mention variables bound in the interactive context.
-- These are Local things (see Note [Interactively-bound Ids in GHCi] in HscTypes).
-- So we have to tell Lint about them, lest it reports them as out of scope.
--
-- We do this by finding local-named things that may appear free in interactive
-- context. This function is pretty revolting and quite possibly not quite right.
-- When we are not in GHCi, the interactive context (hsc_IC hsc_env) is empty
-- so this is a (cheap) no-op.
--
-- See Trac #8215 for an example
interactiveInScope hsc_env
  = varSetElems tyvars ++ ids
  where
    -- C.f. TcRnDriver.setInteractiveContext, Desugar.deSugarExpr
    ictxt = hsc_IC hsc_env
    (cls_insts, _fam_insts) = ic_instances ictxt
    te1 = mkTypeEnvWithImplicits (ic_tythings ictxt)
    te = extendTypeEnvWithIds te1 (map instanceDFunId cls_insts)
    ids = typeEnvIds te
    tyvars = mapUnionVarSet (tyVarsOfType . idType) ids
              -- Why the type variables? How can the top level envt have free tyvars?
              -- I think it's because of the GHCi debugger, which can bind variables
              --   f :: [t] -> [t]
              -- where t is a RuntimeUnk (see TcType)
-- | Lint a complete program, returning (warnings, errors) as bags.
lintCoreBindings :: DynFlags -> CoreToDo -> [Var] -> CoreProgram -> (Bag MsgDoc, Bag MsgDoc)
-- Returns (warnings, errors)
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintCoreBindings dflags pass local_in_scope binds
  = initL dflags flags $
    addLoc TopLevelBindings $
    addInScopeVars local_in_scope $
    addInScopeVars binders $
        -- Put all the top-level binders in scope at the start
        -- This is because transformation rules can bring something
        -- into use 'unexpectedly'
    do { checkL (null dups) (dupVars dups)
       ; checkL (null ext_dups) (dupExtVars ext_dups)
       ; mapM lint_bind binds }
  where
    flags = LF { lf_check_global_ids = check_globals
               , lf_check_inline_loop_breakers = check_lbs }
    -- See Note [Checking for global Ids]
    check_globals = case pass of
                      CoreTidy -> False
                      CorePrep -> False
                      _ -> True
    -- See Note [Checking for INLINE loop breakers]
    check_lbs = case pass of
                  CoreDesugar -> False
                  CoreDesugarOpt -> False
                  _ -> True
    binders = bindersOfBinds binds
    (_, dups) = removeDups compare binders
    -- dups_ext checks for names with different uniques
    -- but the same External name M.n. We don't
    -- allow this at top level:
    -- M.n{r3} = ...
    -- M.n{r29} = ...
    -- because they both get the same linker symbol
    ext_dups = snd (removeDups ord_ext (map Var.varName binders))
    ord_ext n1 n2 | Just m1 <- nameModule_maybe n1
                  , Just m2 <- nameModule_maybe n2
                  = compare (m1, nameOccName n1) (m2, nameOccName n2)
                  | otherwise = LT
    -- If you edit this function, you may need to update the GHC formalism
    -- See Note [GHC Formalism]
    lint_bind (Rec prs) = mapM_ (lintSingleBinding TopLevel Recursive) prs
    lint_bind (NonRec bndr rhs) = lintSingleBinding TopLevel NonRecursive (bndr,rhs)
{-
************************************************************************
* *
\subsection[lintUnfolding]{lintUnfolding}
* *
************************************************************************
We use this to check all unfoldings that come in from interfaces
(it is very painful to catch errors otherwise):
-}
lintUnfolding :: DynFlags
              -> SrcLoc
              -> [Var]          -- Treat these as in scope
              -> CoreExpr
              -> Maybe MsgDoc   -- Nothing => OK
-- Lint an unfolding read in from an interface file, reporting any
-- errors as a single combined message; warnings are discarded.
lintUnfolding dflags locn vars expr
  = if isEmptyBag errs
    then Nothing
    else Just (pprMessageBag errs)
  where
    (_warns, errs)
      = initL dflags defaultLintFlags $
        addLoc (ImportedUnfolding locn) $
        addInScopeVars vars $
        lintCoreExpr expr
lintExpr :: DynFlags
         -> [Var]          -- Treat these as in scope
         -> CoreExpr
         -> Maybe MsgDoc   -- Nothing => OK
-- Lint a stand-alone expression, reporting any errors as a single
-- combined message; warnings are discarded.
lintExpr dflags vars expr
  = if isEmptyBag errs
    then Nothing
    else Just (pprMessageBag errs)
  where
    (_warns, errs)
      = initL dflags defaultLintFlags $
        addLoc TopLevelBindings $
        addInScopeVars vars $
        lintCoreExpr expr
{-
************************************************************************
* *
\subsection[lintCoreBinding]{lintCoreBinding}
* *
************************************************************************
Check a core binding, returning the list of variables bound.
-}
lintSingleBinding :: TopLevelFlag -> RecFlag -> (Id, CoreExpr) -> LintM ()
-- Lint one (binder, rhs) pair: check the rhs, check the binder's type
-- against the rhs type, and enforce the CoreSyn binding invariants.
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintSingleBinding top_lvl_flag rec_flag (binder,rhs)
  = addLoc (RhsOf binder) $
         -- Check the rhs
    do { ty <- lintCoreExpr rhs
       ; lintBinder binder -- Check match to RHS type
       ; binder_ty <- applySubstTy binder_ty
       ; checkTys binder_ty ty (mkRhsMsg binder (ptext (sLit "RHS")) ty)
        -- Check the let/app invariant
        -- See Note [CoreSyn let/app invariant] in CoreSyn
       ; checkL (not (isUnLiftedType binder_ty)
            || (isNonRec rec_flag && exprOkForSpeculation rhs))
           (mkRhsPrimMsg binder rhs)
        -- Check that if the binder is top-level or recursive, it's not demanded
       ; checkL (not (isStrictId binder)
            || (isNonRec rec_flag && not (isTopLevel top_lvl_flag)))
           (mkStrictMsg binder)
        -- Check that if the binder is local, it is not marked as exported
       ; checkL (not (isExportedId binder) || isTopLevel top_lvl_flag)
           (mkNonTopExportedMsg binder)
        -- Check that if the binder is local, it does not have an external name
       ; checkL (not (isExternalName (Var.varName binder)) || isTopLevel top_lvl_flag)
           (mkNonTopExternalNameMsg binder)
        -- Check whether binder's specialisations contain any out-of-scope variables
       ; mapM_ (checkBndrIdInScope binder) bndr_vars
       ; flags <- getLintFlags
       ; when (lf_check_inline_loop_breakers flags
               && isStrongLoopBreaker (idOccInfo binder)
               && isInlinePragma (idInlinePragma binder))
              (addWarnL (ptext (sLit "INLINE binder is (non-rule) loop breaker:") <+> ppr binder))
              -- Only non-rule loop breakers inhibit inlining
      -- Check whether arity and demand type are consistent (only if demand analysis
      -- already happened)
      --
      -- Note (Apr 2014): this is actually ok.  See Note [Demand analysis for trivial right-hand sides]
      --    in DmdAnal.  After eta-expansion in CorePrep the rhs is no longer trivial.
      --       ; let dmdTy = idStrictness binder
      --       ; checkL (case dmdTy of
      --                  StrictSig dmd_ty -> idArity binder >= dmdTypeDepth dmd_ty || exprIsTrivial rhs)
      --           (mkArityMsg binder)
       -- Check that the binder's arity is within the bounds imposed by
       -- the type and the strictness signature. See Note [exprArity invariant]
       -- and Note [Trimming arity]
       ; checkL (idArity binder <= length (typeArity (idType binder)))
           (ptext (sLit "idArity") <+> ppr (idArity binder) <+>
            ptext (sLit "exceeds typeArity") <+>
            ppr (length (typeArity (idType binder))) <> colon <+>
            ppr binder)
       ; case splitStrictSig (idStrictness binder) of
           (demands, result_info) | isBotRes result_info ->
             checkL (idArity binder <= length demands)
               (ptext (sLit "idArity") <+> ppr (idArity binder) <+>
                ptext (sLit "exceeds arity imposed by the strictness signature") <+>
                ppr (idStrictness binder) <> colon <+>
                ppr binder)
           _ -> return ()
       ; lintIdUnfolding binder binder_ty (idUnfolding binder) }
          -- We should check the unfolding, if any, but this is tricky because
          -- the unfolding is a SimplifiableCoreExpr. Give up for now.
  where
    binder_ty = idType binder
    bndr_vars = varSetElems (idFreeVars binder)
    -- If you edit this function, you may need to update the GHC formalism
    -- See Note [GHC Formalism]
    lintBinder var | isId var  = lintIdBndr var $ \_ -> (return ())
                   | otherwise = return ()
lintIdUnfolding :: Id -> Type -> Unfolding -> LintM ()
-- Check a *stable* unfolding against the type of its binder;
-- other unfoldings are left unchecked.
lintIdUnfolding bndr bndr_ty unf
  | CoreUnfolding { uf_tmpl = unf_rhs, uf_src = src } <- unf
  , isStableSource src
  = do { unf_ty <- lintCoreExpr unf_rhs
       ; checkTys bndr_ty unf_ty (mkRhsMsg bndr (ptext (sLit "unfolding")) unf_ty) }
lintIdUnfolding _ _ _ = return ()   -- We could check more
{-
Note [Checking for INLINE loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very suspicious if a strong loop breaker is marked INLINE.
However, the desugarer generates instance methods with INLINE pragmas
that form a mutually recursive group. Only after a round of
simplification are they unravelled. So we suppress the test for
the desugarer.
************************************************************************
* *
\subsection[lintCoreExpr]{lintCoreExpr}
* *
************************************************************************
-}
-- Naming convention: 'In' types may still mention variables that the
-- lint substitution will rename; 'Out' types have had the substitution
-- applied; 'Linted' types have additionally been checked by lintType/lintKind.
--type InKind      = Kind       -- Substitution not yet applied
type InType      = Type
type InCoercion  = Coercion
type InVar       = Var
type InTyVar     = TyVar
type OutKind     = Kind -- Substitution has been applied to this,
                        -- but has not been linted yet
type LintedKind  = Kind -- Substitution applied, and type is linted
type OutType     = Type -- Substitution has been applied to this,
                        -- but has not been linted yet
type LintedType  = Type -- Substitution applied, and type is linted
type OutCoercion = Coercion
type OutVar      = Var
type OutTyVar    = TyVar
lintCoreExpr :: CoreExpr -> LintM OutType
-- The returned type has the substitution from the monad
-- already applied to it:
--      lintCoreExpr e subst = exprType (subst e)
--
-- The returned "type" can be a kind, if the expression is (Type ty)
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintCoreExpr (Var var)
  = do { checkL (not (var == oneTupleDataConId))
                (ptext (sLit "Illegal one-tuple"))
       ; checkL (isId var && not (isCoVar var))
                (ptext (sLit "Non term variable") <+> ppr var)
       ; checkDeadIdOcc var
       ; var' <- lookupIdInScope var
       ; return (idType var') }
lintCoreExpr (Lit lit)
  = return (literalType lit)
lintCoreExpr (Cast expr co)
  = do { expr_ty <- lintCoreExpr expr
       ; co' <- applySubstCo co
       ; (_, from_ty, to_ty, r) <- lintCoercion co'
         -- A cast coercion must be representational, and its LHS type
         -- must match the type of the expression being cast
       ; checkRole co' Representational r
       ; checkTys from_ty expr_ty (mkCastErr expr co' from_ty expr_ty)
       ; return to_ty }
lintCoreExpr (Tick (Breakpoint _ ids) expr)
  = do forM_ ids $ \id -> do
         checkDeadIdOcc id
         lookupIdInScope id
       lintCoreExpr expr
lintCoreExpr (Tick _other_tickish expr)
  = lintCoreExpr expr
lintCoreExpr (Let (NonRec tv (Type ty)) body)
  | isTyVar tv
  =     -- See Note [Linting type lets]
    do { ty' <- applySubstTy ty
       ; lintTyBndr tv $ \ tv' ->
         do { addLoc (RhsOf tv) $ checkTyKind tv' ty'
                -- Now extend the substitution so we
                -- take advantage of it in the body
            ; extendSubstL tv' ty' $
              addLoc (BodyOfLetRec [tv]) $
              lintCoreExpr body } }
lintCoreExpr (Let (NonRec bndr rhs) body)
  | isId bndr
  = do { lintSingleBinding NotTopLevel NonRecursive (bndr,rhs)
       ; addLoc (BodyOfLetRec [bndr])
                (lintAndScopeId bndr $ \_ -> (lintCoreExpr body)) }
  | otherwise
  = failWithL (mkLetErr bndr rhs)        -- Not quite accurate
lintCoreExpr (Let (Rec pairs) body)
  = lintAndScopeIds bndrs $ \_ ->
    do { checkL (null dups) (dupVars dups)
       ; mapM_ (lintSingleBinding NotTopLevel Recursive) pairs
       ; addLoc (BodyOfLetRec bndrs) (lintCoreExpr body) }
  where
    bndrs = map fst pairs
    (_, dups) = removeDups compare bndrs
lintCoreExpr e@(App _ _)
  = do { fun_ty <- lintCoreExpr fun
       ; addLoc (AnExpr e) $ foldM lintCoreArg fun_ty args }
  where
    (fun, args) = collectArgs e
lintCoreExpr (Lam var expr)
  = addLoc (LambdaBodyOf var) $
    lintBinder var $ \ var' ->
    do { body_ty <- lintCoreExpr expr
       ; if isId var' then
            return (mkFunTy (idType var') body_ty)
         else
            return (mkForAllTy var' body_ty)
       }
        -- The applySubstTy is needed to apply the subst to var
lintCoreExpr e@(Case scrut var alt_ty alts) =
       -- Check the scrutinee
  do { scrut_ty <- lintCoreExpr scrut
     ; alt_ty <- lintInTy alt_ty
     ; var_ty <- lintInTy (idType var)
     -- See Note [No alternatives lint check]
     ; when (null alts) $
       do { checkL (not (exprIsHNF scrut))
                   (ptext (sLit "No alternatives for a case scrutinee in head-normal form:") <+> ppr scrut)
          ; checkL (exprIsBottom scrut)
                   (ptext (sLit "No alternatives for a case scrutinee not known to diverge for sure:") <+> ppr scrut)
          }
       -- Debug-build warning only: a case binder whose algebraic type
       -- has no constructors at all (legitimate for type families)
     ; case tyConAppTyCon_maybe (idType var) of
         Just tycon
           | debugIsOn &&
             isAlgTyCon tycon &&
             not (isFamilyTyCon tycon || isAbstractTyCon tycon) &&
             null (tyConDataCons tycon) ->
               pprTrace "Lint warning: case binder's type has no constructors" (ppr var <+> ppr (idType var))
                        -- This can legitimately happen for type families
                    $ return ()
         _otherwise -> return ()
        -- Don't use lintIdBndr on var, because unboxed tuple is legitimate
     ; subst <- getTvSubst
     ; checkTys var_ty scrut_ty (mkScrutMsg var var_ty scrut_ty subst)
     ; lintAndScopeId var $ \_ ->
       do { -- Check the alternatives
            mapM_ (lintCoreAlt scrut_ty alt_ty) alts
          ; checkCaseAlts e scrut_ty alts
          ; return alt_ty } }
-- This case can't happen; linting types in expressions gets routed through
-- lintCoreArgs
lintCoreExpr (Type ty)
  = pprPanic "lintCoreExpr" (ppr ty)
lintCoreExpr (Coercion co)
  = do { (_kind, ty1, ty2, role) <- lintInCo co
       ; return (mkCoercionType role ty1 ty2) }
{-
Note [Kind instantiation in coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following coercion axiom:
ax_co [(k_ag :: BOX), (f_aa :: k_ag -> Constraint)] :: T k_ag f_aa ~ f_aa
Consider the following instantiation:
ax_co <* -> *> <Monad>
We need to split the co_ax_tvs into kind and type variables in order
to find out the coercion kind instantiations. Those can only be Refl
since we don't have kind coercions. This is just a way to represent
kind instantiation.
We use the number of kind variables to know how to split the coercions
instantiations between kind coercions and type coercions. We lint the
kind coercions and produce the following substitution which is to be
applied in the type variables:
k_ag ~~> * -> *
Note [No alternatives lint check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Case expressions with no alternatives are odd beasts, and worth looking at
in the linter (cf Trac #10180). We check two things:
* exprIsHNF is false: certainly, it would be terribly wrong if the
scrutinee was already in head normal form.
* exprIsBottom is true: we should be able to see why GHC believes the
scrutinee is diverging for sure.
In principle, the first check is redundant: exprIsBottom == True will
always imply exprIsHNF == False. But the first check is reliable: If
exprIsHNF == True, then there definitely is a problem (exprIsHNF errs
on the right side). If the second check triggers then it may be the
case that the compiler got smarter elsewhere, and the empty case is
correct, but that exprIsBottom is unable to see it. In particular, the
empty-type check in exprIsBottom is an approximation. Therefore, this
check is not fully reliable, and we keep both around.
************************************************************************
* *
\subsection[lintCoreArgs]{lintCoreArgs}
* *
************************************************************************
The basic version of these functions checks that the argument is a
subtype of the required type, as one would expect.
-}
lintCoreArg  :: OutType -> CoreArg -> LintM OutType
-- Given the (substituted) type of the function so far, lint one more
-- argument and return the type of the resulting application.
lintCoreArg fun_ty (Type arg_ty)
  = do { arg_ty' <- applySubstTy arg_ty
       ; lintTyApp fun_ty arg_ty' }
lintCoreArg fun_ty arg
  = do { arg_ty <- lintCoreExpr arg
         -- Unlifted arguments must be ok-for-speculation
         -- (the let/app invariant; see Note [CoreSyn let/app invariant])
       ; checkL (not (isUnLiftedType arg_ty) || exprOkForSpeculation arg)
                (mkLetAppMsg arg)
       ; lintValApp arg fun_ty arg_ty }
-----------------
lintAltBinders :: OutType     -- Scrutinee type
               -> OutType     -- Constructor type
               -> [OutVar]    -- Binders
               -> LintM ()
-- Apply the constructor's type to the pattern binders one by one;
-- when all binders are consumed, the residue must equal the scrutinee type.
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintAltBinders scrut_ty con_ty []
  = checkTys con_ty scrut_ty (mkBadPatMsg con_ty scrut_ty)
lintAltBinders scrut_ty con_ty (bndr:bndrs)
  | isTyVar bndr
  = do { con_ty' <- lintTyApp con_ty (mkTyVarTy bndr)
       ; lintAltBinders scrut_ty con_ty' bndrs }
  | otherwise
  = do { con_ty' <- lintValApp (Var bndr) con_ty (idType bndr)
       ; lintAltBinders scrut_ty con_ty' bndrs }
-----------------
lintTyApp :: OutType -> OutType -> LintM OutType
-- Compute the result type of applying 'fun_ty' to the type argument
-- 'arg_ty': 'fun_ty' must be a forall over a type variable whose kind
-- accepts 'arg_ty'.
lintTyApp fun_ty arg_ty
  = case splitForAllTy_maybe fun_ty of
      Just (tyvar, body_ty)
        | isTyVar tyvar
        -> do { checkTyKind tyvar arg_ty
              ; return (substTyWith [tyvar] [arg_ty] body_ty) }
      _ -> failWithL (mkTyAppMsg fun_ty arg_ty)
-----------------
lintValApp :: CoreExpr -> OutType -> OutType -> LintM OutType
-- Compute the result type of applying a function of type 'fun_ty' to a
-- value argument 'arg' whose (already-linted) type is 'arg_ty'.
lintValApp arg fun_ty arg_ty
  = case splitFunTy_maybe fun_ty of
      Just (expected_ty, res_ty)
        -> do { checkTys expected_ty arg_ty (mkAppMsg fun_ty arg_ty arg)
              ; return res_ty }
      Nothing
        -> failWithL (mkNonFunAppMsg fun_ty arg_ty arg)
checkTyKind :: OutTyVar -> OutType -> LintM ()
-- Check that a type argument is acceptable for the given forall binder.
-- Both args have had substitution applied
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
checkTyKind tyvar arg_ty
  | isSuperKind tyvar_kind  -- kind forall
  = lintKind arg_ty
        -- Arg type might be boxed for a function with an uncommitted
        -- tyvar; notably this is used so that we can give
        --      error :: forall a:*. String -> a
        -- and then apply it to both boxed and unboxed types.
  | otherwise  -- type forall
  = do { arg_kind <- lintType arg_ty
       ; unless (arg_kind `isSubKind` tyvar_kind)
                (addErrL (mkKindErrMsg tyvar arg_ty $$ (text "xx" <+> ppr arg_kind))) }
  where
    tyvar_kind = tyVarKind tyvar
checkDeadIdOcc :: Id -> LintM ()
-- Occurrences of an Id should never be dead....
-- except when we are checking a case pattern
checkDeadIdOcc id
  | isDeadOcc (idOccInfo id)
  = do { in_case <- inCasePat
       ; checkL in_case
                (ptext (sLit "Occurrence of a dead Id") <+> ppr id) }
  | otherwise
  = return ()
{-
************************************************************************
* *
\subsection[lintCoreAlts]{lintCoreAlts}
* *
************************************************************************
-}
checkCaseAlts :: CoreExpr -> OutType -> [CoreAlt] -> LintM ()
-- a) Check that the alts are non-empty
-- b1) Check that the DEFAULT comes first, if it exists
-- b2) Check that the others are in increasing order
-- c) Check that there's a default for infinite types
-- NB: Algebraic cases are not necessarily exhaustive, because
--     the simplifer correctly eliminates case that can't
--     possibly match.
checkCaseAlts e ty alts =
  do { checkL (all non_deflt con_alts) (mkNonDefltMsg e)
     ; checkL (increasing_tag con_alts) (mkNonIncreasingAltsMsg e)
     -- For types Int#, Word# with an infinite (well, large!) number of
     -- possible values, there should usually be a DEFAULT case
     -- But (see Note [Empty case alternatives] in CoreSyn) it's ok to
     -- have *no* case alternatives.
     -- In effect, this is a kind of partial test. I suppose it's possible
     -- that we might *know* that 'x' was 1 or 2, in which case
     --   case x of { 1 -> e1; 2 -> e2 }
     -- would be fine.
     ; checkL (isJust maybe_deflt || not is_infinite_ty || null alts)
              (nonExhaustiveAltsMsg e) }
  where
    (con_alts, maybe_deflt) = findDefault alts
    -- Check that successive alternatives have increasing tags
    increasing_tag (alt1 : rest@( alt2 : _)) = alt1 `ltAlt` alt2 && increasing_tag rest
    increasing_tag _                         = True
    non_deflt (DEFAULT, _, _) = False
    non_deflt _               = True
    -- Primitive types (Int#, Word#, ...) behave as if they had
    -- infinitely many constructors
    is_infinite_ty = case tyConAppTyCon_maybe ty of
                        Nothing    -> False
                        Just tycon -> isPrimTyCon tycon
checkAltExpr :: CoreExpr -> OutType -> LintM ()
-- Lint the right-hand side of a case alternative and check that its
-- type agrees with the annotated type of the whole case expression.
checkAltExpr expr ann_ty
  = do { rhs_ty <- lintCoreExpr expr
       ; checkTys rhs_ty ann_ty (mkCaseAltMsg expr rhs_ty ann_ty) }
lintCoreAlt :: OutType          -- Type of scrutinee
            -> OutType          -- Type of the alternative
            -> CoreAlt
            -> LintM ()
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintCoreAlt _ alt_ty (DEFAULT, args, rhs) =
  do { checkL (null args) (mkDefaultArgsMsg args)
     ; checkAltExpr rhs alt_ty }
lintCoreAlt scrut_ty alt_ty (LitAlt lit, args, rhs)
  | litIsLifted lit
  = failWithL integerScrutinisedMsg
  | otherwise
  = do { checkL (null args) (mkDefaultArgsMsg args)
       ; checkTys lit_ty scrut_ty (mkBadPatMsg lit_ty scrut_ty)
       ; checkAltExpr rhs alt_ty }
  where
    lit_ty = literalType lit
lintCoreAlt scrut_ty alt_ty alt@(DataAlt con, args, rhs)
  | isNewTyCon (dataConTyCon con)
  = addErrL (mkNewTyDataConAltMsg scrut_ty alt)
  | Just (tycon, tycon_arg_tys) <- splitTyConApp_maybe scrut_ty
  = addLoc (CaseAlt alt) $ do
    {   -- First instantiate the universally quantified
        -- type variables of the data constructor
        -- We've already checked (just above) that the scrutinee type
        -- is an application of a type constructor
      checkL (tycon == dataConTyCon con) (mkBadConMsg tycon con)
    ; let con_payload_ty = applyTys (dataConRepType con) tycon_arg_tys
        -- And now bring the new binders into scope
    ; lintBinders args $ \ args' -> do
    { addLoc (CasePat alt) (lintAltBinders scrut_ty con_payload_ty args')
    ; checkAltExpr rhs alt_ty } }
  | otherwise   -- Scrut-ty is wrong shape
  = addErrL (mkBadAltMsg scrut_ty alt)
{-
************************************************************************
* *
\subsection[lint-types]{Types}
* *
************************************************************************
-}
-- When we lint binders, we (one at a time and in order):
-- 1. Lint var types or kinds (possibly substituting)
-- 2. Add the binder to the in scope set, and if its a coercion var,
-- we may extend the substitution to reflect its (possibly) new kind
lintBinders :: [Var] -> ([Var] -> LintM a) -> LintM a
-- Lint a list of binders left-to-right, bringing each one into scope
-- for the remainder of the list and for the continuation.
lintBinders [] linterF = linterF []
lintBinders (var:vars) linterF = lintBinder var $ \var' ->
                                 lintBinders vars $ \ vars' ->
                                 linterF (var':vars')
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintBinder :: Var -> (Var -> LintM a) -> LintM a
-- Dispatch on whether the binder is a term variable or a type variable.
lintBinder var linterF
  | isId var  = lintIdBndr var linterF
  | otherwise = lintTyBndr var linterF
lintTyBndr :: InTyVar -> (OutTyVar -> LintM a) -> LintM a
-- Substitute the tyvar binder (possibly renaming it), lint its kind,
-- and run the continuation under the extended substitution.
lintTyBndr tv thing_inside
  = do { subst <- getTvSubst
       ; let (subst', tv') = Type.substTyVarBndr subst tv
       ; lintTyBndrKind tv'
       ; updateTvSubst subst' (thing_inside tv') }
lintIdBndr :: Id -> (Id -> LintM a) -> LintM a
-- Do substitution on the type of a binder and add the var with this
-- new type to the in-scope set of the second argument
-- ToDo: lint its rules
lintIdBndr id linterF = lintAndScopeId id linterF
lintAndScopeIds :: [Var] -> ([Var] -> LintM a) -> LintM a
-- Lint a list of Id binders left to right, bringing each one into
-- scope for those that follow and for the continuation.
lintAndScopeIds []     linterF = linterF []
lintAndScopeIds (v:vs) linterF
  = lintAndScopeId v $ \ v' ->
    lintAndScopeIds vs $ \ vs' ->
    linterF (v' : vs')
lintAndScopeId :: InVar -> (OutVar -> LintM a) -> LintM a
-- Lint an Id binder: check it is local (when required by the flags),
-- lint/substitute its type, and run the continuation with the updated
-- Id in scope.
lintAndScopeId id linterF
  = do { flags <- getLintFlags
       ; checkL (not (lf_check_global_ids flags) || isLocalId id)
                (ptext (sLit "Non-local Id binder") <+> ppr id)
                -- See Note [Checking for global Ids]
       ; ty <- lintInTy (idType id)
       ; let id' = setIdType id ty
       ; addInScopeVar id' $ (linterF id') }
{-
************************************************************************
* *
Types and kinds
* *
************************************************************************
We have a single linter for types and kinds. That is convenient
because sometimes it's not clear whether the thing we are looking
at is a type or a kind.
-}
lintInTy :: InType -> LintM LintedType
-- Types only, not kinds
-- Check the type, and apply the substitution to it
-- See Note [Linting type lets]
lintInTy ty
  = addLoc (InType ty) $
    do { ty' <- applySubstTy ty
       ; _k  <- lintType ty'
       ; return ty' }
-------------------
lintTyBndrKind :: OutTyVar -> LintM ()
-- Lint the kind of a (substituted) type-variable binder.
-- Handles both type and kind foralls.
lintTyBndrKind tv = lintKind (tyVarKind tv)
-------------------
lintType :: OutType -> LintM LintedKind
-- The returned Kind has itself been linted
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintType (TyVarTy tv)
  = do { checkTyCoVarInScope tv
       ; return (tyVarKind tv) }
         -- We checked its kind when we added it to the envt
lintType ty@(AppTy t1 t2)
  = do { k1 <- lintType t1
       ; k2 <- lintType t2
       ; lint_ty_app ty k1 [(t2,k2)] }
lintType ty@(FunTy t1 t2) -- (->) has two different rules, for types and kinds
  = do { k1 <- lintType t1
       ; k2 <- lintType t2
       ; lintArrow (ptext (sLit "type or kind") <+> quotes (ppr ty)) k1 k2 }
lintType ty@(TyConApp tc tys)
  | Just ty' <- coreView ty
  = lintType ty'   -- Expand type synonyms, so that we do not bogusly complain
                   -- about un-saturated type synonyms
  | isUnLiftedTyCon tc || isTypeSynonymTyCon tc || isTypeFamilyTyCon tc
       -- See Note [The kind invariant] in TypeRep
       -- Also type synonyms and type families
  , length tys < tyConArity tc
  = failWithL (hang (ptext (sLit "Un-saturated type application")) 2 (ppr ty))
  | otherwise
  = do { ks <- mapM lintType tys
       ; lint_ty_app ty (tyConKind tc) (tys `zip` ks) }
lintType (ForAllTy tv ty)
  = do { lintTyBndrKind tv
       ; addInScopeVar tv (lintType ty) }
lintType ty@(LitTy l) = lintTyLit l >> return (typeKind ty)
lintKind :: OutKind -> LintM ()
-- Check that a kind really is a kind (i.e. its "kind" is a superkind).
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintKind k = do { sk <- lintType k
                ; unless (isSuperKind sk)
                         (addErrL (hang (ptext (sLit "Ill-kinded kind:") <+> ppr k)
                                      2 (ptext (sLit "has kind:") <+> ppr sk))) }
lintArrow :: SDoc -> LintedKind -> LintedKind -> LintM LintedKind
-- Check the kinds of argument and result of an arrow (FunTy), and
-- return the kind of the arrow type itself.
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintArrow what k1 k2   -- Eg lintArrow "type or kind `blah'" k1 k2
                       -- or lintarrow "coercion `blah'" k1 k2
  | isSuperKind k1
  = return superKind
  | otherwise
  = do { unless (okArrowArgKind k1)    (addErrL (msg (ptext (sLit "argument")) k1))
       ; unless (okArrowResultKind k2) (addErrL (msg (ptext (sLit "result"))   k2))
       ; return liftedTypeKind }
  where
    msg ar k
      = vcat [ hang (ptext (sLit "Ill-kinded") <+> ar)
                  2 (ptext (sLit "in") <+> what)
             , what <+> ptext (sLit "kind:") <+> ppr k ]
lint_ty_app :: Type -> LintedKind -> [(LintedType,LintedKind)] -> LintM LintedKind
-- Specialisation of lint_app for type applications
lint_ty_app ty k tys
  = lint_app (ptext (sLit "type") <+> quotes (ppr ty)) k tys
----------------
lint_co_app :: Coercion -> LintedKind -> [(LintedType,LintedKind)] -> LintM LintedKind
-- Specialisation of lint_app for coercion applications
lint_co_app ty k tys
  = lint_app (ptext (sLit "coercion") <+> quotes (ppr ty)) k tys
----------------
lintTyLit :: TyLit -> LintM ()
-- Numeric type literals must be non-negative; string literals are
-- always well-formed.
lintTyLit (NumTyLit n)
  | n >= 0    = return ()
  | otherwise = failWithL msg
    where msg = ptext (sLit "Negative type literal:") <+> integer n
lintTyLit (StrTyLit _) = return ()
lint_app :: SDoc -> LintedKind -> [(LintedType,LintedKind)] -> LintM Kind
-- (lint_app d fun_kind arg_tys)
--    We have an application (f arg_ty1 .. arg_tyn),
--    where f :: fun_kind
-- Takes care of linting the OutTypes
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lint_app doc kfn kas
  = foldlM go_app kfn kas
  where
    fail_msg = vcat [ hang (ptext (sLit "Kind application error in")) 2 doc
                    , nest 2 (ptext (sLit "Function kind =") <+> ppr kfn)
                    , nest 2 (ptext (sLit "Arg kinds =") <+> ppr kas) ]
    -- NB: the coreView clause must come first, so synonyms are
    -- expanded before we match on FunTy/ForAllTy
    go_app kfn ka
      | Just kfn' <- coreView kfn
      = go_app kfn' ka
    go_app (FunTy kfa kfb) (_,ka)
      = do { unless (ka `isSubKind` kfa) (addErrL fail_msg)
           ; return kfb }
    go_app (ForAllTy kv kfn) (ta,ka)
      = do { unless (ka `isSubKind` tyVarKind kv) (addErrL fail_msg)
           ; return (substKiWith [kv] [ta] kfn) }
    go_app _ _ = failWithL fail_msg
{-
************************************************************************
* *
Linting coercions
* *
************************************************************************
-}
lintInCo :: InCoercion -> LintM (LintedKind, LintedType, LintedType, Role)
-- Apply the ambient substitution to a coercion, then lint the result.
-- See Note [Linting type lets]
lintInCo co
  = addLoc (InCo co) (applySubstCo co >>= lintCoercion)
lintCoercion :: OutCoercion -> LintM (LintedKind, LintedType, LintedType, Role)
-- Check the kind of a coercion term, returning the kind
-- Post-condition: the returned OutTypes are lint-free
--                 and have the same kind as each other
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintCoercion (Refl r ty)
  = do { k <- lintType ty
       ; return (k, ty, ty, r) }
lintCoercion co@(TyConAppCo r tc cos)
  | tc `hasKey` funTyConKey
  , [co1,co2] <- cos
  = -- Arrow is handled specially: exactly two argument coercions
    do { (k1,s1,t1,r1) <- lintCoercion co1
       ; (k2,s2,t2,r2) <- lintCoercion co2
       ; rk <- lintArrow (ptext (sLit "coercion") <+> quotes (ppr co)) k1 k2
       ; checkRole co1 r r1
       ; checkRole co2 r r2
       ; return (rk, mkFunTy s1 s2, mkFunTy t1 t2, r) }
  | Just {} <- synTyConDefn_maybe tc
  = failWithL (ptext (sLit "Synonym in TyConAppCo:") <+> ppr co)
  | otherwise
  = do { (ks,ss,ts,rs) <- mapAndUnzip4M lintCoercion cos
       ; rk <- lint_co_app co (tyConKind tc) (ss `zip` ks)
       ; _ <- zipWith3M checkRole cos (tyConRolesX r tc) rs
       ; return (rk, mkTyConApp tc ss, mkTyConApp tc ts, r) }
lintCoercion co@(AppCo co1 co2)
  = do { (k1,s1,t1,r1) <- lintCoercion co1
       ; (k2,s2,t2,r2) <- lintCoercion co2
       ; rk <- lint_co_app co k1 [(s2,k2)]
       ; if r1 == Phantom
         then checkL (r2 == Phantom || r2 == Nominal)
                     (ptext (sLit "Second argument in AppCo cannot be R:") $$
                      ppr co)
         else checkRole co Nominal r2
       ; return (rk, mkAppTy s1 s2, mkAppTy t1 t2, r1) }
lintCoercion (ForAllCo tv co)
  = do { lintTyBndrKind tv
       ; (k, s, t, r) <- addInScopeVar tv (lintCoercion co)
       ; return (k, mkForAllTy tv s, mkForAllTy tv t, r) }
lintCoercion (CoVarCo cv)
  | not (isCoVar cv)
  = failWithL (hang (ptext (sLit "Bad CoVarCo:") <+> ppr cv)
                  2 (ptext (sLit "With offending type:") <+> ppr (varType cv)))
  | otherwise
  = do { checkTyCoVarInScope cv
       ; cv' <- lookupIdInScope cv
       ; let (s,t) = coVarKind cv'
             k     = typeKind s
             r     = coVarRole cv'
         -- Kind equalities must be nominal and reflexive
       ; when (isSuperKind k) $
         do { checkL (r == Nominal) (hang (ptext (sLit "Non-nominal kind equality"))
                                        2 (ppr cv))
            ; checkL (s `eqKind` t) (hang (ptext (sLit "Non-refl kind equality"))
                                        2 (ppr cv)) }
       ; return (k, s, t, r) }
-- See Note [Bad unsafe coercion]
lintCoercion (UnivCo _prov r ty1 ty2)
  = do { k1 <- lintType ty1
       ; k2 <- lintType ty2
--       ; unless (k1 `eqKind` k2) $
--         failWithL (hang (ptext (sLit "Unsafe coercion changes kind"))
--                       2 (ppr co))
       ; when (r /= Phantom && isSubOpenTypeKind k1 && isSubOpenTypeKind k2)
              (checkTypes ty1 ty2)
       ; return (k1, ty1, ty2, r) }
  where
    report s = hang (text $ "Unsafe coercion between " ++ s)
                  2 (vcat [ text "From:" <+> ppr ty1
                          , text " To:" <+> ppr ty2])
    isUnBoxed :: PrimRep -> Bool
    isUnBoxed PtrRep = False
    isUnBoxed _      = True
    -- Warn about unsafe coercions whose representations plainly disagree
    checkTypes t1 t2
      = case (repType t1, repType t2) of
          (UnaryRep _, UnaryRep _) ->
             validateCoercion (typePrimRep t1)
                              (typePrimRep t2)
          (UbxTupleRep rep1, UbxTupleRep rep2) -> do
             checkWarnL (length rep1 == length rep2)
                        (report "unboxed tuples of different length")
             zipWithM_ checkTypes rep1 rep2
          _ -> addWarnL (report "unboxed tuple and ordinary type")
    validateCoercion :: PrimRep -> PrimRep -> LintM ()
    validateCoercion rep1 rep2
      = do { dflags <- getDynFlags
           ; checkWarnL (isUnBoxed rep1 == isUnBoxed rep2)
                        (report "unboxed and boxed value")
           ; checkWarnL (TyCon.primRepSizeW dflags rep1
                          == TyCon.primRepSizeW dflags rep2)
                        (report "unboxed values of different size")
           ; let fl = liftM2 (==) (TyCon.primRepIsFloat rep1)
                                  (TyCon.primRepIsFloat rep2)
           ; case fl of
               Nothing    -> addWarnL (report "vector types")
               Just False -> addWarnL (report "float and integral values")
               _          -> return ()
           }
lintCoercion (SymCo co)
  = do { (k, ty1, ty2, r) <- lintCoercion co
       ; return (k, ty2, ty1, r) }
lintCoercion co@(TransCo co1 co2)
  = do { (k1, ty1a, ty1b, r1) <- lintCoercion co1
       ; (_,  ty2a, ty2b, r2) <- lintCoercion co2
       ; checkL (ty1b `eqType` ty2a)
                (hang (ptext (sLit "Trans coercion mis-match:") <+> ppr co)
                    2 (vcat [ppr ty1a, ppr ty1b, ppr ty2a, ppr ty2b]))
       ; checkRole co r1 r2
       ; return (k1, ty1a, ty2b, r1) }
lintCoercion the_co@(NthCo n co)
  = do { (_,s,t,r) <- lintCoercion co
       ; case (splitTyConApp_maybe s, splitTyConApp_maybe t) of
           (Just (tc_s, tys_s), Just (tc_t, tys_t))
             | tc_s == tc_t
             , isDistinctTyCon tc_s || r /= Representational
                 -- see Note [NthCo and newtypes] in Coercion
             , tys_s `equalLength` tys_t
             , n < length tys_s
             -> return (ks, ts, tt, tr)
             where
               ts = getNth tys_s n
               tt = getNth tys_t n
               tr = nthRole r tc_s n
               ks = typeKind ts
           _ -> failWithL (hang (ptext (sLit "Bad getNth:"))
                              2 (ppr the_co $$ ppr s $$ ppr t)) }
lintCoercion the_co@(LRCo lr co)
  = do { (_,s,t,r) <- lintCoercion co
       ; checkRole co Nominal r
       ; case (splitAppTy_maybe s, splitAppTy_maybe t) of
           (Just s_pr, Just t_pr)
             -> return (k, s_pick, t_pick, Nominal)
             where
               s_pick = pickLR lr s_pr
               t_pick = pickLR lr t_pr
               k = typeKind s_pick
           _ -> failWithL (hang (ptext (sLit "Bad LRCo:"))
                              2 (ppr the_co $$ ppr s $$ ppr t)) }
lintCoercion (InstCo co arg_ty)
  = do { (k,s,t,r) <- lintCoercion co
       ; arg_kind  <- lintType arg_ty
       ; case (splitForAllTy_maybe s, splitForAllTy_maybe t) of
          (Just (tv1,ty1), Just (tv2,ty2))
            | arg_kind `isSubKind` tyVarKind tv1
            -> return (k, substTyWith [tv1] [arg_ty] ty1,
                          substTyWith [tv2] [arg_ty] ty2, r)
            | otherwise
            -> failWithL (ptext (sLit "Kind mis-match in inst coercion"))
          _ -> failWithL (ptext (sLit "Bad argument of inst")) }
lintCoercion co@(AxiomInstCo con ind cos)
  = do { unless (0 <= ind && ind < brListLength (coAxiomBranches con))
                (bad_ax (ptext (sLit "index out of range")))
         -- See Note [Kind instantiation in coercions]
       ; let CoAxBranch { cab_tvs   = ktvs
                        , cab_roles = roles
                        , cab_lhs   = lhs
                        , cab_rhs   = rhs } = coAxiomNthBranch con ind
       ; unless (equalLength ktvs cos) (bad_ax (ptext (sLit "lengths")))
       ; in_scope <- getInScope
       ; let empty_subst = mkTvSubst in_scope emptyTvSubstEnv
       ; (subst_l, subst_r) <- foldlM check_ki
                                      (empty_subst, empty_subst)
                                      (zip3 ktvs roles cos)
       ; let lhs' = Type.substTys subst_l lhs
             rhs' = Type.substTy subst_r rhs
       ; case checkAxInstCo co of
           Just bad_branch -> bad_ax $ ptext (sLit "inconsistent with") <+> (pprCoAxBranch (coAxiomTyCon con) bad_branch)
           Nothing -> return ()
       ; return (typeKind rhs', mkTyConApp (coAxiomTyCon con) lhs', rhs', coAxiomRole con) }
  where
    bad_ax what = addErrL (hang (ptext (sLit "Bad axiom application") <+> parens what)
                              2 (ppr co))
    -- Check one (tyvar, role, coercion) triple, extending the
    -- left and right substitutions as we go
    check_ki (subst_l, subst_r) (ktv, role, co)
      = do { (k, t1, t2, r) <- lintCoercion co
           ; checkRole co role r
           ; let ktv_kind = Type.substTy subst_l (tyVarKind ktv)
                 -- Using subst_l is ok, because subst_l and subst_r
                 -- must agree on kind equalities
           ; unless (k `isSubKind` ktv_kind)
                    (bad_ax (ptext (sLit "check_ki2") <+> vcat [ ppr co, ppr k, ppr ktv, ppr ktv_kind ] ))
           ; return (Type.extendTvSubst subst_l ktv t1,
                     Type.extendTvSubst subst_r ktv t2) }
lintCoercion co@(SubCo co')
  = do { (k,s,t,r) <- lintCoercion co'
       ; checkRole co Nominal r
       ; return (k,s,t,Representational) }
lintCoercion this@(AxiomRuleCo co ts cs)
  = do _ks <- mapM lintType ts
       eqs <- mapM lintCoercion cs
       -- Check that the number of type arguments matches the rule's arity
       let tyNum = length ts
       case compare (coaxrTypeArity co) tyNum of
         EQ -> return ()
         LT -> err "Too many type arguments"
                    [ txt "expected" <+> int (coaxrTypeArity co)
                    , txt "provided" <+> int tyNum ]
         GT -> err "Not enough type arguments"
                    [ txt "expected" <+> int (coaxrTypeArity co)
                    , txt "provided" <+> int tyNum ]
       checkRoles 0 (coaxrAsmpRoles co) eqs
       case coaxrProves co ts [ Pair l r | (_,l,r,_) <- eqs ] of
         Nothing -> err "Malformed use of AxiomRuleCo" [ ppr this ]
         Just (Pair l r) ->
           do kL <- lintType l
              kR <- lintType r
              unless (eqKind kL kR)
                $ err "Kind error in CoAxiomRule"
                      [ppr kL <+> txt "/=" <+> ppr kR]
              return (kL, l, r, coaxrRole co)
  where
    txt = ptext . sLit
    err m xs = failWithL $
               hang (txt m) 2 $ vcat (txt "Rule:" <+> ppr (coaxrName co) : xs)
    -- Check the role of each assumption coercion against the rule's
    -- expected roles, pairwise, reporting any arity mismatch
    checkRoles n (e : es) ((_,_,_,r) : rs)
      | e == r    = checkRoles (n+1) es rs
      | otherwise = err "Argument roles mismatch"
                        [ txt "In argument:" <+> int (n+1)
                        , txt "Expected:" <+> ppr e
                        , txt "Found:" <+> ppr r ]
    checkRoles _ [] [] = return ()
    checkRoles n [] rs = err "Too many coercion arguments"
                             [ txt "Expected:" <+> int n
                             , txt "Provided:" <+> int (n + length rs) ]
    checkRoles n es [] = err "Not enough coercion arguments"
                             [ txt "Expected:" <+> int (n + length es)
                             , txt "Provided:" <+> int n ]
{-
************************************************************************
* *
\subsection[lint-monad]{The Lint monad}
* *
************************************************************************
-}
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism]
-- | Read-only environment threaded through every lint check.
data LintEnv
= LE { le_flags :: LintFlags -- Linting the result of this pass
, le_loc :: [LintLocInfo] -- Locations
, le_subst :: TvSubst -- Current type substitution; we also use this
-- to keep track of all the variables in scope,
-- both Ids and TyVars
, le_dynflags :: DynFlags -- DynamicFlags
}
-- | Per-pass switches controlling which checks are performed.
data LintFlags
= LF { lf_check_global_ids :: Bool -- See Note [Checking for global Ids]
, lf_check_inline_loop_breakers :: Bool -- See Note [Checking for INLINE loop breakers]
}
-- | Default flag settings: no global-Id check, but do check INLINE loop breakers.
defaultLintFlags :: LintFlags
defaultLintFlags = LF { lf_check_global_ids = False
, lf_check_inline_loop_breakers = True }
-- | The lint monad: a function from the environment and the messages
-- accumulated so far to a result (Nothing on hard failure) plus the
-- updated messages.  Messages survive a failure.
newtype LintM a =
LintM { unLintM ::
LintEnv ->
WarnsAndErrs -> -- Error and warning messages so far
(Maybe a, WarnsAndErrs) } -- Result and messages (if any)
-- | (warnings, errors) accumulated during linting.
type WarnsAndErrs = (Bag MsgDoc, Bag MsgDoc)
{- Note [Checking for global Ids]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Before CoreTidy, all locally-bound Ids must be LocalIds, even
top-level ones. See Note [Exported LocalIds] and Trac #9857.
Note [Type substitution]
~~~~~~~~~~~~~~~~~~~~~~~~
Why do we need a type substitution? Consider
/\(a:*). \(x:a). /\(a:*). id a x
This is ill typed, because (renaming variables) it is really
/\(a:*). \(x:a). /\(b:*). id b x
Hence, when checking an application, we can't naively compare x's type
(at its binding site) with its expected type (at a use site). So we
rename type binders as we go, maintaining a substitution.
The same substitution also supports let-type, currently expressed as
(/\(a:*). body) ty
Here we substitute 'ty' for 'a' in 'body', on the fly.
-}
-- Pre-AMP style: Functor and Applicative are defined via the Monad
-- instance, where the real implementation lives.
instance Functor LintM where
fmap = liftM
instance Applicative LintM where
pure = return
(<*>) = ap
instance Monad LintM where
return x = LintM (\ _ errs -> (Just x, errs))
fail err = failWithL (text err)
-- Bind short-circuits on a Nothing result, but keeps the messages
-- accumulated by the failing computation.
m >>= k = LintM (\ env errs ->
let (res, errs') = unLintM m env errs in
case res of
Just r -> unLintM (k r) env errs'
Nothing -> (Nothing, errs'))
instance HasDynFlags LintM where
getDynFlags = LintM (\ e errs -> (Just (le_dynflags e), errs))
-- | A breadcrumb describing where in the program we currently are;
-- the environment keeps a stack of these for error messages.
data LintLocInfo
= RhsOf Id -- The variable bound
| LambdaBodyOf Id -- The lambda-binder
| BodyOfLetRec [Id] -- One of the binders
| CaseAlt CoreAlt -- Case alternative
| CasePat CoreAlt -- The *pattern* of the case alternative
| AnExpr CoreExpr -- Some expression
| ImportedUnfolding SrcLoc -- Some imported unfolding (ToDo: say which)
| TopLevelBindings
| InType Type -- Inside a type
| InCo Coercion -- Inside a coercion
-- | Run a lint computation from an empty environment, discarding the
-- result and returning only the accumulated warnings and errors.
initL :: DynFlags -> LintFlags -> LintM a -> WarnsAndErrs -- Errors and warnings
initL dflags flags m
= case unLintM m env (emptyBag, emptyBag) of
(_, errs) -> errs
where
env = LE { le_flags = flags, le_subst = emptyTvSubst, le_loc = [], le_dynflags = dflags }
-- | Read the 'LintFlags' out of the lint environment.
getLintFlags :: LintM LintFlags
getLintFlags = LintM (\ env errs -> (Just (le_flags env), errs))

-- | Abort the current check with @msg@ unless the condition holds.
checkL :: Bool -> MsgDoc -> LintM ()
checkL ok msg
  | ok        = return ()
  | otherwise = failWithL msg

-- | Record a warning with @msg@ unless the condition holds; never aborts.
checkWarnL :: Bool -> MsgDoc -> LintM ()
checkWarnL ok msg
  | ok        = return ()
  | otherwise = addWarnL msg
-- | Record an error and abort the current check (result is Nothing).
failWithL :: MsgDoc -> LintM a
failWithL msg = LintM $ \ env (warns,errs) ->
(Nothing, (warns, addMsg env errs msg))
-- | Record an error but keep going.
addErrL :: MsgDoc -> LintM ()
addErrL msg = LintM $ \ env (warns,errs) ->
(Just (), (warns, addMsg env errs msg))
-- | Record a warning and keep going.
addWarnL :: MsgDoc -> LintM ()
addWarnL msg = LintM $ \ env (warns,errs) ->
(Just (), (addMsg env warns msg, errs))
-- | Decorate a message with the current location context (and, in debug
-- style, the full location stack and substitution) and append it to the bag.
addMsg :: LintEnv -> Bag MsgDoc -> MsgDoc -> Bag MsgDoc
addMsg env msgs msg
= ASSERT( notNull locs )
msgs `snocBag` mk_msg msg
where
locs = le_loc env
(loc, cxt1) = dumpLoc (head locs)
cxts = [snd (dumpLoc loc) | loc <- locs]
context | opt_PprStyle_Debug = vcat (reverse cxts) $$ cxt1 $$
ptext (sLit "Substitution:") <+> ppr (le_subst env)
| otherwise = cxt1
mk_msg msg = mkLocMessage SevWarning (mkSrcSpan loc loc) (context $$ msg)
-- | Run @m@ with an extra location breadcrumb pushed on the stack.
addLoc :: LintLocInfo -> LintM a -> LintM a
addLoc extra_loc m
= LintM $ \ env errs ->
unLintM m (env { le_loc = extra_loc : le_loc env }) errs
-- | True iff the innermost location is a case pattern.
inCasePat :: LintM Bool -- A slight hack; see the unique call site
inCasePat = LintM $ \ env errs -> (Just (is_case_pat env), errs)
where
is_case_pat (LE { le_loc = CasePat {} : _ }) = True
is_case_pat _other = False
-- | Run @m@ with the given variables added to the in-scope set.
addInScopeVars :: [Var] -> LintM a -> LintM a
addInScopeVars vars m
= LintM $ \ env errs ->
unLintM m (env { le_subst = extendTvInScopeList (le_subst env) vars })
errs
-- | Single-variable version of 'addInScopeVars'.
addInScopeVar :: Var -> LintM a -> LintM a
addInScopeVar var m
= LintM $ \ env errs ->
unLintM m (env { le_subst = extendTvInScope (le_subst env) var }) errs
-- | Run @m@ with the substitution extended by tv |-> ty.
extendSubstL :: TyVar -> Type -> LintM a -> LintM a
extendSubstL tv ty m
= LintM $ \ env errs ->
unLintM m (env { le_subst = Type.extendTvSubst (le_subst env) tv ty }) errs
-- | Run @m@ with the substitution replaced wholesale.
updateTvSubst :: TvSubst -> LintM a -> LintM a
updateTvSubst subst' m
= LintM $ \ env errs -> unLintM m (env { le_subst = subst' }) errs
-- | Read the current type substitution.
getTvSubst :: LintM TvSubst
getTvSubst = LintM (\ env errs -> (Just (le_subst env), errs))
-- | Read the in-scope set carried by the current substitution.
getInScope :: LintM InScopeSet
getInScope = LintM (\ env errs -> (Just (getTvInScope (le_subst env)), errs))
-- | Apply the current substitution to a type.
applySubstTy :: InType -> LintM OutType
applySubstTy ty = do { subst <- getTvSubst; return (Type.substTy subst ty) }
-- | Apply the current substitution to a coercion.
applySubstCo :: InCoercion -> LintM OutCoercion
applySubstCo co = do { subst <- getTvSubst; return (substCo (tvCvSubst subst) co) }
-- | Look up an Id in the in-scope set; imported Ids pass through unchanged.
-- An out-of-scope local Id is reported but returned as-is so linting can
-- continue.
lookupIdInScope :: Id -> LintM Id
lookupIdInScope id
| not (mustHaveLocalBinding id)
= return id -- An imported Id
| otherwise
= do { subst <- getTvSubst
; case lookupInScope (getTvInScope subst) id of
Just v -> return v
Nothing -> do { addErrL out_of_scope
; return id } }
where
out_of_scope = pprBndr LetBind id <+> ptext (sLit "is out of scope")
oneTupleDataConId :: Id -- Should not happen
oneTupleDataConId = dataConWorkId (tupleDataCon Boxed 1)
-- | Check that @id@ (mentioned in @binder@'s info) is in scope.
checkBndrIdInScope :: Var -> Var -> LintM ()
checkBndrIdInScope binder id
= checkInScope msg id
where
msg = ptext (sLit "is out of scope inside info for") <+>
ppr binder
checkTyCoVarInScope :: Var -> LintM ()
checkTyCoVarInScope v = checkInScope (ptext (sLit "is out of scope")) v
-- | Fail with @loc_msg@ if a local variable is not in the in-scope set.
checkInScope :: SDoc -> Var -> LintM ()
checkInScope loc_msg var =
do { subst <- getTvSubst
; checkL (not (mustHaveLocalBinding var) || (var `isInScope` subst))
(hsep [pprBndr LetBind var, loc_msg]) }
checkTys :: OutType -> OutType -> MsgDoc -> LintM ()
-- check ty2 is subtype of ty1 (ie, has same structure but usage
-- annotations need only be consistent, not equal)
-- Assumes ty1,ty2 have already had the substitution applied
checkTys ty1 ty2 msg = checkL (ty1 `eqType` ty2) msg
-- | Fail unless the actual role matches the expected one.
checkRole :: Coercion
-> Role -- expected
-> Role -- actual
-> LintM ()
checkRole co r1 r2
= checkL (r1 == r2)
(ptext (sLit "Role incompatibility: expected") <+> ppr r1 <> comma <+>
ptext (sLit "got") <+> ppr r2 $$
ptext (sLit "in") <+> ppr co)
{-
************************************************************************
* *
\subsection{Error messages}
* *
************************************************************************
-}
-- | Render a location breadcrumb as a source location plus a description;
-- breadcrumbs with no natural source position use 'noSrcLoc'.
dumpLoc :: LintLocInfo -> (SrcLoc, SDoc)
dumpLoc (RhsOf v)
= (getSrcLoc v, brackets (ptext (sLit "RHS of") <+> pp_binders [v]))
dumpLoc (LambdaBodyOf b)
= (getSrcLoc b, brackets (ptext (sLit "in body of lambda with binder") <+> pp_binder b))
dumpLoc (BodyOfLetRec [])
= (noSrcLoc, brackets (ptext (sLit "In body of a letrec with no binders")))
dumpLoc (BodyOfLetRec bs@(_:_))
= ( getSrcLoc (head bs), brackets (ptext (sLit "in body of letrec with binders") <+> pp_binders bs))
dumpLoc (AnExpr e)
= (noSrcLoc, text "In the expression:" <+> ppr e)
dumpLoc (CaseAlt (con, args, _))
= (noSrcLoc, text "In a case alternative:" <+> parens (ppr con <+> pp_binders args))
dumpLoc (CasePat (con, args, _))
= (noSrcLoc, text "In the pattern of a case alternative:" <+> parens (ppr con <+> pp_binders args))
dumpLoc (ImportedUnfolding locn)
= (locn, brackets (ptext (sLit "in an imported unfolding")))
dumpLoc TopLevelBindings
= (noSrcLoc, Outputable.empty)
dumpLoc (InType ty)
= (noSrcLoc, text "In the type" <+> quotes (ppr ty))
dumpLoc (InCo co)
= (noSrcLoc, text "In the coercion" <+> quotes (ppr co))
-- | Comma-separated list of binders with their types/kinds.
pp_binders :: [Var] -> SDoc
pp_binders bs = sep (punctuate comma (map pp_binder bs))
-- | One binder with its type (Ids) or kind (TyVars).
pp_binder :: Var -> SDoc
pp_binder b | isId b = hsep [ppr b, dcolon, ppr (idType b)]
| otherwise = hsep [ppr b, dcolon, ppr (tyVarKind b)]
------------------------------------------------------
-- Messages for case expressions
-- Message constructors for case-expression lint failures.
mkDefaultArgsMsg :: [Var] -> MsgDoc
mkDefaultArgsMsg args
= hang (text "DEFAULT case with binders")
4 (ppr args)
mkCaseAltMsg :: CoreExpr -> Type -> Type -> MsgDoc
mkCaseAltMsg e ty1 ty2
= hang (text "Type of case alternatives not the same as the annotation on case:")
4 (vcat [ppr ty1, ppr ty2, ppr e])
mkScrutMsg :: Id -> Type -> Type -> TvSubst -> MsgDoc
mkScrutMsg var var_ty scrut_ty subst
= vcat [text "Result binder in case doesn't match scrutinee:" <+> ppr var,
text "Result binder type:" <+> ppr var_ty,--(idType var),
text "Scrutinee type:" <+> ppr scrut_ty,
hsep [ptext (sLit "Current TV subst"), ppr subst]]
mkNonDefltMsg, mkNonIncreasingAltsMsg :: CoreExpr -> MsgDoc
-- | Error: the DEFAULT alternative is not the first alternative of the case.
-- (Message typo fixed: "beginnning" -> "beginning".)
mkNonDefltMsg e
= hang (text "Case expression with DEFAULT not at the beginning") 4 (ppr e)
-- | Error: the case alternatives are not in ascending order.
mkNonIncreasingAltsMsg e
= hang (text "Case expression with badly-ordered alternatives") 4 (ppr e)
-- More case-expression message constructors.
nonExhaustiveAltsMsg :: CoreExpr -> MsgDoc
nonExhaustiveAltsMsg e
= hang (text "Case expression with non-exhaustive alternatives") 4 (ppr e)
mkBadConMsg :: TyCon -> DataCon -> MsgDoc
mkBadConMsg tycon datacon
= vcat [
text "In a case alternative, data constructor isn't in scrutinee type:",
text "Scrutinee type constructor:" <+> ppr tycon,
text "Data con:" <+> ppr datacon
]
mkBadPatMsg :: Type -> Type -> MsgDoc
mkBadPatMsg con_result_ty scrut_ty
= vcat [
text "In a case alternative, pattern result type doesn't match scrutinee type:",
text "Pattern result type:" <+> ppr con_result_ty,
text "Scrutinee type:" <+> ppr scrut_ty
]
integerScrutinisedMsg :: MsgDoc
integerScrutinisedMsg
= text "In a LitAlt, the literal is lifted (probably Integer)"
mkBadAltMsg :: Type -> CoreAlt -> MsgDoc
mkBadAltMsg scrut_ty alt
= vcat [ text "Data alternative when scrutinee is not a tycon application",
text "Scrutinee type:" <+> ppr scrut_ty,
text "Alternative:" <+> pprCoreAlt alt ]
mkNewTyDataConAltMsg :: Type -> CoreAlt -> MsgDoc
mkNewTyDataConAltMsg scrut_ty alt
= vcat [ text "Data alternative for newtype datacon",
text "Scrutinee type:" <+> ppr scrut_ty,
text "Alternative:" <+> pprCoreAlt alt ]
------------------------------------------------------
-- Other error messages
-- Message constructors for application, let, and binder lint failures.
mkAppMsg :: Type -> Type -> CoreExpr -> MsgDoc
mkAppMsg fun_ty arg_ty arg
= vcat [ptext (sLit "Argument value doesn't match argument type:"),
hang (ptext (sLit "Fun type:")) 4 (ppr fun_ty),
hang (ptext (sLit "Arg type:")) 4 (ppr arg_ty),
hang (ptext (sLit "Arg:")) 4 (ppr arg)]
mkNonFunAppMsg :: Type -> Type -> CoreExpr -> MsgDoc
mkNonFunAppMsg fun_ty arg_ty arg
= vcat [ptext (sLit "Non-function type in function position"),
hang (ptext (sLit "Fun type:")) 4 (ppr fun_ty),
hang (ptext (sLit "Arg type:")) 4 (ppr arg_ty),
hang (ptext (sLit "Arg:")) 4 (ppr arg)]
mkLetErr :: TyVar -> CoreExpr -> MsgDoc
mkLetErr bndr rhs
= vcat [ptext (sLit "Bad `let' binding:"),
hang (ptext (sLit "Variable:"))
4 (ppr bndr <+> dcolon <+> ppr (varType bndr)),
hang (ptext (sLit "Rhs:"))
4 (ppr rhs)]
mkTyAppMsg :: Type -> Type -> MsgDoc
mkTyAppMsg ty arg_ty
= vcat [text "Illegal type application:",
hang (ptext (sLit "Exp type:"))
4 (ppr ty <+> dcolon <+> ppr (typeKind ty)),
hang (ptext (sLit "Arg type:"))
4 (ppr arg_ty <+> dcolon <+> ppr (typeKind arg_ty))]
-- | @what@ names the thing being compared against (e.g. RHS or unfolding).
mkRhsMsg :: Id -> SDoc -> Type -> MsgDoc
mkRhsMsg binder what ty
= vcat
[hsep [ptext (sLit "The type of this binder doesn't match the type of its") <+> what <> colon,
ppr binder],
hsep [ptext (sLit "Binder's type:"), ppr (idType binder)],
hsep [ptext (sLit "Rhs type:"), ppr ty]]
mkLetAppMsg :: CoreExpr -> MsgDoc
mkLetAppMsg e
= hang (ptext (sLit "This argument does not satisfy the let/app invariant:"))
2 (ppr e)
mkRhsPrimMsg :: Id -> CoreExpr -> MsgDoc
mkRhsPrimMsg binder _rhs
= vcat [hsep [ptext (sLit "The type of this binder is primitive:"),
ppr binder],
hsep [ptext (sLit "Binder's type:"), ppr (idType binder)]
]
mkStrictMsg :: Id -> MsgDoc
mkStrictMsg binder
= vcat [hsep [ptext (sLit "Recursive or top-level binder has strict demand info:"),
ppr binder],
hsep [ptext (sLit "Binder's demand info:"), ppr (idDemandInfo binder)]
]
mkNonTopExportedMsg :: Id -> MsgDoc
mkNonTopExportedMsg binder
= hsep [ptext (sLit "Non-top-level binder is marked as exported:"), ppr binder]
mkNonTopExternalNameMsg :: Id -> MsgDoc
mkNonTopExternalNameMsg binder
= hsep [ptext (sLit "Non-top-level binder has an external name:"), ppr binder]
mkKindErrMsg :: TyVar -> Type -> MsgDoc
mkKindErrMsg tyvar arg_ty
= vcat [ptext (sLit "Kinds don't match in type application:"),
hang (ptext (sLit "Type variable:"))
4 (ppr tyvar <+> dcolon <+> ppr (tyVarKind tyvar)),
hang (ptext (sLit "Arg type:"))
4 (ppr arg_ty <+> dcolon <+> ppr (typeKind arg_ty))]
{- Not needed now
mkArityMsg :: Id -> MsgDoc
mkArityMsg binder
= vcat [hsep [ptext (sLit "Demand type has"),
ppr (dmdTypeDepth dmd_ty),
ptext (sLit "arguments, rhs has"),
ppr (idArity binder),
ptext (sLit "arguments,"),
ppr binder],
hsep [ptext (sLit "Binder's strictness signature:"), ppr dmd_ty]
]
where (StrictSig dmd_ty) = idStrictness binder
-}
-- | Cast whose coercion's from-type disagrees with the expression's type.
mkCastErr :: CoreExpr -> Coercion -> Type -> Type -> MsgDoc
mkCastErr expr co from_ty expr_ty
= vcat [ptext (sLit "From-type of Cast differs from type of enclosed expression"),
ptext (sLit "From-type:") <+> ppr from_ty,
ptext (sLit "Type of enclosed expr:") <+> ppr expr_ty,
ptext (sLit "Actual enclosed expr:") <+> ppr expr,
ptext (sLit "Coercion used in cast:") <+> ppr co
]
-- | Groups of variables bound more than once in the same scope.
dupVars :: [[Var]] -> MsgDoc
dupVars vars
= hang (ptext (sLit "Duplicate variables brought into scope"))
2 (ppr vars)
-- | Groups of top-level names sharing the same qualified name.
dupExtVars :: [[Name]] -> MsgDoc
dupExtVars vars
= hang (ptext (sLit "Duplicate top-level variables with the same qualified name"))
2 (ppr vars)
{-
************************************************************************
* *
\subsection{Annotation Linting}
* *
************************************************************************
-}
-- | This checks whether a pass correctly looks through debug
-- annotations (@SourceNote@). This works a bit different from other
-- consistency checks: We check this by running the given task twice,
-- noting all differences between the results.
-- | Run @pass@ normally, and (when -fdo-annotation-linting is on) a second
-- time with debug annotations stripped; any difference between the two sets
-- of bindings is reported as a warning.  The annotated result is returned
-- either way.
lintAnnots :: SDoc -> (ModGuts -> CoreM ModGuts) -> ModGuts -> CoreM ModGuts
lintAnnots pname pass guts = do
-- Run the pass as we normally would
dflags <- getDynFlags
when (gopt Opt_DoAnnotationLinting dflags) $
liftIO $ Err.showPass dflags "Annotation linting - first run"
nguts <- pass guts
-- If appropriate re-run it without debug annotations to make sure
-- that they made no difference.
when (gopt Opt_DoAnnotationLinting dflags) $ do
liftIO $ Err.showPass dflags "Annotation linting - second run"
nguts' <- withoutAnnots pass guts
-- Finally compare the resulting bindings
liftIO $ Err.showPass dflags "Annotation linting - comparison"
let binds = flattenBinds $ mg_binds nguts
binds' = flattenBinds $ mg_binds nguts'
(diffs,_) = diffBinds True (mkRnEnv2 emptyInScopeSet) binds binds'
when (not (null diffs)) $ CoreMonad.putMsg $ vcat
[ lint_banner "warning" pname
, text "Core changes with annotations:"
, withPprStyle defaultDumpStyle $ nest 2 $ vcat diffs
]
-- Return actual new guts
return nguts
-- | Run the given pass without annotations. This means that we both
-- remove the @Opt_Debug@ flag from the environment as well as all
-- annotations from incoming modules.
-- | Run @pass@ in an environment with Opt_Debug unset and with all
-- non-code ticks stripped from the module's bindings.
withoutAnnots :: (ModGuts -> CoreM ModGuts) -> ModGuts -> CoreM ModGuts
withoutAnnots pass guts = do
-- Remove debug flag from environment.
dflags <- getDynFlags
-- NOTE(review): 'removeFlag' uses the outer 'dflags' rather than
-- 'hsc_dflags env'; presumably identical here -- confirm.
let removeFlag env = env{hsc_dflags = gopt_unset dflags Opt_Debug}
withoutFlag corem =
liftIO =<< runCoreM <$> fmap removeFlag getHscEnv <*> getRuleBase <*>
getUniqueSupplyM <*> getModule <*>
getPrintUnqualified <*> pure corem
-- Nuke existing ticks in module.
-- TODO: Ticks in unfoldings. Maybe change unfolding so it removes
-- them in absence of @Opt_Debug@?
let nukeTicks = stripTicksE (not . tickishIsCode)
nukeAnnotsBind :: CoreBind -> CoreBind
nukeAnnotsBind bind = case bind of
Rec bs -> Rec $ map (\(b,e) -> (b, nukeTicks e)) bs
NonRec b e -> NonRec b $ nukeTicks e
nukeAnnotsMod mg@ModGuts{mg_binds=binds}
= mg{mg_binds = map nukeAnnotsBind binds}
-- Perform pass with all changes applied
fmap fst $ withoutFlag $ pass (nukeAnnotsMod guts)
| fmthoma/ghc | compiler/coreSyn/CoreLint.hs | bsd-3-clause | 73,272 | 50 | 25 | 21,183 | 17,024 | 8,622 | 8,402 | 1,126 | 17 |
{-# OPTIONS -fno-warn-unused-imports #-}
{-# LANGUAGE ForeignFunctionInterface #-}
#ifdef __HASTE__
{-# LANGUAGE OverloadedStrings #-}
#endif
#include "HsConfigure.h"
-- #hide
module Data.Time.LocalTime.TimeZone
(
-- * Time zones
TimeZone(..),timeZoneOffsetString,timeZoneOffsetString',minutesToTimeZone,hoursToTimeZone,utc,
-- getting the locale time zone
getTimeZone,getCurrentTimeZone
) where
--import System.Time.Calendar.Format
import Data.Time.Calendar.Private
import Data.Time.Clock
import Data.Time.Clock.POSIX
#if __GLASGOW_HASKELL__ >= 709
import Foreign
#else
import Foreign.Safe
#endif
import Foreign.C
import Control.DeepSeq
import Data.Typeable
#if LANGUAGE_Rank2Types
import Data.Data
#endif
#ifdef __HASTE__
import Data.Time.Calendar (toGregorian)
import Haste.Prim.JSType (toString)
import Haste.Prim.Foreign
#endif
-- | A TimeZone is a whole number of minutes offset from UTC, together with a name and a \"just for summer\" flag.
data TimeZone = TimeZone {
-- | The number of minutes offset from UTC. Positive means local time will be later in the day than UTC.
timeZoneMinutes :: Int,
-- | Is this time zone just persisting for the summer?
timeZoneSummerOnly :: Bool,
-- | The name of the zone, typically a three- or four-letter acronym.
timeZoneName :: String
} deriving (Eq,Ord
#if LANGUAGE_DeriveDataTypeable
#if LANGUAGE_Rank2Types
,Data, Typeable
#endif
#endif
)
-- | Force all three fields; the offset, flag and name are fully evaluated.
instance NFData TimeZone where
rnf (TimeZone m so n) = m `deepseq` so `deepseq` n `deepseq` ()
-- | Create a nameless non-summer timezone for this number of minutes
minutesToTimeZone :: Int -> TimeZone
minutesToTimeZone mins = TimeZone mins False ""

-- | Create a nameless non-summer timezone for this number of hours
hoursToTimeZone :: Int -> TimeZone
hoursToTimeZone h = minutesToTimeZone (h * 60)

-- | Render an unsigned offset in minutes as an HHMM digit group,
-- padding the four-digit field according to the given option.
showT :: NumericPadOption -> Int -> String
showT pad mins =
    let (h, m) = mins `divMod` 60
    in show4 pad (h * 100 + m)
-- | Text representing the offset of this timezone, such as \"-0800\" or \"+0400\" (like %z in formatTime), with arbitrary padding
timeZoneOffsetString' :: NumericPadOption -> TimeZone -> String
timeZoneOffsetString' pad (TimeZone mins _ _)
    | mins < 0  = '-' : showT pad (negate mins)
    | otherwise = '+' : showT pad mins

-- | Text representing the offset of this timezone, such as \"-0800\" or \"+0400\" (like %z in formatTime)
timeZoneOffsetString :: TimeZone -> String
timeZoneOffsetString = timeZoneOffsetString' (Just '0')
-- | A zone with an empty name shows as its numeric offset; otherwise
-- the name itself is used.
instance Show TimeZone where
show zone@(TimeZone _ _ "") = timeZoneOffsetString zone
show (TimeZone _ _ name) = name
-- | The UTC time zone
utc :: TimeZone
utc = TimeZone 0 False "UTC"
#ifdef __HASTE__
-- JS is doing it wrong WRT time zone offsets, so we have to negate it.
tzOffMins :: Int -> Int -> Int -> IO Int
tzOffMins = ffi "(function(y,m,d){return -(new Date(y,m,d).getTimezoneOffset());})"
-- | True iff the offset differs between December and June, i.e. the zone
-- observes DST at all (per the JS Date API).
tzIsSummerOnly :: IO Bool
tzIsSummerOnly = ffi "(function(){\
var d = new Date();\
d.setMonth(11);\
var off = d.getTimezoneOffset();\
d.setMonth(5);\
return off != d.getTimezoneOffset();\
})"
-- | Get the local time-zone for a given time (varying as per summertime adjustments)
getTimeZone :: UTCTime -> IO TimeZone
getTimeZone (UTCTime t _) = do
off <- tzOffMins (fromInteger y) m d
summer <- tzIsSummerOnly
-- Synthesise a "GMT[+-]H[:MM]" name from the offset in minutes
let (hs, ms) = abs off `quotRem` 60
utcoff = if ms == 0
then toString hs
else toString hs ++ ":" ++ toString ms
name =
case True of
_ | off < 0 -> "GMT-" ++ utcoff
| off > 0 -> "GMT+" ++ utcoff
| otherwise -> "GMT"
return $ TimeZone off summer name
where
(y, m, d) = toGregorian t
#else
-- | Truncate a POSIX time to whole seconds for the C API.
posixToCTime :: POSIXTime -> CTime
posixToCTime = fromInteger . floor
{-# CFILES cbits/HsTime.c #-}
foreign import ccall unsafe "HsTime.h get_current_timezone_seconds" get_current_timezone_seconds :: CTime -> Ptr CInt -> Ptr CString -> IO CLong
-- | Get the local time-zone for a given time (varying as per summertime adjustments)
getTimeZone :: UTCTime -> IO TimeZone
getTimeZone time = with 0 (\pdst -> with nullPtr (\pcname -> do
secs <- get_current_timezone_seconds (posixToCTime (utcTimeToPOSIXSeconds time)) pdst pcname
-- 0x80000000 is the failure sentinel checked here; presumably set by the
-- C helper when localtime_r fails -- see cbits/HsTime.c
case secs of
0x80000000 -> fail "localtime_r failed"
_ -> do
dst <- peek pdst
cname <- peek pcname
name <- peekCString cname
return (TimeZone (div (fromIntegral secs) 60) (dst == 1) name)
))
#endif
-- | Get the current time-zone
getCurrentTimeZone :: IO TimeZone
getCurrentTimeZone = do
    now <- getCurrentTime
    getTimeZone now
| beni55/haste-compiler | libraries/time/lib/Data/Time/LocalTime/TimeZone.hs | bsd-3-clause | 4,552 | 24 | 16 | 847 | 882 | 491 | 391 | 51 | 2 |
{-# LANGUAGE CPP #-}
--------------------------------------------------------------------------------
--
-- Module : MatrixMul
-- Copyright : (c) 2009 Trevor L. McDonell
-- License : BSD
--
-- Matrix multiplication using driver interface
--
--------------------------------------------------------------------------------
module Main where
#include "matrix_mul.h"
-- Friends
import RandomVector
-- System
import Numeric
import Data.Array
import Control.Exception
import Data.Array.Storable
import Foreign.Storable
import qualified Data.ByteString.Char8 as B
import qualified Foreign.CUDA.Driver as CUDA
-- Return the (width,height) of a matrix
--
getSize :: Storable e => Matrix e -> IO (Int,Int)
getSize m = do
    ((rowLo, colLo), (rowHi, colHi)) <- getBounds m
    -- width = number of columns, height = number of rows
    return (rangeSize (colLo, colHi), rangeSize (rowLo, rowHi))
--------------------------------------------------------------------------------
-- Reference implementation
--------------------------------------------------------------------------------
-- | Reference CPU matrix multiply: freezes both inputs and computes the
-- classic triple-nested product.  Errors out if the inner dimensions differ.
-- NOTE(review): uses unsafeFreeze, so the mutable inputs must not be
-- written to afterwards -- confirm at call sites.
matMult :: (Num e, Storable e) => Matrix e -> Matrix e -> IO (Matrix e)
matMult mx my = do
x <- unsafeFreeze mx
y <- unsafeFreeze my
let ((li, lj), (ui, uj)) = bounds x
((li',lj'),(ui',uj')) = bounds y
resBnds | (lj,uj) == (li',ui') = ((li,lj'),(ui,uj'))
| otherwise = error "matrix dimensions must agree"
newListArray resBnds [sum [x!(i,k) * y!(k,j) | k <- range (lj,uj)]
| i <- range (li,ui)
, j <- range (lj',uj') ]
--------------------------------------------------------------------------------
-- CUDA
--------------------------------------------------------------------------------
--
-- Initialise the device and context. Load the PTX source code, and return a
-- reference to the kernel function.
--
-- | Initialise device 0, create a context, JIT-compile the PTX module
-- and return the context together with the "matrixMul" kernel handle.
initCUDA :: IO (CUDA.Context, CUDA.Fun)
initCUDA = do
CUDA.initialise []
dev <- CUDA.device 0
ctx <- CUDA.create dev []
ptx <- B.readFile "data/matrix_mul.ptx"
(mdl,r) <- CUDA.loadDataEx ptx [CUDA.ThreadsPerBlock (BLOCK_SIZE*BLOCK_SIZE)]
fun <- CUDA.getFun mdl "matrixMul"
-- Report JIT time and any compiler diagnostics
putStrLn $ ">> PTX JIT compilation (" ++ showFFloat (Just 2) (CUDA.jitTime r) " ms)"
B.putStrLn (CUDA.jitInfoLog r)
return (ctx,fun)
--
-- Allocate some memory, and copy over the input data to the device. Should
-- probably catch allocation exceptions individually...
--
-- | Allocate device buffers for both inputs and the result, and copy the
-- input matrices over.  All three buffers are freed if the copy fails.
initData :: (Num e, Storable e)
=> Matrix e -> Matrix e -> IO (CUDA.DevicePtr e, CUDA.DevicePtr e, CUDA.DevicePtr e)
initData xs ys = do
(wx,hx) <- getSize xs
(wy,hy) <- getSize ys
dxs <- CUDA.mallocArray (wx*hx)
dys <- CUDA.mallocArray (wy*hy)
res <- CUDA.mallocArray (wy*hx)
-- On any exception below, release everything we just allocated
flip onException (mapM_ CUDA.free [dxs,dys,res]) $ do
withMatrix xs $ \p -> CUDA.pokeArray (wx*hx) p dxs
withMatrix ys $ \p -> CUDA.pokeArray (wy*hy) p dys
return (dxs, dys, res)
--
-- Run the test
--
-- | Run the GPU multiply.  The 'undefined' dummy is never evaluated; it
-- only pins the element type so 'sizeOf' can size the shared memory.
testCUDA :: (Num e, Storable e) => Matrix e -> Matrix e -> IO (Matrix e)
testCUDA xs' ys' = doTest undefined xs' ys'
where
doTest :: (Num e', Storable e') => e' -> Matrix e' -> Matrix e' -> IO (Matrix e')
doTest dummy xs ys = do
(widthX,heightX) <- getSize xs
(widthY,_) <- getSize ys
((li, lj), (ui, uj)) <- getBounds xs
((li',lj'),(ui',uj')) <- getBounds ys
let resBnds | (lj,uj) == (li',ui') = ((li,lj'),(ui,uj'))
| otherwise = error "matrix dimensions must agree"
-- Initialise environment and copy over test data
--
putStrLn ">> Initialising"
bracket initCUDA (\(ctx,_) -> CUDA.destroy ctx) $ \(_,matMul) -> do
-- Ensure we release the memory, even if there was an error
--
putStrLn ">> Executing"
bracket
(initData xs ys)
(\(dx,dy,dz) -> mapM_ CUDA.free [dx,dy,dz]) $
\(dx,dy,dz) -> do
-- Repeat test many times...
--
CUDA.setParams matMul [CUDA.VArg dx, CUDA.VArg dy, CUDA.VArg dz, CUDA.IArg widthX, CUDA.IArg widthY]
CUDA.setBlockShape matMul (BLOCK_SIZE,BLOCK_SIZE,1)
CUDA.setSharedSize matMul (fromIntegral (2 * BLOCK_SIZE * BLOCK_SIZE * sizeOf dummy))
CUDA.launch matMul (widthY `div` BLOCK_SIZE, heightX `div` BLOCK_SIZE) Nothing
CUDA.sync
-- Copy back result
--
zs <- newArray_ resBnds
withMatrix zs $ \p -> CUDA.peekArray (widthY*heightX) dz p
return zs
--------------------------------------------------------------------------------
-- Test & Verify
--------------------------------------------------------------------------------
-- | Generate two random matrices, compute the CPU reference product,
-- run the CUDA kernel, and compare the two results.
main :: IO ()
main = do
putStrLn "== Generating random matrices"
xs <- randomArr ((1,1),(8*BLOCK_SIZE, 4*BLOCK_SIZE)) :: IO (Matrix Float)
ys <- randomArr ((1,1),(4*BLOCK_SIZE,12*BLOCK_SIZE)) :: IO (Matrix Float)
putStrLn "== Generating reference solution"
ref <- matMult xs ys
putStrLn "== Testing CUDA"
mat <- testCUDA xs ys
putStr "== Validating: "
verify ref mat >>= \rv -> putStrLn $ if rv then "Ok!" else "INVALID!"
| mwu-tow/cuda | examples/src/matrixMulDrv/MatrixMul.hs | bsd-3-clause | 5,101 | 0 | 23 | 1,143 | 1,680 | 881 | 799 | -1 | -1 |
{-# OPTIONS_GHC -Wall #-}
module Main where
import Test.Framework
import GHC.IO.Encoding
import qualified Tests.Old
import qualified Tests.Readers.LaTeX
import qualified Tests.Readers.Markdown
import qualified Tests.Readers.Org
import qualified Tests.Readers.RST
import qualified Tests.Readers.Docx
import qualified Tests.Readers.Txt2Tags
import qualified Tests.Readers.EPUB
import qualified Tests.Writers.ConTeXt
import qualified Tests.Writers.LaTeX
import qualified Tests.Writers.HTML
import qualified Tests.Writers.Docbook
import qualified Tests.Writers.Native
import qualified Tests.Writers.Markdown
import qualified Tests.Writers.Plain
import qualified Tests.Writers.AsciiDoc
import qualified Tests.Shared
import qualified Tests.Walk
import Text.Pandoc.Shared (inDirectory)
import System.Environment (getArgs)
-- | The full pandoc test tree: the legacy golden tests plus per-module
-- groups for the shared utilities, tree walking, writers, and readers.
tests :: [Test]
tests = [ testGroup "Old" Tests.Old.tests
, testGroup "Shared" Tests.Shared.tests
, testGroup "Walk" Tests.Walk.tests
, testGroup "Writers"
[ testGroup "Native" Tests.Writers.Native.tests
, testGroup "ConTeXt" Tests.Writers.ConTeXt.tests
, testGroup "LaTeX" Tests.Writers.LaTeX.tests
, testGroup "HTML" Tests.Writers.HTML.tests
, testGroup "Docbook" Tests.Writers.Docbook.tests
, testGroup "Markdown" Tests.Writers.Markdown.tests
, testGroup "Plain" Tests.Writers.Plain.tests
, testGroup "AsciiDoc" Tests.Writers.AsciiDoc.tests
]
, testGroup "Readers"
[ testGroup "LaTeX" Tests.Readers.LaTeX.tests
, testGroup "Markdown" Tests.Readers.Markdown.tests
, testGroup "Org" Tests.Readers.Org.tests
, testGroup "RST" Tests.Readers.RST.tests
, testGroup "Docx" Tests.Readers.Docx.tests
, testGroup "Txt2Tags" Tests.Readers.Txt2Tags.tests
, testGroup "EPUB" Tests.Readers.EPUB.tests
]
]
-- | Force UTF-8 for all handles, then run the suite from the @tests@
-- directory, forwarding any command-line arguments to test-framework.
main :: IO ()
main = do
  setLocaleEncoding utf8
  cmdLineArgs <- getArgs
  inDirectory "tests" (defaultMainWithArgs tests cmdLineArgs)
| peter-fogg/pardoc | tests/test-pandoc.hs | gpl-2.0 | 2,045 | 0 | 9 | 395 | 445 | 269 | 176 | 50 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
<title>Token Generator | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/tokengen/src/main/javahelp/org/zaproxy/zap/extension/tokengen/resources/help_pl_PL/helpset_pl_PL.hs | apache-2.0 | 976 | 78 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
-- | Define our Wiki data type, routes, and the YesodWiki typeclass. Due to the
-- GHC stage restriction, the routes must be declared in a separate module from
-- our dispatch instance.
module WikiRoutes where
import Control.Monad (liftM)
import Control.Monad.IO.Class (MonadIO)
import Data.IORef (IORef, newIORef)
import Data.Map (Map, empty)
import Yesod
-- | Simple Wiki datatype: just store a Map from Wiki path to the contents of
-- the page.
data Wiki = Wiki
{ wikiContent :: IORef (Map Texts Textarea)
-- ^ mutable page store, keyed by wiki path
}
-- | A typeclass that all master sites that want a Wiki must implement. A
-- master must be able to render form messages, as we use yesod-forms for
-- processing user input.
class (RenderMessage master FormMessage, Yesod master) => YesodWiki master where
-- | Write protection. By default, no protection.
canEditPage :: Texts -> HandlerT master IO Bool
canEditPage _ = return True
-- | Define our routes. We'll have a homepage that lists all of the pages, a
-- read route for reading a page, and an edit route.
mkYesodSubData "Wiki" [parseRoutes|
/ WikiHomeR GET
/read/*Texts WikiReadR GET
/edit/*Texts WikiEditR GET POST
|]
-- | A convenience function for creating an empty Wiki.
newWiki :: MonadIO m => m Wiki
newWiki = liftIO (fmap Wiki (newIORef empty))
| pikajude/yesod | demo/subsite/WikiRoutes.hs | mit | 1,575 | 0 | 11 | 372 | 198 | 117 | 81 | -1 | -1 |
module PrettyJSON (renderJValue) where
import Prettify
import JSONClass
import Numeric (showHex)
import Data.Bits (shiftR, (.&.))
import Char (ord)
-- | Render a 'JValue' as a pretty-printing 'Doc'.  Arrays and objects are
-- rendered as comma-separated series inside brackets/braces.
renderJValue :: JValue -> Doc
renderJValue (JBool True) = text "true"
renderJValue (JBool False) = text "false"
renderJValue JNull = text "null"
renderJValue (JNumber num) = double num
renderJValue (JString str) = string str
renderJValue (JArray (JAry ary)) = series '[' ']' renderJValue ary
renderJValue (JObject (JObj obj)) = series '{' '}' field obj
    where field (name, val) = string name <> text ": " <> renderJValue val
-- | Render a JSON string literal: escape each character and wrap the
-- result in double quotes.
string :: String -> Doc
string raw = enclose '"' '"' (hcat (map oneChar raw))

-- | Wrap a document between two delimiter characters.
enclose :: Char -> Char -> Doc -> Doc
enclose opener closer body = char opener <> body <> char closer
-- | Render one character of a JSON string: use a short escape where one
-- exists, a hex escape for control characters, DEL, or characters above
-- Latin-1, and the raw character otherwise.
oneChar :: Char -> Doc
oneChar c = case lookup c simpleEscapes of
              Just r -> text r
              Nothing | mustEscape c -> hexEscape c
                      | otherwise -> char c
    where
      mustEscape c = c < ' ' || c == '\x7f' || c > '\xff'
-- | The JSON escape sequences with a single-character shorthand form,
-- e.g. '\n' maps to the two-character string "\\n".
simpleEscapes :: [(Char, String)]
simpleEscapes = zipWith escapePair "\b\n\f\r\t\\\"/" "bnfrt\\\"/"
    where escapePair raw short = (raw, ['\\', short])
-- | Render a code point below 0x10000 as a JSON \\uXXXX escape,
-- zero-padded to four hex digits.
smallHex :: Int -> Doc
smallHex x = text "\\u" <> text padding <> text digits
  where digits  = showHex x ""
        padding = replicate (4 - length digits) '0'

-- | Render a code point above 0xFFFF as a UTF-16 surrogate pair of
-- \\u escapes (high surrogate then low surrogate).
astral :: Int -> Doc
astral codePoint = smallHex (hi + 0xd800) <> smallHex (lo + 0xdc00)
  where hi = (codePoint `shiftR` 10) .&. 0x3ff
        lo = codePoint .&. 0x3ff
-- | Hex-escape a character: a single \\uXXXX escape inside the BMP,
-- otherwise a surrogate pair via 'astral'.
hexEscape :: Char -> Doc
hexEscape c
  | codePoint < 0x10000 = smallHex codePoint
  | otherwise           = astral (codePoint - 0x10000)
  where codePoint = ord c

-- | Render items between delimiters, comma-separated and softly wrapped.
series :: Char -> Char -> (a -> Doc) -> [a] -> Doc
series opener closer render items =
  enclose opener closer (fsep (punctuate (char ',') (map render items)))
-- | Append the separator to every element except the last.
punctuate :: Doc -> [Doc] -> [Doc]
punctuate _ []       = []
punctuate sep (d:ds) = case ds of
  [] -> [d]
  _  -> (d <> sep) : punctuate sep ds
| pauldoo/scratch | RealWorldHaskell/ch06/mypretty/PrettyJSON.hs | isc | 1,835 | 0 | 12 | 482 | 793 | 403 | 390 | 46 | 2 |
{-|
Copyright (c) 2014 Maciej Bendkowski
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-}
module BCKWTests where
import Control.Monad (liftM2)
import Test.QuickCheck
import BCKW
import Types
-- | Generator for random BCKW combinator trees of bounded size.  At size 0
-- only the four base combinators are produced; otherwise applications of
-- two half-sized subtrees are generated five times as often as leaves.
arbBCKWTerm :: Integral a => a -> Gen Term
arbBCKWTerm 0 = oneof [return B, return C, return K, return W]
arbBCKWTerm n = frequency
    [(1, return B), (1, return C), (1, return K), (1, return W),
     (5, liftM2 (App) (arbBCKWTerm (n `div` 2)) (arbBCKWTerm (n `div` 2)))]
-- | Random BCKW tree generator, sized by QuickCheck.
instance Arbitrary Term where
    arbitrary = sized arbBCKWTerm
-- | BCKW-terms have non-negative size.
propBCKW_NonnegativeSize :: Term -> Property
propBCKW_NonnegativeSize t = collect (size t) $ (size t) >= 0
-- | Each BCKW-term is a subterm of itself (reflexivity of 'isSubterm').
propBCKW_SubtermItself :: Term -> Property
propBCKW_SubtermItself t = collect (size t) $ t `isSubterm` t
-- | Head reduction reports success (third tuple component True) for any
-- term containing a redex.
propBCKW_ReductRedex :: Term -> Property
propBCKW_ReductRedex t = collect (size t) $ hasRedex t ==>
    case headReduction t of
        (_, _, True) -> True
        _ -> False
-- | Head reduction reports failure for terms without redexes.
propBCKW_DontReductWithoutRedex :: Term -> Property
propBCKW_DontReductWithoutRedex t = collect (size t) $ (not $ hasRedex t) ==>
    case headReduction t of
        (_, _, False) -> True
        _ -> False
-- | The full test suite: human-readable descriptions paired with properties.
suite :: [([Char], Term -> Property)]
suite = [("BCKW-terms have non-negative size", propBCKW_NonnegativeSize),
         ("Each BCKW-term is a subterm of itself", propBCKW_SubtermItself),
         ("BCKW-Reduction applies only to terms containing redexes", propBCKW_ReductRedex),
         ("Don't reduct BCKW-terms without redexes", propBCKW_DontReductWithoutRedex)]
-- | Test runner: QuickCheck each property in the suite, printing its
-- description beforehand.
main :: IO ()
main = mapM_ runProp suite
    where runProp (description, prop) = do
            putStr $ description ++ " "
            quickCheck prop
{-# LANGUAGE OverloadedStrings #-}
module TestImport
( module Yesod.Test
, App (..)
, runDB
, statusIs
) where
import ClassyPrelude
import Control.Monad.Logger
import Control.Monad.Trans.Resource
import Data.Conduit.Pool (Pool)
import Database.Persist.Sql hiding (LogFunc, runSqlPool, runSqlPersistMPool)
import Foundation hiding (runDB)
import Import (runSqlPool)
import System.Log.FastLogger (LogStr)
import Yesod.Test hiding (statusIs)
import qualified Yesod.Test
import Network.Wai.Test (simpleHeaders, simpleBody)
-- | The logging callback shape expected by monad-logger.
type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO ()
-- | Run a SQL action against a connection pool with the given logging
-- function, discharging the resource and logging layers down to IO.
runSqlPersistMPool :: LogFunc
                   -> SqlPersistT (LoggingT (ResourceT IO)) a
                   -> Pool (ignored, SqlBackend)
                   -> IO a
runSqlPersistMPool logFunc x pool = runResourceT $ runLoggingT (runSqlPool pool x) logFunc
-- | Run a database action inside a Yesod test using the app under test's
-- connection pool, with logging silenced.
runDB :: SqlPersistT (LoggingT (ResourceT IO)) a -> YesodExample App a
runDB query = do
    pool <- fmap connPool getTestYesod
    liftIO $ runSqlPersistMPool (\_ _ _ _ -> return ()) query pool
-- | Like 'Yesod.Test.statusIs', but on failure rethrows as a 'StatusExc'
-- carrying the given source label, the original exception, and the response
-- body.  For HTML responses only the error <pre> under the main content is
-- included; otherwise the raw body is used.
statusIs :: Text -> Int -> YesodExample site ()
statusIs src code = Yesod.Test.statusIs code `catch` \e -> do
    mres <- getResponse
    body <-
        case mres of
            Just res
                | Just ctype <- lookup "content-type" (simpleHeaders res)
                , "text/html" `isPrefixOf` ctype -> do
                    errs <- htmlQuery ".main-content > pre"
                    return $ mconcat errs
                | otherwise -> return $ simpleBody res
            Nothing -> return $ "No response available"
    throwIO $ StatusExc src e $ decodeUtf8 $ toStrict body
-- | A failed status assertion: source label, the underlying assertion
-- exception, and the decoded response body for diagnosis.
data StatusExc = StatusExc Text SomeException Text
    deriving (Show, Typeable)
instance Exception StatusExc
| fpco/schoolofhaskell.com | tests/TestImport.hs | mit | 1,760 | 0 | 19 | 444 | 524 | 276 | 248 | 43 | 2 |
module Main where
import Idris.AbsSyntax
import Idris.Core.TT
import Idris.ElabDecls
import Idris.Main
import Idris.Options
import IRTS.CodegenGo
import IRTS.Compiler
import Util.System
import Paths_idris_go
import Control.Monad
import System.Environment
import System.Exit
-- | Code generator options: the .ibc input files and the output path.
data Opts = Opts { inputs :: [FilePath],
                   output :: FilePath }
-- | Print usage information and exit successfully.  This backend is
-- normally invoked by the Idris compiler rather than directly by a user.
showUsage :: IO a
showUsage = do
  putStrLn "A code generator which is intended to be called by the compiler, not by a user."
  -- Fixed: the usage line previously named the C backend ("idris-codegen-c")
  -- even though this is the Go code generator.
  putStrLn "Usage: idris-codegen-go <ibc-files> [-o <output-file>]"
  exitSuccess
getOpts :: IO Opts
getOpts = do xs <- getArgs
return $ process (Opts [] "a.out") xs
where
process opts ("-o":o:xs) = process (opts { output = o }) xs
process opts (x:xs) = process (opts { inputs = x:inputs opts }) xs
process opts [] = opts
c_main :: Opts -> Idris ()
c_main opts = do runIO setupBundledCC
elabPrims
loadInputs (inputs opts) Nothing
mainProg <- elabMain
ir <- compile (Via IBCFormat "go") (output opts) (Just mainProg)
runIO $ codegenGo ir
main :: IO ()
-- | Entry point: parse options, then either show usage (no inputs given)
-- or run the code generator.
main :: IO ()
main = getOpts >>= \opts ->
  if null (inputs opts)
    then showUsage
    else runMain (c_main opts)
| Trundle/idris-go | src/Main.hs | mit | 1,289 | 0 | 12 | 377 | 390 | 200 | 190 | 36 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Unison.Parsers where
import Control.Arrow ((***))
import Data.Text (Text)
import Unison.Term (Term)
import Unison.Type (Type)
import Unison.Parser (Result(..), run, unsafeGetSucceed)
import Unison.Var (Var)
import qualified Unison.Parser as Parser
import qualified Data.Text as Text
import qualified Unison.ABT as ABT
import qualified Unison.Term as Term
import qualified Unison.TermParser as TermParser
import qualified Unison.TypeParser as TypeParser
import qualified Unison.Type as Type
import qualified Unison.Reference as R
import qualified Unison.Var as Var
-- | Parser state threaded through term and type parsing.
type S v = TypeParser.S v
-- | The initial parser state.
s0 :: S v
s0 = TypeParser.s0
-- | Parse a term, resolving the default term and type builtins.
parseTerm :: Var v => String -> Result (S v) (Term v)
parseTerm = parseTerm' termBuiltins typeBuiltins
-- | Parse a type, resolving the default type builtins.
parseType :: Var v => String -> Result (S v) (Type v)
parseType = parseType' typeBuiltins
-- | Parse a term, then substitute the supplied term and type builtins for
-- their free occurrences.
parseTerm' :: Var v => [(v, Term v)] -> [(v, Type v)] -> String -> Result (S v) (Term v)
parseTerm' termBuiltins typeBuiltins s =
  bindBuiltins termBuiltins typeBuiltins <$> run (Parser.root TermParser.term) s s0
-- | Substitute term builtins in a term, and type builtins in its embedded
-- type annotations.
bindBuiltins :: Var v => [(v, Term v)] -> [(v, Type v)] -> Term v -> Term v
bindBuiltins termBuiltins typeBuiltins =
  Term.typeMap (ABT.substs typeBuiltins) . ABT.substs termBuiltins
-- | Parse a type, then substitute the supplied type builtins.
parseType' :: Var v => [(v, Type v)] -> String -> Result (S v) (Type v)
parseType' typeBuiltins s =
  ABT.substs typeBuiltins <$> run (Parser.root TypeParser.type_) s s0
-- | Like 'parseTerm' but throws on parse failure.
unsafeParseTerm :: Var v => String -> Term v
unsafeParseTerm = unsafeGetSucceed . parseTerm
-- | Like 'parseType' but throws on parse failure.
unsafeParseType :: Var v => String -> Type v
unsafeParseType = unsafeGetSucceed . parseType
-- | Like 'parseTerm'' but throws on parse failure.
unsafeParseTerm' :: Var v => [(v, Term v)] -> [(v, Type v)] -> String -> Term v
unsafeParseTerm' er tr = unsafeGetSucceed . parseTerm' er tr
-- | Like 'parseType'' but throws on parse failure.
unsafeParseType' :: Var v => [(v, Type v)] -> String -> Type v
unsafeParseType' tr = unsafeGetSucceed . parseType' tr
-- Alias <alias> <fully-qualified-name>
-- will import the builtin <fully-qualified-name>, and once more as the alias
-- AliasFromModule
-- <modulename> e.g. "Number"
-- <aliases import modulename.alias as alias> e.g. "plus"
-- <ids import as qualified modulename.id> e.g. "minus" will import builtin "Number.plus" only
data Builtin = Builtin Text -- ^ import a builtin under its own name, e.g. Builtin "()"
             | Alias Text Text -- ^ new name, fully-qualified builtin it aliases
             | AliasFromModule Text [Text] [Text] -- ^ module, names to alias unqualified, names to import qualified only
-- aka default imports
-- | Default term builtins: literal builtin references plus aliases
-- (including operator aliases for Number arithmetic), expanded into
-- (variable, reference term) pairs.
termBuiltins :: Var v => [(v, Term v)]
termBuiltins = (Var.named *** Term.ref) <$> (
    [ Builtin "True"
    , Builtin "False"
    , Builtin "()"
    , Builtin "Either.Right"
    , Builtin "Either.Left"
    , Builtin "Greater"
    , Builtin "Less"
    , Builtin "Equal"
    , Alias "unit" "()"
    , Alias "Unit" "()"
    , Alias "Some" "Optional.Some"
    , Alias "None" "Optional.None"
    , Alias "+" "Number.+"
    , Alias "-" "Number.-"
    , Alias "*" "Number.*"
    , Alias "/" "Number./"
    , AliasFromModule "Vector" ["single"] []
    , AliasFromModule "Remote" ["pure", "bind", "pure", "fork"] []
    ] >>= unpackAliases)
    where
      -- Expand each 'Builtin' declaration into one or more (name, reference) pairs.
      unpackAliases :: Builtin -> [(Text, R.Reference)]
      unpackAliases (Builtin t) = [builtin t]
      unpackAliases (Alias a sym) = [alias a sym, builtin sym]
      unpackAliases (AliasFromModule m toAlias other) =
        (aliasFromModule m <$> toAlias) ++ (builtinInModule m <$> toAlias)
          ++ (builtinInModule m <$> other)
      builtin t = (t, R.Builtin t)
      alias new known = (new, R.Builtin known)
      aliasFromModule m sym = alias sym (Text.intercalate "." [m, sym])
      builtinInModule m sym = builtin (Text.intercalate "." [m, sym])
-- | Default type builtins as (variable, type literal) pairs.  Most are
-- plain builtin references; Number and Optional use dedicated literals.
typeBuiltins :: Var v => [(v, Type v)]
typeBuiltins = (Var.named *** Type.lit) <$>
  [ ("Number", Type.Number)
  , builtin "Unit"
  , builtin "Boolean"
  , ("Optional", Type.Optional)
  , builtin "Either"
  , builtin "Pair"
  , builtin "Order"
  , builtin "Comparison"
  , builtin "Order.Key"
  -- kv store
  , builtin "Index"
  -- html
  , builtin "Html.Link"
  -- distributed
  , builtin "Channel"
  , builtin "Duration"
  , builtin "Remote"
  , builtin "Node"
  -- hashing
  , builtin "Hash"
  ]
  where builtin t = (t, Type.Ref $ R.Builtin t)
| nightscape/platform | shared/src/Unison/Parsers.hs | mit | 4,118 | 0 | 11 | 838 | 1,362 | 734 | 628 | 94 | 3 |
{-# LANGUAGE RecordWildCards #-}
module Robot
( Bearing(East, North, South, West)
, bearing
, coordinates
, mkRobot
, simulate
, turnLeft
, turnRight
) where
-- | A compass direction the robot can face.
data Bearing
  = North
  | East
  | South
  | West
  deriving (Eq, Show)
-- | A robot: the direction it faces and its position on the grid.
data Robot = Robot
  { bearing :: Bearing
  , coordinates :: Position
  }
-- | A robot's location on the grid, as (x, y).
type Position = (Integer, Integer)

-- | Construct a robot facing the given direction at the given position.
mkRobot :: Bearing -> Position -> Robot
mkRobot direction position = Robot direction position
-- | Apply a string of instructions to a robot, left to right.
simulate :: Robot -> String -> Robot
simulate = foldl (\robot instruction -> move instruction robot)
-- | Apply a single instruction: 'A' advances one cell, 'L'/'R' rotate the
-- bearing.  Any other character is ignored; previously the match was
-- non-exhaustive, so an unknown instruction crashed at runtime.
move :: Char -> Robot -> Robot
move 'A' = advance
move 'L' = changeBearing turnLeft
move 'R' = changeBearing turnRight
move _   = id
-- | Apply a bearing transformation, leaving the position unchanged.
changeBearing :: (Bearing -> Bearing) -> Robot -> Robot
changeBearing turn robot = mkRobot (turn (bearing robot)) (coordinates robot)

-- | Move the robot one cell forward in the direction it is facing.
advance :: Robot -> Robot
advance robot = mkRobot direction (changeCoordinates direction (coordinates robot))
  where direction = bearing robot
-- | Translate a position one step in the given direction
-- (North/South move along y, East/West along x).
changeCoordinates :: Bearing -> Position -> Position
changeCoordinates direction (x, y) = case direction of
  North -> (x, y + 1)
  South -> (x, y - 1)
  East  -> (x + 1, y)
  West  -> (x - 1, y)
-- | Rotate a bearing 90 degrees counter-clockwise.
turnLeft :: Bearing -> Bearing
turnLeft b = case b of
  North -> West
  West  -> South
  South -> East
  East  -> North
-- | Rotate a bearing 90 degrees clockwise.
turnRight :: Bearing -> Bearing
turnRight North = East
turnRight East = South
turnRight South = West
turnRight West = North | enolive/exercism | haskell/robot-simulator/src/Robot.hs | mit | 1,390 | 0 | 9 | 276 | 507 | 285 | 222 | 49 | 1 |
module Labyrinth.Models (
labyrinthSize,
Kind(..), Direction(..), Treasure, EmptyTile(..),
FreeTile(..), Tile(..), Color(..), Control(..),
Position(..), Cards, Player(..), Board(..), Game(..),
hasOpening, inverse, hasTreasure, treasureToChar,
intToTreasure
) where
import Labyrinth.Helpers
import Prelude hiding (Right, Left)
import Data.Char
import Data.List.Split
import System.Random
import qualified Data.Maybe
-- | Width and height of the (square) labyrinth, in tiles.
labyrinthSize :: Int
labyrinthSize = 7
-- | The shape of a tile's corridors.
data Kind = Corner | TShape | Line deriving (Show, Eq)
-- | A compass direction; doubles as a tile's orientation.
data Direction = North | East | South | West deriving (Show, Eq, Bounded, Enum, Read)
-- | An optional treasure, identified by an index into the treasure alphabet.
type Treasure = Maybe Int
-- | A tile shape plus orientation, without treasure information.
data EmptyTile = EmptyTile Kind Direction deriving(Eq)
-- | The spare tile that is currently off the board (no orientation yet).
data FreeTile = FreeTile Kind Treasure deriving (Eq)
-- | A placed tile: shape, optional treasure and orientation.
data Tile = Tile Kind Treasure Direction deriving (Eq)
-- | Player colors.
data Color = Yellow | Red | Blue | Green deriving (Show, Eq, Bounded, Enum)
-- | Whether a player is human- or computer-controlled.
data Control = Human | AI deriving (Show, Eq)
-- | A board position as (column, row) indices.
type Position = (Int, Int)
-- | The treasure cards a player still has to collect.
type Cards = [Int]
-- | A player: color, controller, current position and remaining cards.
data Player = Player Color Control Position Cards deriving (Show, Eq)
-- | Full game state: the players, the spare tile and the board.
data Game = Game [Player] FreeTile Board
-- | The board tiles, rendered row by row ('labyrinthSize' per row).
newtype Board = Board [Tile]
instance Show EmptyTile
where show (EmptyTile Corner North) = " | |_\n\
\ |___\n\
\ "
show (EmptyTile Corner East) = " ___\n\
\ | _\n\
\ | | "
show (EmptyTile Corner South) = "___ \n\
\_ | \n\
\ | | "
show (EmptyTile Corner West) = "_| | \n\
\___| \n\
\ "
show (EmptyTile TShape North) = "_| |_\n\
\_____\n\
\ "
show (EmptyTile TShape East) = " | |_\n\
\ | _\n\
\ | | "
show (EmptyTile TShape South) = "_____\n\
\_ _\n\
\ | | "
show (EmptyTile TShape West) = "_| | \n\
\_ | \n\
\ | | "
show (EmptyTile Line North) = " | | \n\
\ | | \n\
\ | | "
show (EmptyTile Line East) = "_____\n\
\_____\n\
\ "
show (EmptyTile Line South) = " | | \n\
\ | | \n\
\ | | "
show (EmptyTile Line West) = "_____\n\
\_____\n\
\ "
-- | Render a treasure slot as one character: a lowercase letter for a
-- treasure index, a space for no treasure.  NOTE(review): the '!!' lookup
-- is partial — indexes outside 0..25 would crash; confirm indexes are
-- always within range.
treasureToChar :: Treasure -> Char
treasureToChar treasure = case treasure of
  Nothing  -> ' '
  Just idx -> ['a'..'z'] !! idx
-- | A placed tile renders like its 'EmptyTile' picture, with the character
-- at string index 8 replaced by the treasure letter.
instance Show Tile where
  show (Tile kind treasure direction) = replaceAtIndex 8 treasureAsChar asEmptyTile
    where asEmptyTile = show (EmptyTile kind direction)
          treasureAsChar = treasureToChar treasure
-- | The spare tile renders as if it were placed facing North.
instance Show FreeTile where
  show (FreeTile kind treasure) = show $ Tile kind treasure North
-- | Uniformly random 'Direction's via the Enum/Bounded instances.
instance Random Direction where
  random gen = case randomR (fromEnum (minBound :: Direction), fromEnum (maxBound :: Direction)) gen
                 of (randomIndex, newGen) -> (toEnum randomIndex, newGen)
  randomR (lower,upper) g = case randomR (fromEnum lower, fromEnum upper) g
                              of (randomIndex, newGen) -> (toEnum randomIndex, newGen)
-- | Render one row of tiles side by side: each tile's multi-line picture is
-- split into lines and corresponding lines are concatenated.
showRow :: [Tile] -> String
showRow rowOfTiles = unlines
                     $ foldl (zipWith (++)) ["", "", ""]
                     $ map (lines . show) rowOfTiles
-- | Render the board row by row, 'labyrinthSize' tiles per row.
instance Show Board where
  show (Board []) = ""
  show (Board tiles) = unlines
                       $ map showRow
                       $ chunksOf labyrinthSize tiles
-- | A single rotation step on a direction.
type Rotation = Direction -> Direction

-- | Rotate a direction one quarter turn clockwise.
rotateClockWise :: Rotation
rotateClockWise direction = case direction of
  North -> East
  East  -> South
  South -> West
  West  -> North

-- | Apply a list of rotations to a direction, left to right.
applyRotations :: Direction -> [Rotation] -> Direction
applyRotations = foldl (flip ($))
-- | The clockwise rotations (at most three) needed to turn @from@ into @to@.
getRotations :: Direction -> Direction -> [Rotation]
getRotations from to
    | from == to = []
    | otherwise = rotateClockWise : getRotations (rotateClockWise from) to
-- | The opposite compass direction.
inverse :: Direction -> Direction
inverse d = case d of
  North -> South
  East  -> West
  South -> North
  West  -> East
-- | Determines whether a tile has an opening on the side of the provided
-- direction.  Openings are predefined for North-facing tiles; every other
-- orientation is reduced to the North-facing case by rotation.
hasOpening :: Direction -> Tile -> Bool
-- predefined openings for the corner tile facing north
hasOpening North (Tile Corner _ North) = True
hasOpening East (Tile Corner _ North) = True
hasOpening South (Tile Corner _ North) = False
hasOpening West (Tile Corner _ North) = False
-- predefined openings for the tshape tile facing north
hasOpening North (Tile TShape _ North) = True
hasOpening East (Tile TShape _ North) = True
hasOpening South (Tile TShape _ North) = False
hasOpening West (Tile TShape _ North) = True
-- predefined openings for the line tile facing north
hasOpening North (Tile Line _ North) = True
hasOpening East (Tile Line _ North) = False
hasOpening South (Tile Line _ North) = True
hasOpening West (Tile Line _ North) = False
-- We can compute if a tile has an opening for other sides than North
-- by rotating the tile to the north
-- For example, to determine if a Corner tile facing East has a South opening
-- We can turn the tile to the North, which takes 3 clockwise rotations
-- We then apply those same 3 rotations to 'South'
-- So asking whether a Corner tile facing East has a South opening
-- is equivalent to asking whether a Corner tile facing North has an East opening
hasOpening direction (Tile kind t tileDirection) = hasOpening rotatedDirection (Tile kind t North)
  where rotations = getRotations tileDirection North
        rotatedDirection = applyRotations direction rotations
-- | Whether the tile carries a treasure.
hasTreasure :: Tile -> Bool
hasTreasure (Tile _ treasure _) = case treasure of
  Just _  -> True
  Nothing -> False

-- | Wrap a treasure index into a 'Treasure' value.
intToTreasure :: Int -> Treasure
intToTreasure n = Just n
| amoerie/labyrinth | Labyrinth/Models.hs | mit | 6,267 | 0 | 10 | 2,131 | 1,549 | 838 | 711 | 101 | 1 |
{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Main where
import Yage hiding ((</>))
import Yage.Wire hiding (unless, when)
import Yage.Lens
import Yage.Material
import Yage.Scene
import Yage.HDR
import Yage.GL
import Yage.Rendering.Pipeline.Deferred
import Yage.Rendering.Pipeline.Deferred.ScreenPass
import Yage.Rendering.Pipeline.Deferred.BaseGPass
import Yage.Formats.Ygm
import Yage.Resources
import System.FilePath
import Yage.Rendering.Resources.GL
import Foreign.Ptr
import Foreign.Storable
import Data.FileEmbed
import Data.Data
import qualified Data.ByteString.Char8 as Char8
import Quine.Monitor
import Quine.GL
import Quine.GL.Attribute
import Quine.GL.Buffer
import Quine.GL.Error
import Quine.GL.Program
import Quine.GL.Shader
import Quine.GL.Sampler
import Quine.GL.Types
import Quine.GL.Uniform
import Quine.GL.Texture hiding (Texture)
import Quine.GL.VertexArray
import Quine.GL.ProgramPipeline
import Yage.Rendering.GL
import Graphics.GL.Ext.EXT.TextureFilterAnisotropic
-- | Application configuration: only warnings and above are logged.
appConf :: ApplicationConfig
appConf = defaultAppConfig{ logPriority = WARNING }
-- | An 800x600 window with a core OpenGL 4.1 debug context, sRGB-capable
-- framebuffer and 60 Hz refresh.
winSettings :: WindowConfig
winSettings = WindowConfig
    { windowSize = (800, 600)
    , windowHints =
        [ WindowHint'ContextVersionMajor 4
        , WindowHint'ContextVersionMinor 1
        , WindowHint'OpenGLProfile OpenGLProfile'Core
        , WindowHint'OpenGLForwardCompat True
        , WindowHint'OpenGLDebugContext True
        , WindowHint'sRGBCapable True
        , WindowHint'RefreshRate 60
        ]
    }
-- | Top-level configuration: application, window and monitoring options.
data Configuration = Configuration
    { _mainAppConfig :: ApplicationConfig
    , _mainWindowConfig :: WindowConfig
    , _mainMonitorOptions :: MonitorOptions
    }
makeLenses ''Configuration
-- | Default configuration; the monitor listens on localhost:8080.
configuration :: Configuration
configuration = Configuration appConf winSettings (MonitorOptions "localhost" 8080 True False)
type GameEntity = DeferredEntity
type GameScene = DeferredScene
-- | The per-frame game state: viewport, scene, HDR camera and the render
-- system used to draw it.
data Game = Game
    { _mainViewport :: Viewport Int
    , _gameScene :: GameScene
    , _gameCamera :: HDRCamera
    , _sceneRenderer :: RenderSystem Game ()
    }
makeLenses ''Game
instance HasCamera Game where
    camera = gameCamera.camera
instance HasEntities Game (Seq GameEntity) where
    entities = gameScene.entities
-- | The scene wire: a single test entity (acquired once) in an empty
-- environment.
simScene :: YageWire t () GameScene
simScene = Scene
    <$> fmap singleton (acquireOnce testEntity)
    <*> pure emptyEnvironment
-- | A sphere mesh loaded from disk with the default g-buffer material and
-- identity transform.
testEntity :: YageResource GameEntity
testEntity = Entity
    <$> (fromMesh =<< meshRes (loadYGM id ("res/sphere.ygm", mkSelection [])))
    <*> gBaseMaterialRes defaultGBaseMaterial
    <*> pure idTransformation
-- | Wire producing the per-frame 'Game' value: pipeline and scene are
-- acquired once; the camera is placed at z = 5.
sceneWire :: YageWire t () Game
sceneWire = proc () -> do
  pipeline <- acquireOnce simplePipeline -< ()
  scene <- simScene -< ()
  returnA -< Game (defaultViewport 800 600) scene (defaultHDRCamera $ def & position .~ V3 0 0 5) pipeline
-- | A minimal deferred pipeline: run the g-buffer base pass, then blit one
-- g-buffer channel to the screen.  Enables sRGB conversion on the default
-- framebuffer and registers the bundled GLSL sources as named strings.
simplePipeline :: YageResource (RenderSystem Game ())
simplePipeline = do
  -- Convert output linear RGB to SRGB
  throwWithStack $ glEnable GL_FRAMEBUFFER_SRGB
  throwWithStack $
    io (getDir "res/glsl") >>= \ ss -> buildNamedStrings ss ("/res/glsl"</>)
  baseSampler <- mkBaseSampler
  gBasePass <- drawGBuffers
  screenQuadPass <- drawRectangle
  return $ do
    game <- ask
    screenQuadPass .
      dimap (,game^.camera, game^.mainViewport)
            (\base -> ([(1,baseSampler,base^.aChannel)], game^.mainViewport))
      gBasePass
-- | A clamped, linearly filtered sampler; 16x anisotropic filtering is
-- enabled when the EXT_texture_filter_anisotropic extension is available.
mkBaseSampler :: YageResource Sampler
mkBaseSampler = throwWithStack $ do
  sampler <- glResource
  samplerParameteri sampler GL_TEXTURE_WRAP_S $= GL_CLAMP_TO_EDGE
  samplerParameteri sampler GL_TEXTURE_WRAP_T $= GL_CLAMP_TO_EDGE
  samplerParameteri sampler GL_TEXTURE_MIN_FILTER $= GL_LINEAR
  samplerParameteri sampler GL_TEXTURE_MAG_FILTER $= GL_LINEAR
  when gl_EXT_texture_filter_anisotropic $
    samplerParameterf sampler GL_TEXTURE_MAX_ANISOTROPY_EXT $= 16
  return sampler
-- | Run the game at a fixed 1/60 s simulation step.
main :: IO ()
main = yageMain "standalone" configuration sceneWire (1/60)
instance HasMonitorOptions Configuration where
  monitorOptions = mainMonitorOptions
instance HasWindowConfig Configuration where
  windowConfig = mainWindowConfig
instance HasApplicationConfig Configuration where
  applicationConfig = mainAppConfig
instance HasViewport Game Int where
  viewport = mainViewport
-- | No interpolation between simulation frames.
instance LinearInterpolatable Game where
  lerp _ _ = id
instance HasRenderSystem Game (ResourceT IO) Game () where
  renderSystem = sceneRenderer
| MaxDaten/yage | Main.hs | mit | 4,841 | 1 | 17 | 810 | 1,090 | 599 | 491 | 132 | 1 |
module Example.Schema.Student
( studentTable
, studentIdField
, studentNameField
, studentMajorField
, majorTable
, majorIdField
, majorNameField
, majorCollegeField
) where
import qualified Database.Orville.PostgreSQL as O
import Example.Data.Major
( Major(..)
, MajorCollege(..)
, MajorId(..)
, MajorName(..)
, collegeMajorToText
, textToCollegeMajor
)
import Example.Data.Student (Student(..), StudentId(..), StudentName(..))
-- | Orville table definition for majors: a read-only auto-generated id
-- plus name and college attributes.
majorTable :: O.TableDefinition (Major MajorId) (Major ()) MajorId
majorTable =
  O.mkTableDefinition $
  O.TableParams
    { O.tblName = "majors"
    , O.tblPrimaryKey = O.primaryKey majorIdField
    , O.tblMapper =
        Major <$> O.readOnlyField majorIdField <*>
        O.attrField majorName majorNameField <*>
        O.attrField majorCollege majorCollegeField
    , O.tblGetKey = majorId
    , O.tblSafeToDelete = []
    , O.tblComments = O.noComments
    }
-- | Auto-generated integer primary key, wrapped in 'MajorId'.
majorIdField :: O.FieldDefinition O.NotNull MajorId
majorIdField =
  O.automaticIdField "id" `O.withConversion`
  O.convertSqlType majorIdInt MajorId
-- | The major's name, stored as varchar(255).
majorNameField :: O.FieldDefinition O.NotNull MajorName
majorNameField =
  O.textField "name" 255 `O.withConversion`
  O.convertSqlType majorNameText MajorName
-- | The major's college, stored as text via the to/from-text conversions.
majorCollegeField :: O.FieldDefinition O.NotNull MajorCollege
majorCollegeField =
  O.textField "college" 255 `O.withConversion`
  O.convertSqlType collegeMajorToText textToCollegeMajor
-- | Orville table definition for students: a read-only auto-generated id
-- plus name and major attributes.
studentTable :: O.TableDefinition (Student StudentId) (Student ()) StudentId
studentTable =
  O.mkTableDefinition $
  O.TableParams
    { O.tblName = "students"
    , O.tblPrimaryKey = O.primaryKey studentIdField
    , O.tblMapper =
        Student <$> O.readOnlyField studentIdField <*>
        O.attrField studentName studentNameField <*>
        O.attrField studentMajor studentMajorField
    , O.tblGetKey = studentId
    , O.tblSafeToDelete = []
    , O.tblComments = O.noComments
    }
-- | Auto-generated integer primary key, wrapped in 'StudentId'.
studentIdField :: O.FieldDefinition O.NotNull StudentId
studentIdField =
  O.automaticIdField "id" `O.withConversion`
  O.convertSqlType studentIdInt StudentId
-- | The student's name, stored as varchar(255).
studentNameField :: O.FieldDefinition O.NotNull StudentName
studentNameField =
  O.textField "name" 255 `O.withConversion`
  O.convertSqlType studentNameText StudentName
-- | Foreign key to 'majorTable' on its id field.
studentMajorField :: O.FieldDefinition O.NotNull MajorId
studentMajorField = O.foreignKeyField "major" majorTable majorIdField
| flipstone/orville | orville-postgresql/sample-project/Example/Schema/Student.hs | mit | 2,382 | 0 | 12 | 398 | 604 | 335 | 269 | 66 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-redshift-clustersubnetgroup.html
module Stratosphere.Resources.RedshiftClusterSubnetGroup where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.Tag
-- | Full data type definition for RedshiftClusterSubnetGroup. See
-- 'redshiftClusterSubnetGroup' for a more convenient constructor.
data RedshiftClusterSubnetGroup =
  RedshiftClusterSubnetGroup
  { _redshiftClusterSubnetGroupDescription :: Val Text -- ^ required Description
  , _redshiftClusterSubnetGroupSubnetIds :: ValList Text -- ^ required SubnetIds
  , _redshiftClusterSubnetGroupTags :: Maybe [Tag] -- ^ optional Tags
  } deriving (Show, Eq)
-- | Serialise to CloudFormation resource properties; the optional Tags
-- entry is omitted when 'Nothing'.
instance ToResourceProperties RedshiftClusterSubnetGroup where
  toResourceProperties RedshiftClusterSubnetGroup{..} =
    ResourceProperties
    { resourcePropertiesType = "AWS::Redshift::ClusterSubnetGroup"
    , resourcePropertiesProperties =
        hashMapFromList $ catMaybes
        [ (Just . ("Description",) . toJSON) _redshiftClusterSubnetGroupDescription
        , (Just . ("SubnetIds",) . toJSON) _redshiftClusterSubnetGroupSubnetIds
        , fmap (("Tags",) . toJSON) _redshiftClusterSubnetGroupTags
        ]
    }
-- | Constructor for 'RedshiftClusterSubnetGroup' containing required fields
-- as arguments; Tags start out unset and can be added via 'rcsugTags'.
redshiftClusterSubnetGroup
  :: Val Text -- ^ 'rcsugDescription'
  -> ValList Text -- ^ 'rcsugSubnetIds'
  -> RedshiftClusterSubnetGroup
redshiftClusterSubnetGroup descriptionarg subnetIdsarg =
  RedshiftClusterSubnetGroup
  { _redshiftClusterSubnetGroupDescription = descriptionarg
  , _redshiftClusterSubnetGroupSubnetIds = subnetIdsarg
  , _redshiftClusterSubnetGroupTags = Nothing
  }
-- | Lens for the required Description property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-redshift-clustersubnetgroup.html#cfn-redshift-clustersubnetgroup-description
rcsugDescription :: Lens' RedshiftClusterSubnetGroup (Val Text)
rcsugDescription = lens _redshiftClusterSubnetGroupDescription (\s a -> s { _redshiftClusterSubnetGroupDescription = a })
-- | Lens for the required SubnetIds property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-redshift-clustersubnetgroup.html#cfn-redshift-clustersubnetgroup-subnetids
rcsugSubnetIds :: Lens' RedshiftClusterSubnetGroup (ValList Text)
rcsugSubnetIds = lens _redshiftClusterSubnetGroupSubnetIds (\s a -> s { _redshiftClusterSubnetGroupSubnetIds = a })
-- | Lens for the optional Tags property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-redshift-clustersubnetgroup.html#cfn-redshift-clustersubnetgroup-tags
rcsugTags :: Lens' RedshiftClusterSubnetGroup (Maybe [Tag])
rcsugTags = lens _redshiftClusterSubnetGroupTags (\s a -> s { _redshiftClusterSubnetGroupTags = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/RedshiftClusterSubnetGroup.hs | mit | 2,779 | 0 | 15 | 308 | 370 | 213 | 157 | 37 | 1 |
module Proteome.Data.ProjectLang where
-- | A project's language identifier, as a msgpack-able, string-literal
-- friendly newtype over 'Text'.
newtype ProjectLang =
  ProjectLang { _lang :: Text }
  deriving stock (Ord, Eq, Show, Generic)
  deriving newtype (MsgpackEncode, MsgpackDecode, IsString)
makeClassy ''ProjectLang
| tek/proteome | packages/proteome/lib/Proteome/Data/ProjectLang.hs | mit | 222 | 0 | 6 | 33 | 66 | 37 | 29 | -1 | -1 |
-- |
-- Module: Parser.EnumDesc
-- Copyright: (c) 2015-2016 Martijn Rijkeboer <mrr@sru-systems.com>
-- License: MIT
-- Maintainer: Martijn Rijkeboer <mrr@sru-systems.com>
--
-- Enum Descriptor type and functions.
module Parser.EnumDesc
( EnumDesc
, new
, getName
, addValueDesc
, addValueDescs
, getValueDescs
, getAllowAlias
, setAllowAlias
) where
import Data.Foldable (toList)
import Data.Sequence ((|>), (><))
import Prelude (Maybe(..), (.))
import qualified Data.Sequence
import qualified Data.Sequence as Seq
import qualified Parser.EnumValueDesc as Parser
import qualified Prelude
-- | Descriptor for a protobuf enum: its name, its value descriptors (in
-- insertion order) and the optional allow_alias option.
data EnumDesc = EnumDesc
  { name :: Prelude.String
  , valueDescs :: Data.Sequence.Seq Parser.EnumValueDesc
  , allowAlias :: Prelude.Maybe Prelude.Bool
  } deriving (Prelude.Show, Prelude.Eq)
-- | The name of an enum.
type EnumName = Prelude.String
-- | Create a descriptor with the given name, no values, allow_alias unset.
new :: EnumName -> EnumDesc
new n = EnumDesc n Seq.empty Nothing
-- | The enum's name.
getName :: EnumDesc -> EnumName
getName = name
-- | Append one value descriptor.
addValueDesc :: Parser.EnumValueDesc -> EnumDesc -> EnumDesc
addValueDesc val self = self{valueDescs = valueDescs self |> val}
-- | Append many value descriptors, preserving their order.
addValueDescs :: [Parser.EnumValueDesc] -> EnumDesc -> EnumDesc
addValueDescs vs self = self{valueDescs = valueDescs self >< Seq.fromList vs}
-- | All value descriptors, in insertion order.
getValueDescs :: EnumDesc -> [Parser.EnumValueDesc]
getValueDescs = toList . valueDescs
-- | The allow_alias option, if it was set.
getAllowAlias :: EnumDesc -> Maybe Prelude.Bool
getAllowAlias = allowAlias
-- | Set the allow_alias option.
setAllowAlias :: Prelude.Bool -> EnumDesc -> EnumDesc
setAllowAlias val self = self{allowAlias = Just val}
| sru-systems/protobuf-simple | src/Parser/EnumDesc.hs | mit | 1,557 | 0 | 10 | 290 | 382 | 224 | 158 | 36 | 1 |
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.SmallCheck
import Confetti
-- | Run the (currently empty) tasty test suite.
main :: IO ()
main = defaultMain $ testGroup "all-tests" tests
-- TODO(|p=101|#lol) - some tests would be good for future avi to write.
tests :: [TestTree]
tests = []
| aviaviavi/confetti | test/Main.hs | mit | 257 | 0 | 6 | 41 | 61 | 35 | 26 | 8 | 1 |
{- Main.hs -}
import Text.Parsec ( parseTest, string )
import Text.Parsec.String ( Parser )
-- | Demo entry point: run the Parsec 'string' parser against a matching
-- input and print the parse result.  Adds the previously missing top-level
-- type signature and drops the redundant single-statement 'do'.
main :: IO ()
main = parseTest (string "abcd") "abcd"
| ubnt-intrepid/orslang | haskell/src/Main.hs | mit | 168 | 0 | 9 | 54 | 45 | 25 | 20 | 4 | 1 |
{-# LANGUAGE ScopedTypeVariables, FlexibleContexts #-}
{-|
Module : Labyrinth.Generator
Description : pathfinding testcase generator
Copyright : (c) deweyvm 2014
License : MIT
Maintainer : deweyvm
Stability : experimental
Portability : unknown
Generates maps for checking the validity of pathfinding algorithms.
-}
module Labyrinth.Generator(printCase, doSimple) where
import Codec.Picture
import System.Random
import Data.Word
import qualified Data.List as List
import qualified Data.Set as Set
import qualified Data.Vector as Vec
import Data.Maybe
import Control.Applicative
import Labyrinth.Util
import Labyrinth.Data.Array2d
import Labyrinth.Graph
import qualified Labyrinth.Machine2d as M
import qualified Labyrinth.Flood as F
import Labyrinth.Instances()
import Debug.Trace
-- | Render a 2D array to a string using the given cell renderer, inserting
-- a newline at the start of every row after the first.
printArray :: (a -> String) -> Array2d a -> String
printArray f arr =
    let (Array2d _ _ vec) = (\(x, y) p -> select "" "\n" (x == 0 && y /= 0) ++ (f p)) <$*> arr in
    concat (Vec.toList vec)
-- | Render a point set as a cols x rows grid, formatting each cell's
-- membership with the given function; @x@ is the padding value used when
-- building the boolean array.
printSet :: Bool
         -> (Bool -> String)
         -> Int
         -> Int
         -> Set.Set Point
         -> String
printSet x f cols rows set =
    let arr = tabulate cols rows x (\pt -> Set.member pt set) in
    printArray f arr
-- | Render a point as "x,y".
printPoint :: Point -> String
printPoint (x, y) = show x ++ "," ++ show y
-- | Print a pathfinding test case: the set's min and max points on the
-- first two lines, then the membership grid; empty string for an empty set.
-- NOTE(review): the rows/cols arguments are passed to 'printSet' in the
-- opposite order to its parameter names — confirm intended for
-- non-square maps.
printCase :: Set.Set Point -> Int -> Int -> (Bool -> String) -> String
printCase set rows cols f = do
    case minMaxView set of
        Just (x, y, _) ->
            printPoint x ++ "\n" ++ printPoint y ++ "\n" ++ printSet False f rows cols set
        Nothing -> ""
-- | A cols x rows array of pseudo-random booleans from the given seed.
makeRandom :: Int -> Int -> Int -> Array2d Bool
makeRandom seed cols rows =
    Array2d cols rows (Vec.fromList rand)
    where rand = take (cols*rows) $ randoms (mkStdGen seed)
-- | An RGB color with 8-bit channels.
type Color = (Word8, Word8, Word8)
black :: Color
black = (0, 0, 0)
white :: Color
white = (255, 255, 255)
-- | An infinite list of pseudo-random colors derived from a seed; the
-- three channels use generators seeded with seed, seed+1 and seed+2.
randColors :: Int -> [Color]
randColors seed =
    zip3 (rand id) (rand (+1)) (rand (+2))
    where rand f = (randoms . mkStdGen . f) seed
-- | Write the color array as an opaque PNG of size (cols*fc, rows*fc);
-- pixels outside the array fall back to opaque black.
-- NOTE(review): 'expandMap' uses a hard-coded factor of 3 while the image
-- is sized by @fc@ — confirm @fc@ is always 3, otherwise sizes disagree.
saveMap :: FilePath -> Int -> Array2d Color -> IO ()
saveMap path fc arr@(Array2d cols rows _) =
    let ex = expandMap 3 (0,0,0) arr in
    let colors = (\(r, g, b) -> PixelRGBA8 r g b 255) <$> ex in
    let getter = getOrElse colors (PixelRGBA8 0 0 0 255) in
    writePng path $ generateImage getter (cols*fc) (rows*fc)
-- | A flood-filled region annotated with one path across it:
-- the path's endpoints, the region nodes the path does not touch,
-- and the path itself.
data PathRegion a = PathRegion a -- start node
                               a -- end node
                               (Set.Set a) -- nodes not covered by path
                               [a] -- path
-- | Build a 'PathRegion' from the tuple produced by 'createPath'.
mkPathRegion :: (a, a, Set.Set a, [a]) -> PathRegion a
mkPathRegion (from, to, uncovered, pts) = PathRegion from to uncovered pts
-- | True when the point is anywhere in the region: an endpoint, an
-- uncovered node, or on the path.
prContains :: Point -> PathRegion Point -> Bool
prContains p (PathRegion from to uncovered pts) =
    p == from || p == to || Set.member p uncovered || p `elem` pts
-- | Paint every region in its own random colour: path endpoints in
-- green/red, path cells in white, remaining region cells in the
-- region colour, everything else black.
--
-- Fix: the guards of @found@ were non-exhaustive — a point matched by
-- 'prContains' but (through a future refactor) falling past all four
-- guards would crash.  An @otherwise@ fallback keeps @found@ total;
-- it is unreachable today because @findReg@ only yields regions for
-- which 'prContains' holds, so rendered output is unchanged.
toPixelArray :: Int -> Int -> Int -> [PathRegion Point] -> Array2d Color
toPixelArray seed cols rows regions =
    tabulate cols rows black (\pt -> fromMaybe black (found pt <$> findReg pt))
    where colors = zip (randColors seed) regions
          findReg pt = List.find (\(_, reg) -> prContains pt reg) colors
          found :: Point -> (Color, PathRegion Point) -> Color
          found pt (color, (PathRegion start end rest path))
              | start == pt = (0, 255, 0)
              | end == pt = (255, 0, 0)
              | Set.member pt rest = color
              | elem pt path = white
              | otherwise = black -- unreachable while findReg implies prContains
-- | Run the supplied pathfinder between the extreme points of @area@.
--
-- Returns @Just (start, end, uncovered, path)@ where @uncovered@ is
-- @area@ minus both endpoints and every node on the path; 'Nothing'
-- when the area is empty or the pathfinder reports failure (the
-- failure message is emitted via 'trace').
createPath :: (Array2d Bool -> Point -> Point -> Either String [Point])
           -> Array2d Bool
           -> Set.Set Point
           -> Maybe (Point, Point, Set.Set Point, [Point])
createPath pfind arr area =
    case minMaxView area of
        Just (x, y, rest) ->
            case pfind arr x y of
                Right pts ->
                    Just (x, y, removed, pts)
                    where removed = Set.difference rest (Set.union (Set.fromList [x, y])
                                                                   (Set.fromList pts))
                -- this case should be impossible if pfind is correct
                Left s -> trace ("Failed to find path: " ++ s) Nothing
        Nothing -> Nothing
-- | Collect the coordinates of every open cell in the grid.
getOpen :: Open a => Array2d a -> Set.Set Point
getOpen grid = Set.fromList openPts
  where
    openPts = foldli (\acc (pt, cell) -> (select id (pt:) (isOpen cell)) acc) [] grid
-- | The biggest set in the list by cardinality; the empty set is
-- seeded in so an empty input yields 'Set.empty' instead of crashing.
largest :: [Set.Set Point] -> Set.Set Point
largest sets = List.maximumBy bySize (Set.empty : sets)
  where
    bySize a b = compare (Set.size a) (Set.size b)
-- | Generation parameters: random seed, the two grid dimensions, and a
-- transform applied to the raw random grid.
-- NOTE(review): call sites disagree on the dimension order —
-- 'saveFlooded' pattern-matches the 2nd/3rd fields as cols/rows while
-- 'processMaze' names them rows/cols.  Both callers use a square
-- 500x500 map, so the discrepancy is currently unobservable; confirm
-- before using non-square dimensions.
data Params a = Params Int Int Int (a -> a)
-- | Scale a grid up by an integer factor: every source cell becomes an
-- fc x fc block; @fallback@ is used for out-of-range lookups.
expandMap :: Int -> a -> Array2d a -> Array2d a
expandMap fc fallback src@(Array2d cols rows _) =
    tabulate (cols * fc) (rows * fc) fallback sample
  where
    sample (i, j) = getOrElse src fallback (i `quot` fc) (j `quot` fc)
-- | Dump the boolean mask to mask.png (open = white, closed = black).
saveMask :: Params a -> Array2d Bool -> IO ()
saveMask _ mask = saveMap "mask.png" 3 colored
  where
    colored = select white black <$> mask
-- | Emit a pathfinding test case for the first flooded region.
-- NOTE(review): the rendered case is bound to @_@ and never forced or
-- printed, so this action currently has no observable effect —
-- presumably it once wrote the case somewhere; confirm intent before
-- relying on it.
saveFlooded :: Params a -> [Set.Set Point] -> IO ()
saveFlooded (Params _ cols rows _) (x:_) = do
    -- Dead binding: printCase's result is discarded unevaluated.
    let _ = printCase x cols rows (select "x" "0")
    return ()
-- No regions: nothing to do.
saveFlooded _ _ = return ()
-- | Dump the coloured path rendering to flood.png.
savePathed :: Params a -> Array2d Color -> IO ()
savePathed _ pixels = saveMap "flood.png" 3 pixels
-- | Run the full pipeline on a 500x500 map with the default
-- cellular-automaton smoothing passes, saving mask and path images.
doSimple :: (Array2d Bool -> Point -> Point -> Either String [Point])
         -> Int
         -> IO ()
doSimple pfind seed =
    processMaze pfind saveMask saveFlooded savePathed params
  where
    params = Params seed 500 500 smooth
    -- Right section: feeds a grid through the machine pass list.
    smooth = (M.<.> passes)
    passes = [ M.occuCount 5
             , M.vertStrip True 4
             , M.occuCount 5
             ]
-- | Full generation pipeline: random grid -> transform -> flood fill
-- -> keep the largest cavern -> pathfind across it -> hand the mask,
-- region list and pixel rendering to the three consumers.
-- NOTE(review): this pattern names the 2nd/3rd 'Params' fields
-- rows/cols, while 'saveFlooded' reads them as cols/rows; both current
-- call sites use a square map so the mismatch is unobservable —
-- confirm before changing dimensions.
processMaze :: (Array2d Bool -> Point -> Point -> Either String [Point])
            -> (Params (Array2d Bool) -> Array2d Bool -> IO ())
            -> (Params (Array2d Bool) -> [Set.Set Point] -> IO ())
            -> (Params (Array2d Bool) -> Array2d Color -> IO ())
            -> Params (Array2d Bool)
            -> IO ()
processMaze pfind processMask processFlooded processPathed p@(Params seed rows cols endo) = do
    let initial = makeRandom seed cols rows
    -- Smooth the raw noise with the caller-supplied transform.
    let permuted = endo initial
    let open = getOpen permuted
    let flooded = F.floodAll permuted open
    -- Only the biggest connected region is kept for pathfinding.
    let biggest = largest flooded
    let array = tabulate rows cols False (\pt -> Set.member pt biggest)
    let border = F.computeBorder array False ((0,0) :: Point)
    -- createPath may fail (Nothing); catMaybes drops that case.
    let paths = catMaybes [createPath pfind permuted biggest]
    -- A sentinel region carrying the border, plus any real path.
    let pathRegions = mkPathRegion <$> (((0,0),(0,0),border,[]):paths)
    let arr = toPixelArray seed cols rows pathRegions
    processMask p permuted
    processFlooded p [biggest]
    processPathed p arr
| deweyvm/labyrinth | src/Labyrinth/Generator.hs | mit | 6,510 | 0 | 18 | 1,845 | 2,534 | 1,302 | 1,232 | 136 | 3 |
--Project Euler Problem 4 Find largest number smaller than 999^2 which is a
--palindrome
-- | True when a value's 'show' rendering reads the same backwards.
-- Fixes: added the (previously inferred) type signature, computed
-- 'show' once instead of twice, and replaced the prefix '(==)' with
-- the idiomatic infix comparison.
isPalindrome :: Show a => a -> Bool
isPalindrome x = rendered == reverse rendered
  where rendered = show x
-- | Project Euler 4: the largest palindromic product of two
-- three-digit factors.  (Integer is what the original defaulted to.)
problem_4 :: Integer
problem_4 = maximum [ p | x <- threeDigit, y <- threeDigit
                        , let p = x * y, isPalindrome p ]
  where threeDigit = [100 .. 999]
-- | Print the answer to Project Euler problem 4.
-- Fix: replaced the @y <- return (problem_4)@ anti-pattern (binding a
-- pure value through 'return') with direct use of the value.
main :: IO()
main = putStrLn ("Answer to problem 4 = " ++ show problem_4)
| calewis/SmallProjectsAndDev | project_euler/haskell/problem_4.hs | mit | 396 | 0 | 11 | 77 | 145 | 76 | 69 | 6 | 1 |
module ApachePrettyPrinter where
--{{{ Modules imports
import TreeConfigApacheFiller
import ApacheSourceCreator
import TypeFiles.ApacheTypes
import TypesAndFunctions
--}}}
--{{{ Haskell imports
import Text.PrettyPrint
--}}}
--{{{ Function that will pretty print the ApacheWebserver
-- | Pretty print an entire 'ApacheWebserver' configuration.
--
-- Every field of the record is optional; a directive is rendered on
-- its own line only when present.  Flat directives come first,
-- followed by the Directory, Files, Location and VirtualHost
-- sections, in the same order as before.
--
-- Refactored from one ~580-line '($$)' chain into a table of
-- renderers: '($$)' is associative with 'empty' as its identity, so
-- 'vcat' over the list renders the identical document.
printApache :: ApacheWebserver -> Doc
printApache source = vcat directives
  where
    -- Render an optional single-valued directive.
    simple name field = printMaybe (printSimpleInstruction name) (field source)
    -- Render an optional multi-valued directive.
    multi name field = printMaybe (printListInstruction name) (field source)
    -- One entry per directive, preserving the original output order.
    -- Directive-name spellings (including oddities such as
    -- "SSLOCSPResponderTImeout") are kept byte-identical to the
    -- previous output; Apache directive names are case-insensitive.
    directives =
        [ multi "AcceptFilter" aAcceptFilter
        , simple "AcceptPathInfo" aAcceptPathInfo
        , simple "AccessFileName" aAccessFileName
        , simple "AddDefaultCharset" aAddDefaultCharset
        , simple "AllowEncodedSlashes" aAllowEncodedSlashes
        , simple "ContentDigest" aContentDigest
        , simple "DefaultRuntimeDir" aDefaultRuntimeDir
        , multi "Define" aDefine
        , simple "DocumentRoot" aDocumentRoot
        , simple "EnableMMAP" aEnableMMAP
        , simple "EnableSendfile" aEnableSendfile
        , simple "Error" aError
        , multi "ErrorDocument" aErrorDocument
        , simple "ErrorLog" aErrorLog
        , simple "ErrorLogFormat" aErrorLogFormat
        , simple "ExtendedStatus" aExtendedStatus
        , simple "FileEtag" aFileEtag
        , simple "HostnameLookups" aHostnameLookups
        , multi "Include" aInclude
        , multi "IncludeOptional" aIncludeOptional
        , simple "KeepAlive" aKeepAlive
        , simple "KeepAliveTimeout" aKeepAliveTimeout
        , simple "LimitInternalRecursion" aLimitInternalRecursion
        , simple "LimitRequestBody" aLimitRequestBody
        , simple "LimitRequestFields" aLimitRequestFields
        , simple "LimitRequestFieldSize" aLimitRequestFieldSize
        , simple "LimitRequestLine" aLimitRequestLine
        , simple "LimitXMLRequestBody" aLimitXMLRequestBody
        , simple "LogLevel" aLogLevel
        , simple "MaxKeepAliveRequests" aMaxKeepAliveRequests
        , simple "MaxRangeOverlaps" aMaxRangeOverlaps
        , simple "MaxRangeReversals" aMaxRangeReversals
        , simple "MaxRanges" aMaxRanges
        , multi "Mutex" aMutex
        , simple "Options" aOptions
        , simple "Protocol" aProtocol
        , simple "RLimitCPU" aRLimitCPU
        , simple "RLimitMEM" aRLimitMEM
        , simple "RLimitNPROC" aRLimitNPROC
        , simple "SeeRequestTail" aSeeRequestTail
        , simple "ServerAdmin" aServerAdmin
        , simple "ServerName" aServerName
        , simple "ServerRoot" aServerRoot
        , simple "ServerSignature" aServerSignature
        , simple "ServerTokens" aServerTokens
        , simple "SetHandler" aSetHandler
        , simple "SetInputFilter" aSetInputFilter
        , simple "SetOutputFilter" aSetOutputFilter
        , simple "TimeOut" aTimeOut
        , simple "TraceEnable" aTraceEnable
        , multi "UnDefine" aUnDefine
        , simple "UseCanonicalName" aUseCanonicalName
        , simple "UseCanonicalPhysicalPort" aUseCanonicalPhysicalPort
        , simple "GracefulShutDownTimeout" aGracefulShutdownTimeout
        , multi "Listen" aListen
        , simple "ListenBackLog" aListenBackLog
        , simple "MaxConnectionsPerChild" aMaxConnectionsPerChild
        , simple "MaxMemFree" aMaxMemFree
        , simple "MaxRequestWorkers" aMaxRequestWorkers
        , simple "PidFile" aPidFile
        , simple "ReceiveBufferSize" aReceiveBufferSize
        , simple "ScoreBoardFile" aScoreBoardFile
        , simple "SendBufferSize" aSendBufferSize
        , simple "ServerLimit" aServerLimit
        , simple "StartServers" aStartServers
        , simple "MaxSpareServers" aMaxSpareServers
        , simple "MinSpareServers" aMinSpareServers
        , multi "Alias" aAlias
        , multi "AliasMatch" aAliasMatch
        , multi "Redirect" aRedirect
        , multi "RedirectMatch" aRedirectMatch
        , multi "RedirectPermanent" aRedirectPermanent
        , multi "RedirectTemp" aRedirectTemp
        , multi "ScriptAlias" aScriptAlias
        , multi "ScriptAliasMatch" aScriptAliasMatch
        , multi "AddAlt" aAddAlt
        , multi "AddAltByEncoding" aAddAltByEncoding
        , multi "AddAltByType" aAddAltByType
        , multi "AddDescription" aAddDescription
        , multi "AddIcon" aAddIcon
        , multi "AddIconByEncoding" aAddIconByEncoding
        , multi "AddIconByType" aAddIconByType
        , simple "DefaultIcon" aDefaultIcon
        , simple "HeaderName" aHeaderName
        , simple "IndexHeadInsert" aIndexHeadInsert
        , multi "IndexIgnore" aIndexIgnore
        , simple "IndexIgnoreReset" aIndexIgnoreReset
        , multi "IndexOptions" aIndexOptions
        , simple "IndexOrderDefault" aIndexOrderDefault
        , simple "IndexStyleSheet" aIndexStyleSheet
        , simple "ReadmeName" aReadmeName
        , simple "ScriptLog" aScriptLog
        , simple "ScriptLogBuffer" aScriptLogBuffer
        , simple "ScriptLogLength" aScriptLogLength
        , simple "ScriptSock" aScriptSock
        , simple "CGIDScriptTimeout" aCGIDScriptTimeout
        , simple "DirectoryCheckHandler" aDirectoryCheckHandler
        , simple "DirectoryIndex" aDirectoryIndex
        , simple "IndexRedirect" aIndexRedirect
        , simple "DirectorySlash" aDirectorySlash
        , simple "FallbackResource" aFallbackResource
        , simple "PassEnv" aPassEnv
        , multi "SetEnv" aSetEnv
        , multi "UnsetEnv" aUnsetEnv
        , multi "AddOutputFilterByType" aAddOutputFilterByType
        , multi "FilterChain" aFilterChain
        , multi "FilterDeclare" aFilterDeclare
        , multi "FilterProtocol" aFilterProtocol
        , multi "FilterProvider" aFilterProvider
        , multi "FilterTrace" aFilterTrace
        , simple "ImapBase" aImapBase
        , simple "ImapDefault" aImapDefault
        , simple "ImapMenu" aImapMenu
        , simple "BufferedLogs" aBufferedLogs
        , multi "CustomLog" aCustomLog
        , multi "LogFormat" aLogFormat
        , multi "TransferLog" aTransferLog
        , multi "AddCharset" aAddCharset
        , multi "AddEncoding" aAddEncoding
        , multi "AddHandler" aAddHandler
        , multi "AddInputFilter" aAddInputFilter
        , multi "AddLanguage" aAddLanguage
        , multi "AddOutputFilter" aAddOutputFilter
        , multi "AddType" aAddType
        , simple "DefaultLanguage" aDefaultLanguage
        , simple "MultiviewsMatch" aMultiviewsMatch
        , simple "TypesConfig" aTypesConfig
        , simple "CacheNegotiatedDocs" aCacheNegotiatedDocs
        , simple "ForceLanguagePriority" aForceLanguagePriority
        , simple "LanguagePriority" aLanguagePriority
        , simple "ReflectorHeader" aReflectorHeader
        , multi "BrowserMatch" aBrowserMatch
        , multi "BrowserMatchNoCase" aBrowserMatchNoCase
        , multi "SetEnvIf" aSetEnvIf
        , multi "SetEnvIfExpr" aSetEnvIfExpr
        , multi "SetEnvIfNoCase" aSetEnvIfNoCase
        , simple "ChrootDir" aChrootDir
        , simple "Group" aGroup
        , simple "Suexec" aSuexec
        , simple "User" aUser
        , simple "UserDir" aUserDir
        , simple "SSLCACertificateFile" aSSLCACertificateFile
        , simple "SSLCACertificatePath" aSSLCACertificatePath
        , simple "SSLCADNRequestFile" aSSLCADNRequestFile
        , simple "SSLCADNRequestPath" aSSLCADNRequestPath
        , simple "SSLCARevocationCheck" aSSLCARevocationCheck
        , simple "SSLCARevocationFile" aSSLCARevocationFile
        , simple "SSLCARevocationPath" aSSLCARevocationPath
        , simple "SSLCertificateChainFile" aSSLCertificateChainFile
        , simple "SSLCertificateFile" aSSLCertificateFile
        , simple "SSLCertificateKeyFile" aSSLCertificateKeyFile
        , simple "SSLCipherSuite" aSSLCipherSuite
        , simple "SSLCompression" aSSLCompression
        , simple "SSLEngine" aSSLEngine
        , simple "SSLHonorCipherOrder" aSSLHonorCipherOrder
        , simple "SSLInsecureRenegotiation" aSSLInsecureRenegotiation
        , simple "SSLOCSPDefaultResponder" aSSLOCSPDefaultResponder
        , simple "SSLOCSPEnable" aSSLOCSPEnable
        , simple "SSLOCSPOverrideResponder" aSSLOCSPOverrideResponder
        , simple "SSLOCSPResponderTImeout" aSSLOCSPResponderTimeout
        , simple "SSLOCSPResponseMaxAge" aSSLOCSPResponseMaxAge
        , simple "SSLOCSPResponseTimeSkew" aSSLOCSPResponseTimeSkew
        , simple "SSLOCSPUserRequestNonce" aSSLOCSPUseRequestNonce
        , multi "SSLOpenSSLConfCmd" aSSLOpenSSLConfCmd
        , simple "SSLOptions" aSSLOptions
        , simple "SSLProtocol" aSSLProtocol
        , simple "SSLSessionCacheTimeout" aSSLSessionCacheTimeout
        , simple "SSLSessionTicketKeyFile" aSSLSessionTicketKeyFile
        , simple "SSLSessionTickets" aSSLSessionTickets
        , simple "SSLSRPUnknownUserSeed" aSSLSRPUnknownUserSeed
        , simple "SSLSRPVerifierFile" aSSLSRPVerifierFile
        , simple "SSLStaplingErrorCacheTimeout" aSSLStaplingErrorCacheTimeout
        , simple "SSLStaplingFakeTryLater" aSSLStaplingFakeTryLater
        , simple "SSLStaplingForceURL" aSSLStaplingForceURL
        , simple "SSLStaplingResponderTimeout" aSSLStaplingResponderTimeout
        , simple "SSLStaplingResponseMaxAge" aSSLStaplingResponseMaxAge
        , simple "SSLStaplingResponseTimeSkew" aSSLStaplingResponseTimeSkew
        , simple "SSLStaplingReturnResponderErrors" aSSLStaplingReturnResponderErrors
        , simple "SSLStaplingStandardCacheTimeout" aSSLStaplingStandardCacheTimeout
        , simple "SSLStrictSNIVHostCheck" aSSLStrictSNIVHostCheck
        , simple "SSLUseStapling" aSSLUseStapling
        , simple "SSLVerifyClient" aSSLVerifyClient
        , simple "SSLVerifyDepth" aSSLVerifyDepth
        , simple "SSLCryptoDevice" aSSLCryptoDevice
        , simple "SSLFIPS" aSSLFIPS
        , simple "SSLPassPhraseDialog" aSSLPassPhraseDialog
        , simple "SSLRandomSeed" aSSLRandomSeed
        , simple "SSLSessionCache" aSSLSessionCache
        , simple "SSLStaplingCache" aSSLStaplingCache
        , simple "SSLUserName" aSSLUserName
        , printMaybe printDirectories (aDirectory source)
        , printMaybe printFiles (aFiles source)
        , printMaybe printLocations (aLocation source)
        , printMaybe printVirtualHosts (aVirtualHosts source)
        ]
--}}}
--{{{ Function that will print a list of Directory context
-- | Render every Directory section, stacked vertically.
-- ('vcat' is the list form of '($$)' with 'empty' for the base case,
-- matching the previous explicit recursion.)
printDirectories :: [Directory] -> Doc
printDirectories dirs = vcat (map printDirectory dirs)
--}}}
--{{{ Function that will print a Directory context
-- | Render a <Directory> or <DirectoryMatch> section.
--
-- Fix: the closing tag now mirrors the opening one.  A
-- <DirectoryMatch ...> section was previously always closed with
-- </Directory>, which Apache rejects (matching sections must be
-- closed with </DirectoryMatch>); cf. 'printFile', which already
-- handled its closing tag correctly.
printDirectory :: Directory -> Doc
printDirectory source = opening
     $$ nest 5 (printMaybe
                   printDirDirectives
                   (dDirDirectives source))
     $$ nest 5 (printMaybe
                   printRequireCons
                   (dRequireCons source))
     $$ nest 5 (printMaybe
                   (printSimpleInstruction "AllowOverride")
                   (dAllowOverride source))
     $$ nest 5 (printMaybe
                   (printSimpleInstruction "AllowOverrideList")
                   (dAllowOverrideList source))
     $$ nest 5 (printMaybe
                   (printSimpleInstruction "Options")
                   (dOptions source))
     $$ nest 5 (printMaybe
                   printFiles
                   (dFiles source))
     $$ closing
  where
    isMatch = noMaybe (dMatch source)
    path = noMaybe (dPath source)
    opening
      | isMatch = text "<DirectoryMatch " <> text path <> text ">"
      | otherwise = text "<Directory " <> text path <> text ">"
    closing
      | isMatch = text "</DirectoryMatch>"
      | otherwise = text "</Directory>"
--}}}
--{{{ Function that will print a DirDirective context
-- | Pretty print the directives shared by Directory, Files and
-- Location sections.  Each optional field is rendered on its own line
-- when present, in the original output order.
--
-- Refactored from one ~340-line '($$)' chain into a table of
-- renderers: '($$)' is associative with 'empty' as its identity, so
-- 'vcat' over the list renders the identical document.
printDirDirectives :: DirDirectives -> Doc
printDirDirectives source = vcat directives
  where
    -- Render an optional single-valued directive.
    simple name field = printMaybe (printSimpleInstruction name) (field source)
    -- Render an optional multi-valued directive.
    multi name field = printMaybe (printListInstruction name) (field source)
    -- Directive-name spellings (e.g. "CGIDScript", "EnableSendFile")
    -- are kept byte-identical to the previous output.
    directives =
        [ simple "AcceptPathInfo" dAcceptPathInfo
        , simple "AddDefaultCharset" dAddDefaultCharset
        , simple "ContentDigest" dContentDigest
        , multi "Define" dDefine
        , simple "EnableMMAP" dEnableMMAP
        , simple "EnableSendFile" dEnableSendFile
        , simple "Error" dError
        , multi "ErrorDocument" dErrorDocument
        , simple "FileETag" dFileETag
        , simple "ForceType" dForceType
        , simple "HostnameLookups" dHostnameLookups
        , multi "Include" dInclude
        , multi "IncludeOptional" dIncludeOptional
        , simple "LimitRequestBody" dLimitRequestBody
        , simple "LimitXMLRequestBody" dLimitXMLRequestBody
        , simple "LogLevel" dLogLevel
        , simple "MaxRangeOverlaps" dMaxRangeOverlaps
        , simple "MaxRangeReversals" dMaxRangeReversals
        , simple "MaxRanges" dMaxRanges
        , simple "RLimitCPU" dRLimitCPU
        , simple "RLimitMEM" dRLimitMEM
        , simple "RLimitNPROC" dRLimitNPROC
        , simple "ServerSignature" dServerSignature
        , simple "SetHandler" dSetHandler
        , simple "SetInputFilter" dSetInputFilter
        , simple "SetOutputFilter" dSetOutputFilter
        , simple "UseCanonicalName" dUseCanonicalName
        , simple "UseCanonicalPhysicalPort" dUseCanonicalPhysicalPort
        , multi "Redirect" dRedirect
        , multi "RedirectMatch" dRedirectMatch
        , multi "RedirectPermanent" dRedirectPermanent
        , multi "RedirectTemp" dRedirectTemp
        , simple "AuthBasicAuthoritative" dAuthBasicAuthoritative
        , simple "AuthBasicFake" dAuthBasicFake
        , simple "AuthBasicProvider" dAuthBasicProvider
        , simple "AuthBasicUseDigestAlgorithm" dAuthBasicUseDigestAlgorithm
        , simple "AuthName" dAuthName
        , simple "AuthType" dAuthType
        , simple "AuthUserFile" dAuthUserFile
        , simple "AuthMerging" dAuthMerging
        , simple "AuthzSendForbiddenOnFailure" dAuthzSendForbiddenOnFailure
        , multi "Require" dRequire
        , simple "AuthGroupFile" dAuthGroupFile
        , multi "AddAlt" dAddAlt
        , multi "AddAltByEncoding" dAddAltByEncoding
        , multi "AddAltByType" dAddAltByType
        , multi "AddDescription" dAddDescription
        , multi "AddIcon" dAddIcon
        , multi "AddIconByEncoding" dAddIconByEncoding
        , multi "AddIconByType" dAddIconByType
        , simple "DefaultIcon" dDefaultIcon
        , simple "HeaderName" dHeaderName
        , simple "IndexHeadInsert" dIndexHeadInsert
        , multi "IndexIgnore" dIndexIgnore
        , simple "IndexIgnoreReset" dIndexIgnoreReset
        , multi "IndexOptions" dIndexOptions
        , simple "IndexOrderDefault" dIndexOrderDefault
        , simple "IndexStyleSheet" dIndexStyleSheet
        , simple "ReadmeName" dReadmeName
        , simple "CGIDScript" dCGIDScriptTimeout
        , simple "DirectoryCheckHandler" dDirectoryCheckHandler
        , simple "DirectoryIndex" dDirectoryIndex
        , simple "IndexRedirect" dIndexRedirect
        , simple "DirectorySlash" dDirectorySlash
        , simple "FallbackResource" dFallbackResource
        , simple "PassEnv" dPassEnv
        , multi "SetEnv" dSetEnv
        , multi "UnsetEnv" dUnsetEnv
        , multi "AddOutputFilterByType" dAddOutputFilterByType
        , multi "FilterChain" dFilterChain
        , multi "FilterDeclare" dFilterDeclare
        , multi "FilterProtocol" dFilterProtocol
        , multi "FilterProvider" dFilterProvider
        , multi "FilterTrace" dFilterTrace
        , simple "ImapBase" dImapBase
        , simple "ImapDefault" dImapDefault
        , simple "ImapMenu" dImapMenu
        , multi "CustomLog" dCustomLog
        , multi "LogFormat" dLogFormat
        , multi "TransferLog" dTransferLog
        , multi "AddCharset" dAddCharset
        , multi "AddEncoding" dAddEncoding
        , multi "AddHandler" dAddHandler
        , multi "AddInputFilter" dAddInputFilter
        , multi "AddLanguage" dAddLanguage
        , multi "AddOutputFilter" dAddOutputFilter
        , multi "AddType" dAddType
        , simple "DefaultLanguage" dDefaultLanguage
        , simple "ModMimeUsePathInfo" dModMimeUsePathInfo
        , simple "MultiviewsMatch" dMultiviewsMatch
        , multi "RemoveCharset" dRemoveCharset
        , multi "RemoveEncoding" dRemoveEncoding
        , multi "RemoveHandler" dRemoveHandler
        , multi "RemoveInputFilter" dRemoveInputFilter
        , multi "RemoveLanguage" dRemoveLanguage
        , multi "RemoveOutputFilter" dRemoveOutputFilter
        , multi "RemoveType" dRemoveType
        , simple "ForceLanguagePriority" dForceLanguagePriority
        , simple "LanguagePriority" dLanguagePriority
        , simple "ReflectorHeader" dReflectorHeader
        , simple "KeptBodySize" dKeptBodySize
        , multi "BrowserMatch" dBrowserMatch
        , multi "BrowserMatchNoCase" dBrowserMatchNoCase
        , multi "SetEnvIf" dSetEnvIf
        , multi "SetEnvIfExpr" dSetEnvIfExpr
        , multi "SetEnvIfNoCase" dSetEnvIfNoCase
        , simple "SSLCipherSuite" dSSLCipherSuite
        , simple "SSLOptions" dSSLOptions
        , simple "SSLRenegBufferSize" dSSLRenegBufferSize
        , simple "SSLRequireSSL" dSSLRequireSSL
        , simple "SSLUserName" dSSLUserName
        , simple "SSLVerifyClient" dSSLVerifyClient
        , simple "SSLVerifyDepth" dSSLVerifyDepth
        ]
--}}}
--{{{ Function that will print a list of RequireCons context
-- | Render every Require container, stacked vertically.
-- ('vcat' is the list form of '($$)' with 'empty' for the base case,
-- matching the previous explicit recursion.)
printRequireCons :: [RequireCons] -> Doc
printRequireCons cons = vcat (map printRequiresCons cons)
--}}}
--{{{ Function that will print a RequireCons context
-- | Render a <RequireAll>, <RequireAny> or <RequireNone> container.
--
-- Fix: the closing tag now repeats the container kind.  Every
-- container was previously closed with a bare </Require>, which
-- Apache's mod_authz_core rejects (e.g. <RequireAll> must be closed
-- with </RequireAll>).
printRequiresCons :: RequireCons -> Doc
printRequiresCons source =
     text ("<Require" ++ consType ++ ">")
     $$ nest 5 (printMaybe
                   (printListInstruction "Require")
                   (rRequire source))
     $$ nest 5 (printMaybe
                   printRequireCons
                   (rRequireCons source))
     $$ text ("</Require" ++ consType ++ ">")
  where
    -- Only the three kinds the original accepted; anything else still
    -- fails, but now with a diagnostic instead of a bare
    -- pattern-match error.
    consType = case (noMaybe (rConsType source)) of
                    "All" -> "All"
                    "Any" -> "Any"
                    "None" -> "None"
                    other -> error ("printRequiresCons: unknown container type " ++ other)
--}}}
--{{{ Function that prints a list of Files context
-- | Render every Files section, stacked vertically.
-- ('vcat' is the list form of '($$)' with 'empty' for the base case,
-- matching the previous explicit recursion.)
printFiles :: [Files] -> Doc
printFiles fs = vcat (map printFile fs)
--}}}
--{{{ Function that will print a Files context
-- | Render a <Files> or <FilesMatch> section; the closing tag mirrors
-- the opening one.
printFile :: Files -> Doc
printFile source = opening
     $$ nest 5 (printMaybe
                   printDirDirectives
                   (fDirDirectives source))
     $$ nest 5 (printMaybe
                   printRequireCons
                   (fRequireCons source))
     $$ closing
  where
    isMatch = noMaybe (fMatch source)
    fname = noMaybe (fFileName source)
    opening
      | isMatch = text "<FilesMatch " <> text fname <> text ">"
      | otherwise = text "<Files " <> text fname <> text ">"
    closing
      | isMatch = text "</FilesMatch>"
      | otherwise = text "</Files>"
--}}}
--{{{ Function that prints a list of Location context
-- | Render every Location section, stacked vertically.
-- ('vcat' is the list form of '($$)' with 'empty' for the base case,
-- matching the previous explicit recursion.)
printLocations :: [Location] -> Doc
printLocations locs = vcat (map printLocation locs)
--}}}
--{{{ Function that prints a Location context
-- | Render a <Location> or <LocationMatch> section.
--
-- Fix: the closing tag now mirrors the opening one.  A
-- <LocationMatch ...> section was previously always closed with
-- </Location>, which Apache rejects; cf. 'printFile', which already
-- handled its closing tag correctly.
printLocation :: Location -> Doc
printLocation source = opening
     $$ nest 5 (printMaybe
                   printDirDirectives
                   (lDirDirectives source))
     $$ nest 5 (printMaybe
                   printRequireCons
                   (lRequireCons source))
     $$ nest 5 (printMaybe
                   (printSimpleInstruction "Options")
                   (lOptions source))
     $$ closing
  where
    isMatch = noMaybe (lMatch source)
    path = noMaybe (lPath source)
    opening
      | isMatch = text "<LocationMatch " <> text path <> text ">"
      | otherwise = text "<Location " <> text path <> text ">"
    closing
      | isMatch = text "</LocationMatch>"
      | otherwise = text "</Location>"
--}}}
--{{{ Function that prints a list of VirtualHost context
-- | Render every VirtualHost context, one after the other.
printVirtualHosts :: [VirtualHost] -> Doc
printVirtualHosts = foldr (\vh doc -> printVirtualHost vh $$ doc) empty
--}}}
--{{{ Function that prints a VirtualHost context
-- | Render one @\<VirtualHost\>@ context with every supported directive.
-- Directives whose field is 'Nothing' are omitted via 'printMaybe'.
--
-- Fixes relative to the previous version:
--   * directive spellings corrected: "SSLOCSPResponderTimeout" (was
--     "SSLOCSPResponderTImeout") and "SSLOCSPUseRequestNonce" (was
--     "SSLOCSPUserRequestNonce") to match Apache mod_ssl;
--   * the ServerName/ServerAlias handling no longer uses partial
--     head/tail, so an empty list cannot crash the printer.
printVirtualHost :: VirtualHost -> Doc
printVirtualHost source =
    text "<VirtualHost " <> hsep (map text (noMaybe (sVirtualHostAddress source))) <> text ">"
    $$ simple "AcceptPathInfo" sAcceptPathInfo
    $$ simple "AccessFileName" sAccessFileName
    $$ simple "AddDefaultCharset" sAddDefaultCharset
    $$ simple "AllowEncodedSlashes" sAllowEncodedSlashes
    $$ simple "ContentDigest" sContentDigest
    $$ listed "Define" sDefine
    $$ simple "DocumentRoot" sDocumentRoot
    $$ simple "EnableMMAP" sEnableMMAP
    $$ simple "EnableSendfile" sEnableSendfile
    $$ simple "Error" sError
    $$ listed "ErrorDocument" sErrorDocument
    $$ simple "ErrorLog" sErrorLog
    $$ simple "ErrorLogFormat" sErrorLogFormat
    $$ simple "FileETag" sFileETag
    $$ simple "HostnameLookups" sHostnameLookups
    $$ listed "Include" sInclude
    $$ listed "IncludeOptional" sIncludeOptional
    $$ simple "KeepAlive" sKeepAlive
    $$ simple "KeepAliveTimeout" sKeepAliveTimeout
    $$ simple "LimitInternalRecursion" sLimitInternalRecursion
    $$ simple "LimitRequestBody" sLimitRequestBody
    $$ simple "LimitRequestFields" sLimitRequestFields
    $$ simple "LimitRequestFieldSize" sLimitRequestFieldSize
    $$ simple "LimitRequestLine" sLimitRequestLine
    $$ simple "LimitXMLRequestBody" sLimitXMLRequestBody
    $$ simple "LogLevel" sLogLevel
    $$ simple "MaxKeepAliveRequests" sMaxKeepAliveRequests
    $$ simple "MaxRangesOverlaps" sMaxRangesOverlaps
    $$ simple "MaxRangesReversals" sMaxRangesReversals
    $$ simple "MaxRanges" sMaxRanges
    $$ simple "Options" sOptions
    $$ simple "Protocol" sProtocol
    $$ simple "RLimitCPU" sRLimitCPU
    $$ simple "RLimitMEM" sRLimitMEM
    $$ simple "RLimitNPROC" sRLimitNPROC
    $$ simple "ServerAdmin" sServerAdmin
    -- ServerName and ServerAlias instructions share a single list:
    -- ServerName is the first element and any ServerAlias entries follow.
    -- Matching on the list directly keeps this total.
    $$ nest 5 (case sServerName source of
                 Just (name : aliases) ->
                     printSimpleInstruction "ServerName" name
                     $$ printListInstruction "ServerAlias" aliases
                 _ -> empty)
    $$ simple "ServerPath" sServerPath
    $$ simple "ServerSignature" sServerSignature
    $$ simple "SetHandler" sSetHandler
    $$ simple "SetInputFilter" sSetInputFilter
    $$ simple "SetOutputFilter" sSetOutputFilter
    $$ simple "TimeOut" sTimeOut
    $$ simple "TraceEnable" sTraceEnable
    $$ simple "UseCanonicalName" sUseCanonicalName
    $$ simple "UseCanonicalPhysicalPort" sUseCanonicalPhysicalPort
    $$ listed "Alias" sAlias
    $$ listed "AliasMatch" sAliasMatch
    $$ listed "Redirect" sRedirect
    $$ listed "RedirectMatch" sRedirectMatch
    $$ listed "RedirectPermanent" sRedirectPermanent
    $$ listed "RedirectTemp" sRedirectTemp
    $$ listed "ScriptAlias" sScriptAlias
    $$ listed "ScriptAliasMatch" sScriptAliasMatch
    $$ listed "AddAlt" sAddAlt
    $$ listed "AddAltByEncoding" sAddAltByEncoding
    $$ listed "AddAltByType" sAddAltByType
    $$ listed "AddDescription" sAddDescription
    $$ listed "AddIcon" sAddIcon
    $$ listed "AddIconByEncoding" sAddIconByEncoding
    $$ listed "AddIconByType" sAddIconByType
    $$ simple "DefaultIcon" sDefaultIcon
    $$ simple "HeaderName" sHeaderName
    $$ simple "IndexHeadInsert" sIndexHeadInsert
    $$ listed "IndexIgnore" sIndexIgnore
    $$ simple "IndexIgnoreReset" sIndexIgnoreReset
    $$ listed "IndexOptions" sIndexOptions
    $$ simple "IndexOrderDefault" sIndexOrderDefault
    $$ simple "IndexStyleSheet" sIndexStyleSheet
    $$ simple "ReadmeName" sReadmeName
    $$ simple "ScriptLog" sScriptLog
    $$ simple "ScriptLogBuffer" sScriptLogBuffer
    $$ simple "ScriptLogLength" sScriptLogLength
    $$ simple "CGIDScriptTimeout" sCGIDScriptTimeout
    $$ simple "DirectoryCheckHandler" sDirectoryCheckHandler
    $$ simple "DirectoryIndex" sDirectoryIndex
    $$ simple "IndexRedirect" sIndexRedirect
    $$ simple "DirectorySlash" sDirectorySlash
    $$ simple "FallbackResource" sFallbackResource
    $$ simple "PassEnv" sPassEnv
    $$ listed "SetEnv" sSetEnv
    $$ listed "UnsetEnv" sUnsetEnv
    $$ listed "AddOutputFilterByType" sAddOutputFilterByType
    $$ listed "FilterChain" sFilterChain
    $$ listed "FilterDeclare" sFilterDeclare
    $$ listed "FilterProtocol" sFilterProtocol
    $$ listed "FilterProvider" sFilterProvider
    $$ listed "FilterTrace" sFilterTrace
    $$ simple "ImapBase" sImapBase
    $$ simple "ImapDefault" sImapDefault
    $$ simple "ImapMenu" sImapMenu
    $$ listed "CustomLog" sCustomLog
    $$ listed "LogFormat" sLogFormat
    $$ listed "TransferLog" sTransferLog
    $$ listed "AddCharset" sAddCharset
    $$ listed "AddEncoding" sAddEncoding
    $$ listed "AddHandler" sAddHandler
    $$ listed "AddInputFilter" sAddInputFilter
    $$ listed "AddLanguage" sAddLanguage
    $$ listed "AddOutputFilter" sAddOutputFilter
    $$ listed "AddType" sAddType
    $$ simple "DefaultLanguage" sDefaultLanguage
    $$ simple "MultiviewsMatch" sMultiviewsMatch
    $$ listed "RemoveCharset" sRemoveCharset
    $$ listed "RemoveEncoding" sRemoveEncoding
    $$ listed "RemoveHandler" sRemoveHandler
    $$ listed "RemoveInputFilter" sRemoveInputFilter
    $$ listed "RemoveLanguage" sRemoveLanguage
    $$ listed "RemoveOutputFilter" sRemoveOutputFilter
    $$ listed "RemoveType" sRemoveType
    $$ simple "CacheNegotiatedDocs" sCacheNegotiatedDocs
    $$ simple "ForceLanguagePriority" sForceLanguagePriority
    $$ simple "LanguagePriority" sLanguagePriority
    $$ simple "ReflectorHeader" sReflectorHeader
    $$ listed "BrowserMatch" sBrowserMatch
    $$ listed "BrowserMatchNoCase" sBrowserMatchNoCase
    $$ listed "SetEnvIf" sSetEnvIf
    $$ listed "SetEnvIfExpr" sSetEnvIfExpr
    $$ listed "SetEnvIfNoCase" sSetEnvIfNoCase
    $$ simple "UserDir" sUserDir
    $$ simple "SSLCACertificateFile" sSSLCACertificateFile
    $$ simple "SSLCACertificatePath" sSSLCACertificatePath
    $$ simple "SSLCADNRequestFile" sSSLCADNRequestFile
    $$ simple "SSLCADNRequestPath" sSSLCADNRequestPath
    $$ simple "SSLCARevocationCheck" sSSLCARevocationCheck
    $$ simple "SSLCARevocationFile" sSSLCARevocationFile
    $$ simple "SSLCARevocationPath" sSSLCARevocationPath
    $$ simple "SSLCertificateChainFile" sSSLCertificateChainFile
    $$ simple "SSLCertificateFile" sSSLCertificateFile
    $$ simple "SSLCertificateKeyFile" sSSLCertificateKeyFile
    $$ simple "SSLCipherSuite" sSSLCipherSuite
    $$ simple "SSLCompression" sSSLCompression
    $$ simple "SSLEngine" sSSLEngine
    $$ simple "SSLHonorCipherOrder" sSSLHonorCipherOrder
    $$ simple "SSLInsecureRenegotiation" sSSLInsecureRenegotiation
    $$ simple "SSLOCSPDefaultResponder" sSSLOCSPDefaultResponder
    $$ simple "SSLOCSPEnable" sSSLOCSPEnable
    $$ simple "SSLOCSPOverrideResponder" sSSLOCSPOverrideResponder
    $$ simple "SSLOCSPResponderTimeout" sSSLOCSPResponderTimeout
    $$ simple "SSLOCSPResponseMaxAge" sSSLOCSPResponseMaxAge
    $$ simple "SSLOCSPResponseTimeSkew" sSSLOCSPResponseTimeSkew
    $$ simple "SSLOCSPUseRequestNonce" sSSLOCSPUseRequestNonce
    $$ listed "SSLOpenSSLConfCmd" sSSLOpenSSLConfCmd
    $$ simple "SSLOptions" sSSLOptions
    $$ simple "SSLProtocol" sSSLProtocol
    $$ simple "SSLSessionCacheTimeout" sSSLSessionCacheTimeout
    $$ simple "SSLSessionTicketKeyFile" sSSLSessionTicketKeyFile
    $$ simple "SSLSessionTickets" sSSLSessionTickets
    $$ simple "SSLSRPUnknownUserSeed" sSSLSRPUnknownUserSeed
    $$ simple "SSLSRPVerifierFile" sSSLSRPVerifierFile
    $$ simple "SSLStaplingErrorCacheTimeout" sSSLStaplingErrorCacheTimeout
    $$ simple "SSLStaplingFakeTryLater" sSSLStaplingFakeTryLater
    $$ simple "SSLStaplingForceURL" sSSLStaplingForceURL
    $$ simple "SSLStaplingResponderTimeout" sSSLStaplingResponderTimeout
    $$ simple "SSLStaplingResponseMaxAge" sSSLStaplingResponseMaxAge
    $$ simple "SSLStaplingResponseTimeSkew" sSSLStaplingResponseTimeSkew
    $$ simple "SSLStaplingReturnResponderErrors" sSSLStaplingReturnResponderErrors
    $$ simple "SSLStaplingStandardCacheTimeout" sSSLStaplingStandardCacheTimeout
    $$ simple "SSLStrictSNIVHostCheck" sSSLStrictSNIVHostCheck
    $$ simple "SSLUseStapling" sSSLUseStapling
    $$ simple "SSLVerifyClient" sSSLVerifyClient
    $$ simple "SSLVerifyDepth" sSSLVerifyDepth
    $$ nest 5 (printMaybe printDirectories (sDirectory source))
    $$ nest 5 (printMaybe printLocations (sLocation source))
    $$ text "</VirtualHost>"
  where
    -- One optional single-valued directive, indented inside the context.
    simple name field = nest 5 (printMaybe (printSimpleInstruction name) (field source))
    -- One optional multi-valued directive (one output line per value).
    listed name field = nest 5 (printMaybe (printListInstruction name) (field source))
--}}}
--{{{ Functionn that prints a simple instruction
-- | Render a single @name value@ directive line.
printSimpleInstruction :: String -> String -> Doc
printSimpleInstruction name value = hsep [text name, text value]
--}}}
--{{{ Function that prints a list of the same instructions from a list of values
-- | Render one directive line per value, all sharing the same name.
printListInstruction :: String -> [String] -> Doc
printListInstruction name = foldr addLine empty
  where addLine value rest = printSimpleInstruction name value $$ rest
--}}}
--{{{ Functions that prints a Apache source
-- | Parse @apache.conf@ in the current directory and pretty print the
-- resulting source tree.  A parse failure is reported on stdout instead
-- of crashing with an irrefutable-pattern error as before.
printApacheConf :: IO ()
printApacheConf = do
    parsed <- parseTreeApache "apache.conf"
    case parsed of
      Right tree -> print (printApache (createSourceApache tree))
      Left _     -> putStrLn "printApacheConf: failed to parse apache.conf"
--}}}
| prl-tokyo/bigul-configuration-adaptation | Transformations/Parsers/Apache_Parser/ApachePrettyPrinter.hs | mit | 45,708 | 2,351 | 200 | 5,374 | 14,108 | 7,022 | 7,086 | 1,519 | 3 |
-- | Re-exports of monadic type classes used throughout the workflow
-- packages, so that downstream modules need only a single import.
module Workflow.Reexports
 ( MonadThrow
 , MonadFree
 ) where
import Control.Monad.Catch (MonadThrow)
import Control.Monad.Free (MonadFree)
| sboosali/workflow-types | sources/Workflow/Reexports.hs | mit | 146 | 0 | 5 | 20 | 37 | 24 | 13 | 5 | 0 |
-- | Thin time utilities over the @time@ package, plus a 'Bin.Binary'
-- instance for 'T.UTCTime' (note: an orphan instance that serialises
-- with whole-second precision only).
module Wf.Application.Time
( Time
, formatTime
, diffTime
, addSeconds
, getCurrentTime
, mjd
) where
import qualified Data.Time.Clock as T (UTCTime(..), diffUTCTime, addUTCTime, getCurrentTime)
import qualified Data.Time.Calendar as T (Day(..))
import qualified Data.Time.Format as T (formatTime)
import System.Locale (defaultTimeLocale)
import qualified Data.Binary as Bin (Binary(..))
-- Application-wide alias for UTC timestamps.
type Time = T.UTCTime
-- Format a time with the given format string under the default locale.
formatTime :: String -> Time -> String
formatTime = T.formatTime defaultTimeLocale
-- Whole seconds from @b@ to @a@ (i.e. @a - b@), rounded toward
-- negative infinity by 'floor'.
diffTime :: Time -> Time -> Integer
diffTime a b = floor $ T.diffUTCTime a b
-- Add a number of seconds to a time.
addSeconds :: Time -> Integer -> Time
addSeconds t s = T.addUTCTime (fromInteger s) t
getCurrentTime :: IO Time
getCurrentTime = T.getCurrentTime
-- The Modified Julian Date epoch: 1858-11-17 00:00:00 UTC.
mjd :: Time
mjd = T.UTCTime (T.ModifiedJulianDay 0) 0
-- NOTE(review): orphan instance.  'round' drops fractional seconds on
-- encode, so @decode . encode@ is lossy for sub-second times.
instance Bin.Binary T.UTCTime where
    put (T.UTCTime (T.ModifiedJulianDay day) time) = do
        Bin.put day
        Bin.put (round time :: Integer)
    get = do
        day <- Bin.get
        time <- Bin.get
        return $ T.UTCTime (T.ModifiedJulianDay day) (fromInteger time)
| bigsleep/Wf | src/Wf/Application/Time.hs | mit | 1,062 | 0 | 12 | 198 | 368 | 205 | 163 | 31 | 1 |
{-
Handles all the functionality surrounding the commenting ability. This
covers things like everything to do with reading and writing comments and
provides lots of helper functions.
Author(s): Lewis Deane
License: MIT
Last Modified: 20/7/2016
-}
module CommentTools (setComment, appendComment, updateComment, deleteComment, currentComment, genBlock) where
-- Imports for things we will need.
import Control.Applicative
import Data.List (isPrefixOf, sortBy, elemIndex)
import Data.List.Split (splitOn)
import Data.String.Utils (replace)
import Data.List (sort)
import Data.Time.Calendar
import Data.Time.Clock
import System.Directory
import System.IO
import qualified Config as C
import qualified FieldTools as F
import LangTools
-- Useful type synonyms.
type FileName = String   -- path of the file whose comment is managed
type Comment = String    -- the full comment text
type Params = [String]   -- extra command-line parameters (fields)
type Line = String       -- one line of a source file
type Lines = [Line]      -- a file's contents, split into lines
-- Build a fresh comment block from the comment and parameters, strip any
-- existing block from the content, and write the result back to the file.
write :: FileName -> Comment -> Lines -> Params -> IO ()
write fname com content params = do
    fields <- F.getFields params
    let lang = getLang fname
    block <- generateCommentBlock lang com fields
    writeToFile fname (block ++ removeCommentBlock lang content)
-- Replace the file's contents: write everything to a temp file in the
-- current directory, then delete the original and rename the temp over
-- it.  Deleting first keeps 'renameFile' portable (it may refuse to
-- overwrite an existing target on some platforms).
-- NOTE(review): not crash-safe -- an interruption between 'removeFile'
-- and 'renameFile' leaves only the temp file behind.
writeToFile :: FileName -> Lines -> IO ()
writeToFile fname content = do
    (tempName, tempHandle) <- openTempFile "." "temp"
    hPutStr tempHandle $ unlines content
    hClose tempHandle
    removeFile fname
    renameFile tempName fname
-- Replace whatever comment block the file has with the given comment.
setComment :: FileName -> Comment -> Params -> IO ()
setComment fname newComment params = do
    raw <- readFile fname
    write fname newComment (lines raw) params
-- Extend the file's existing comment with additional text.
appendComment :: FileName -> Comment -> Params -> IO ()
appendComment fname extra params = do
    raw <- readFile fname
    let fileLines = lines raw
        existing  = getComment (getLang fname) fileLines
    write fname (existing ++ " " ++ extra) fileLines params
-- Rewrite the comment block with the same comment text, refreshing the
-- parameter fields if any changed.
updateComment :: FileName -> Params -> IO ()
updateComment fname params = do
    raw <- readFile fname
    let fileLines = lines raw
    write fname (getComment (getLang fname) fileLines) fileLines params
-- Strip the leading comment block from the file, if present.
deleteComment :: FileName -> IO ()
deleteComment fname = do
    raw <- readFile fname
    writeToFile fname (removeCommentBlock (getLang fname) (lines raw))
-- Print the file's current comment block to stdout.
currentComment :: FileName -> IO ()
currentComment fname = do
    raw <- readFile fname
    putStrLn (unlines (getCommentBlock (getLang fname) (lines raw)))
-- Drop the leading comment-block lines, returning the rest of the file.
removeCommentBlock :: Lang -> Lines -> Lines
removeCommentBlock lang = dropWhile (isInCommentBlock lang)
-- The leading comment-block lines; errors out if the file has none.
getCommentBlock :: Lang -> Lines -> Lines
getCommentBlock lang content
    | hasCommentBlock lang content = takeWhile (isInCommentBlock lang) content
    | otherwise = error "No comment block found. Run 'comet' for a list of legal commands."
-- Does this line begin with any of the language's comment-block markers?
isInCommentBlock :: Lang -> Line -> Bool
isInCommentBlock lang line = any (\marker -> marker `isPrefixOf` line) markers
  where markers = [getBlockStart lang, getCommentChar lang, getBlockEnd lang]
-- Extract the comment text (marker characters stripped) from the file's
-- lines; errors out if the file has no comment block.
getComment :: Lang -> Lines -> Comment
getComment lang content
    | hasCommentBlock lang content = unlines (map (strip lang) body)
    | otherwise = error "No comment found. Run 'comet' for a list of legal commands."
  where
    -- Everything after the opening marker, up to the first blank commented
    -- line or the closing marker.
    body = takeWhile stillComment (drop 1 content)
    stillComment x = (trim . strip lang) x /= "" && trim x /= getBlockEnd lang
-- True when the file's first line opens a comment block.  Total: an
-- empty file simply has no block (the old version called 'head' and
-- crashed on empty input).
hasCommentBlock :: Lang -> Lines -> Bool
hasCommentBlock _ []      = False
hasCommentBlock l (c : _) = c == getBlockStart l
-- Prefix a line with the language's comment marker.
comment :: Lang -> Line -> Line
comment lang = (getCommentChar lang ++)
-- Assemble the full header block: opening marker, the wrapped comment
-- lines and field table (each prefixed with the comment marker), and the
-- closing marker.
generateCommentBlock :: Lang -> Comment -> [(String, String)] -> IO Lines
generateCommentBlock lang com fields = do
    bodyLines <- splitInput lang com
    let commented = map (comment lang) (bodyLines ++ genBlock ":" 1 fields)
    pure (getBlockStart lang : commented ++ [getBlockEnd lang])
-- | Render key/value pairs as an aligned table preceded by a blank line:
-- each key is followed by the separator and enough spaces that every
-- value starts in the same column (@gap@ columns past the widest key).
genBlock :: String -> Int -> [(String, String)] -> Lines
genBlock sep gap pairs = "" : map fmt pairs
  where
    widest = foldl max 0 (map (length . fst) pairs)
    fmt (key, val) = key ++ sep ++ replicate (gap + widest - length key) ' ' ++ val
-- Concatenate @n@ copies of a string.  Non-positive counts yield "";
-- the old hand-rolled recursion diverged for negative @n@.
rep :: String -> Int -> String
rep s n = concat (replicate n s)
-- Remove spaces and tabs from both ends of a string.
trim :: String -> String
trim str = chopEnd (chopEnd str)
  where
    -- Reversing after dropping from the front trims one end per pass,
    -- so two passes trim both ends.
    chopEnd = reverse . dropWhile isBlank
    isBlank c = c == ' ' || c == '\t'
-- Drop the language's comment marker from the front of a line.
strip :: Lang -> Line -> Line
strip lang line = drop prefixLen line
  where prefixLen = length (getCommentChar lang)
-- Word-wrap a comment to the configured "comment-width" setting,
-- leaving room for the language's comment marker on every line.
-- NOTE(review): 'read' crashes on a malformed setting; readMaybe would
-- be safer.
splitInput :: Lang -> Comment -> IO Lines
splitInput l c = do
  lim <- read <$> C.readValue "comment-width"
  -- Width left over once the comment marker is prepended.
  let lim' = lim - (length $ getCommentChar l)
      w = words c
      x = foldl f [] w
      -- Greedy fill: append the word to the current last line while it
      -- fits, otherwise start a new line.  The 'f acc []' clause is only
      -- reachable if 'words' ever produced an empty word.
      -- NOTE(review): the fit check ignores the joining space, so a line
      -- can end up one character over the limit -- confirm intended.
      f [] x = [x]
      f acc [] = acc
      f acc x = if (length . last) acc + length x < lim' then init acc ++ [last acc ++ " " ++ x] else acc ++ [x]
  return x
| lewisjdeane/Comet | CommentTools.hs | mit | 6,316 | 0 | 18 | 1,866 | 1,634 | 831 | 803 | 96 | 4 |
{-# OPTIONS_GHC -Wall #-}
-- This module uses cabal package 'sdl2',
-- instead of the legacy sdl1(Graphics.UI.SDL) bindings.
module Main where
import qualified Control.Concurrent as C
import qualified Control.Monad as M
import qualified Data.Vector.Storable.Mutable as V
import qualified Data.Set as S
import qualified SDL
import qualified SDL.Audio as A
import qualified Codec.Audio.Wave as W
import qualified System.IO as IO
-- | SDL capture callback.  Placeholder: the captured sample buffer is
-- ignored and a marker string is printed instead.
-- TODO(review): presumably the samples should be written out (e.g. into
-- the wave file) -- confirm intent.
audioCb :: A.AudioFormat f -> V.IOVector f -> IO ()
audioCb _ _ = print "foo"
-- | Capture-device request: 48 kHz mono, unsigned 16-bit native-endian
-- samples, 4096-sample buffers, delivering data to 'audioCb'.  The
-- device name is left 'Nothing' so SDL picks the default microphone.
micSpec :: A.OpenDeviceSpec
micSpec = A.OpenDeviceSpec {A.openDeviceFreq = A.Mandate 48000
                           ,A.openDeviceFormat = A.Mandate A.Unsigned16BitNativeAudio
                           ,A.openDeviceChannels = A.Mandate A.Mono
                           ,A.openDeviceSamples = 4096
                           ,A.openDeviceCallback = audioCb
                           ,A.openDeviceUsage = A.ForCapture
                           ,A.openDeviceName = Nothing}
-- Shorthand for a set of speaker positions.
-- NOTE(review): not referenced anywhere in this file -- possibly dead code.
type SS = S.Set W.SpeakerPosition
-- | WAVE header matching the capture settings above: vanilla PCM,
-- 48 kHz, 16-bit integer samples, a single front-centre channel.
-- Offset/size/sample counts start at zero and are filled in when the
-- file is written.
waveSpec :: W.Wave
waveSpec = W.Wave {W.waveFileFormat = W.WaveVanilla
                  , W.waveSampleRate = 48000
                  , W.waveSampleFormat = W.SampleFormatPcmInt 16
                  , W.waveChannelMask = S.singleton W.SpeakerFrontCenter
                  , W.waveDataOffset = 0
                  , W.waveDataSize = 0
                  , W.waveSamplesTotal = 0
                  , W.waveOtherChunks = []}
-- | Initialise SDL audio, open the capture device and let the callback
-- run until the process is killed.
-- NOTE(review): the 'IO.Handle' argument is never used -- presumably the
-- captured audio was meant to be written to it; confirm intent.
record :: IO.Handle -> IO ()
record h = do
  SDL.initialize [SDL.InitAudio]
  (dev, _) <- A.openAudioDevice micSpec
  -- 'Play' unpauses the device so capture callbacks start firing.
  A.setAudioDevicePlaybackState dev A.Play
  -- Block forever; 'forever' never returns, so the trailing 'return ()'
  -- is unreachable.
  _ <- M.forever (C.threadDelay maxBound)
  return ()
-- | Record from the default microphone into "mic.rec" using 'waveSpec'
-- as the WAVE header; 'record' is the data callback (currently a stub
-- that discards the audio -- see 'audioCb').
main :: IO ()
main = W.writeWaveFile "mic.rec" waveSpec record
| MiroslavVitkov/voiceid | src/Main2.hs | mit | 1,730 | 0 | 11 | 502 | 432 | 245 | 187 | 39 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.