| code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Divan where
| joom/Divan.hs | src/Divan.hs | mit | 19 | 0 | 2 | 3 | 4 | 3 | 1 | 1 | 0 |
{-# LANGUAGE PatternSynonyms #-}
module Unison.Codebase.Editor.SlurpComponent where
import Unison.Prelude
import Data.Tuple (swap)
import Unison.Reference ( Reference )
import Unison.UnisonFile (TypecheckedUnisonFile)
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Unison.DataDeclaration as DD
import qualified Unison.Term as Term
import qualified Unison.UnisonFile as UF
data SlurpComponent v =
SlurpComponent { types :: Set v, terms :: Set v }
deriving (Eq,Ord,Show)
isEmpty :: SlurpComponent v -> Bool
isEmpty sc = Set.null (types sc) && Set.null (terms sc)
empty :: Ord v => SlurpComponent v
empty = SlurpComponent mempty mempty
difference :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v
difference c1 c2 = SlurpComponent types' terms' where
types' = types c1 `Set.difference` types c2
terms' = terms c1 `Set.difference` terms c2
intersection :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v
intersection c1 c2 = SlurpComponent types' terms' where
types' = types c1 `Set.intersection` types c2
terms' = terms c1 `Set.intersection` terms c2
instance Ord v => Semigroup (SlurpComponent v) where (<>) = mappend
instance Ord v => Monoid (SlurpComponent v) where
mempty = SlurpComponent mempty mempty
c1 `mappend` c2 = SlurpComponent (types c1 <> types c2)
(terms c1 <> terms c2)
-- I'm calling this `closeWithDependencies` because it doesn't just compute
-- the dependencies of the inputs, it mixes them together. Make sure this
-- is what you want.
closeWithDependencies :: forall v a. Ord v
=> TypecheckedUnisonFile v a -> SlurpComponent v -> SlurpComponent v
closeWithDependencies uf inputs = seenDefns where
seenDefns = foldl' termDeps (SlurpComponent mempty seenTypes) (terms inputs)
seenTypes = foldl' typeDeps mempty (types inputs)
termDeps :: SlurpComponent v -> v -> SlurpComponent v
termDeps seen v | Set.member v (terms seen) = seen
termDeps seen v = fromMaybe seen $ do
term <- findTerm v
let -- get the `v`s for the transitive dependency types
-- (the ones for terms are just the `freeVars` below)
-- although this isn't how you'd do it for a term that's already in the codebase
tdeps :: [v]
tdeps = resolveTypes $ Term.dependencies term
seenTypes :: Set v
seenTypes = foldl' typeDeps (types seen) tdeps
seenTerms = Set.insert v (terms seen)
pure $ foldl' termDeps (seen { types = seenTypes
, terms = seenTerms})
(Term.freeVars term)
typeDeps :: Set v -> v -> Set v
typeDeps seen v | Set.member v seen = seen
typeDeps seen v = fromMaybe seen $ do
dd <- fmap snd (Map.lookup v (UF.dataDeclarations' uf)) <|>
fmap (DD.toDataDecl . snd) (Map.lookup v (UF.effectDeclarations' uf))
pure $ foldl' typeDeps (Set.insert v seen) (resolveTypes $ DD.dependencies dd)
resolveTypes :: Set Reference -> [v]
resolveTypes rs = [ v | r <- Set.toList rs, Just v <- [Map.lookup r typeNames]]
findTerm :: v -> Maybe (Term.Term v a)
findTerm v = Map.lookup v allTerms
allTerms = UF.allTerms uf
typeNames :: Map Reference v
typeNames = invert (fst <$> UF.dataDeclarations' uf) <> invert (fst <$> UF.effectDeclarations' uf)
invert :: forall k v . Ord k => Ord v => Map k v -> Map v k
invert m = Map.fromList (swap <$> Map.toList m)
| unisonweb/platform | parser-typechecker/src/Unison/Codebase/Editor/SlurpComponent.hs | mit | 3,447 | 0 | 17 | 764 | 1,133 | 575 | 558 | -1 | -1 |
{-# LANGUAGE QuasiQuotes, OverloadedStrings #-}
module Data.Aeson.TransformSpec (main, spec) where
import Test.Hspec
import Data.Aeson.Transform
import Data.HashMap.Strict (fromList)
import Data.Aeson.QQ
main :: IO ()
main = hspec spec
spec :: Spec
spec =
describe "transform" $ do
context "at" $
it "moves builder deeper" $ do
let o = [aesonQQ| { foo: { bar: { baz: 1 }}} |]
(at "foo" $ at "bar" id) o `shouldBe` [aesonQQ| { baz: 1 } |]
context "atIndex" $
it "moves builder deeper" $ do
let o = [aesonQQ| [ [ 1, 2 ] ] |]
(atIndex 0 $ atIndex 1 id) o `shouldBe` [aesonQQ| 2 |]
context "index" $
it "moves builder into array" $ do
let arr = [aesonQQ| [{a:1}, {a:2}, {a:7}] |]
index 1 arr `shouldBe` [aesonQQ| {a:2} |]
context "attr" $
it "extracts a value" $ do
let o = [aesonQQ| {a:1, b:2} |]
attr "a" o `shouldBe` [aesonQQ| 1 |]
context "keep" $ do
it "filters objects by keys" $ do
let o = [aesonQQ| {a:1, b:2, c:3} |]
keep ["a", "b"] o `shouldBe` [aesonQQ| {a:1, b:2} |]
it "ignores undiscovered keys" $ do
let o = [aesonQQ| {a:1, b:2, c:3} |]
keep ["a", "b", "z"] o `shouldBe` [aesonQQ| {a:1, b:2} |]
context "map" $
it "replaces an array" $ do
let arr = [aesonQQ| [{a:1}, {a:2}, {a:7}] |]
Data.Aeson.Transform.map (attr "a") arr `shouldBe`
[aesonQQ| [1,2,7] |]
context "obj" $
it "builds an object with specified keys" $ do
let o = [aesonQQ| {a:1} |]
result = obj (fromList [("foo", attr "a") , ("bar", attr "a")]) o
result `shouldBe` [aesonQQ| {foo: 1, bar: 1} |]
context "merge" $
it "combines objects with previous value at a key winning against later value" $ do
let o = [aesonQQ| { foo: {a:1, b:2}, bar: {b:3, c:4} } |]
merge (attr "foo") (attr "bar") o `shouldBe`
[aesonQQ| { a:1, b:2, c:4 } |]
| begriffs/aeson-t | test/Data/Aeson/TransformSpec.hs | mit | 1,978 | 0 | 19 | 582 | 607 | 334 | 273 | 49 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module DB.Database
(
connectToDB,
close,
)
where
import Control.Monad
import Data.Word
import Database.PostgreSQL.Simple
import System.Environment
dbName :: IO String
dbName = getEnv "DBNAME"
dbHost :: IO String
dbHost = getEnv "DBHOST"
dbPass :: IO String
dbPass = getEnv "DBPASS"
dbUser :: IO String
dbUser = getEnv "DBUSER"
dbPort :: IO Word16
dbPort = do
port <- getEnv "DBPORT"
return $ fromIntegral (read port :: Int)
connection :: IO ConnectInfo
connection = do
host <- dbHost
port <- dbPort
user <- dbUser
pass <- dbPass
name <- dbName
return ConnectInfo {
connectHost = host,
connectPort = port,
connectUser = user,
connectPassword = pass,
connectDatabase = name
}
connectToDB :: IO Connection
connectToDB = connection >>= connect
| caneroj1/HaskellDocs | DB/Database.hs | mit | 878 | 0 | 10 | 222 | 246 | 130 | 116 | 36 | 1 |
module Model where
import Data.List (partition, sort, group)
import GHC.Generics (Generic)
type Coord = (Int, Int)
boardSize :: Int
boardSize = 13
visionRange :: Double
visionRange = 1.5
validCoord :: Coord -> Bool
validCoord (x, y)
= inRange x && inRange y where
inRange c = c >= 0 && c < boardSize
isCorner :: Coord -> Bool
isCorner (x, y)
= x == 0 && y == 0
|| x == 0 && y == boardSize
|| x == boardSize && y == 0
|| x == boardSize && y == boardSize
isBorder :: Coord -> Bool
isBorder (x, y)
= (x == 0 || y == 0 || x == boardSize || y == boardSize)
&& not (isCorner (x, y))
isCenter :: Coord -> Bool
isCenter (x, y)
= p x && p y where
p c = c >= 1 && c < boardSize - 1
distance :: Coord -> Coord -> Double
distance a b
= sqrt (sq (x1 - x2) + sq (y1 - y2)) where
(x1, y1, x2, y2)
= ( fromIntegral (fst a)
, fromIntegral (snd a)
, fromIntegral (fst b)
, fromIntegral (snd b)
)
sq x = x * x
type Board = [PieceGroup]
data PieceGroup
= PieceGroup
{ members :: [Coord]
, groupSide :: Side
}
data Side = Black | White
deriving (Eq, Show, Generic)
emptyBoard :: Board
emptyBoard = []
sideFromInt :: Int -> Side
sideFromInt 0 = Black
sideFromInt 1 = White
sideFromInt _ = undefined
isAdjacent :: Coord -> Coord -> Bool
isAdjacent (x1, y1) (x2, y2)
= sq (x1 - x2) + sq (y1 -y2) == 1 where
sq x = x * x
isAdjToGroup :: Coord -> PieceGroup -> Bool
isAdjToGroup c pg
= any (isAdjacent c) (members pg)
isVisibleToGroup :: Coord -> PieceGroup -> Bool
isVisibleToGroup c pg
= any ((<= visionRange) . distance c) (members pg)
isVisibleToSide :: Board -> Side -> Coord -> Bool
isVisibleToSide pgs mySide coord
= any
(isVisibleToGroup coord)
(filter ((== mySide) . groupSide) pgs)
adjacents :: Coord -> [Coord]
adjacents (x, y)
= up ++ down ++ left ++ right where
up = if y < boardSize - 1 then [(x, y + 1)] else []
down = if y > 0 then [(x, y - 1)] else []
left = if x > 1 then [(x - 1, y)] else []
right = if x < boardSize - 1 then [(x + 1, y)] else []
isOccupied :: Board -> Coord -> Bool
isOccupied board coord
= any (elem coord . members) board
groupLives :: Board -> PieceGroup -> Int
groupLives board (PieceGroup mems side)
= length . group . sort
. filter (not . isOccupied board)
. concatMap adjacents
$ mems
addPiece :: Board -> Side -> Coord -> Maybe Board
addPiece board side coord
= if isOccupied board coord
then
(if not $ isVisibleToSide board side coord
then Just board
else Nothing)
else
if groupLives (newGroup : newRest) newGroup == 0
then Nothing
else Just (newGroup : newRest) where
(adjs, rest)
= partition
(both (isAdjToGroup coord) ((== side) . groupSide))
board
newGroup
= if null adjs
then PieceGroup [coord] side
else PieceGroup ([coord] ++ concatMap members adjs) side
newRest = filter ((/= 0) . groupLives (newGroup : rest)) rest
visiblePieces :: Board -> Side -> [(Coord, Side)]
visiblePieces pgs mySide
= concatMap f pgs where
f (PieceGroup pieces side)
= map
(\c -> (c, side))
(if side == mySide
then pieces
else filter (isVisibleToSide pgs mySide) pieces)
flipSide :: Side -> Side
flipSide Black = White
flipSide White = Black
both :: (a -> Bool) -> (a -> Bool) -> a -> Bool
both p q = (&&) <$> p <*> q
| nicball/playground | visgo/src/Model.hs | mit | 3,621 | 0 | 19 | 1,118 | 1,474 | 792 | 682 | 111 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module DarkSky.App.Config
( Config(..)
) where
import DarkSky.Types
import Data.Aeson
data Config = Config
{ key :: Maybe String
, coordinate :: Maybe Coordinate
} deriving (Eq, Show)
instance Monoid Config where
mempty =
Config
{ key = Nothing
, coordinate = Nothing
}
mappend (Config key1 coord1) (Config key2 coord2) =
Config
{ key = key2 `orElse` key1
, coordinate = coord2 `orElse` coord1
}
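-- Editor's sketch (assumption: GHC 8.4 or later, where Semigroup is a
-- superclass of Monoid and is exported from the Prelude): on those compilers
-- this Monoid instance also needs a Semigroup instance, which can simply
-- delegate to the mappend defined above.
instance Semigroup Config where
  (<>) = mappend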
orElse :: Maybe a -> Maybe a -> Maybe a
orElse (Just a) _ = Just a
orElse _ x = x
instance FromJSON Config where
parseJSON =
withObject "config" $
\o -> do
key' <- o .:? "key"
latitude' <- o .:? "latitude"
longitude' <- o .:? "longitude"
return
Config
{ key = key'
, coordinate = Coordinate <$> latitude' <*> longitude'
}
| peterstuart/dark-sky | src/DarkSky/App/Config.hs | mit | 871 | 0 | 14 | 252 | 281 | 152 | 129 | 32 | 1 |
inc :: Int -> Int
inc n = n + 1
{-
strict evaluation:
inc (2*3)
inc 6
6+1
7
lazy evaluation:
inc (2*3)
(2*3)+1
6+1
7
-}
mult = \x -> \y -> x * y
inf = 1 + inf
{-
Strict:
fst (0, inf)
fst (0, 1+inf)
fst (0, 1+(1+inf))
fst (0, 1+(1+(1+inf)))
... (never terminates)
Lazy:
fst (0, inf)
0
-}
ones = 1:ones
-- head ones
-- head (tail ones)
-- take 3 ones
-- WARNING!!!
-- filter (<=5) [1..] --> Never returns
-- takeWhile (<=5) [1..] --> [1,2,3,4,5]
prime = sieve [2..]
sieve (p:xs) = p : sieve (filter (\x -> x `mod` p /= 0) xs)
--[ x | x <- xs, x `mod` p /= 0 ]
-- Lazy (the accumulator variable v doesn't work as intended because of lazy evaluation)
sumWith :: Int -> [Int] -> Int
sumWith v [] = v
sumWith v (x:xs) = sumWith (v+x) xs
-- sumWith 0 [1,2,3]
-- (((0+1)+2)+3)
-- Strict (force the evaluation of (v+x) with $! before it)
sumWith' :: Int -> [Int] -> Int
sumWith' v [] = v
sumWith' v (x:xs) = (sumWith' $! (v+x)) xs
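-- A minimal sketch (editor's addition, not from the original notes): ($!) is
-- defined via seq, so the same strict accumulation can be written by forcing
-- the running sum with seq before each recursive call.
sumWithSeq :: Int -> [Int] -> Int
sumWithSeq v [] = v
sumWithSeq v (x:xs) = let v' = v + x in v' `seq` sumWithSeq v' xs
-- sumWithSeq 0 [1,2,3] evaluates the partial sums 1, 3, 6 as it goes,
-- instead of building the thunk (((0+1)+2)+3).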
| feliposz/learning-stuff | haskell/c9lectures-ch12.hs | mit | 938 | 0 | 12 | 211 | 250 | 140 | 110 | 13 | 1 |
module Raster.Palette (module Raster.Palette, black, white) where
import Raster.Color
blue, green, cyan, purple, brown, red, magenta, orange, yellow
:: RealColor a => a
blue = fromRGB 0 0 0xFF
green = fromRGB 0 0xFF 0
cyan = fromRGB 0 0xFF 0xFF
purple = fromRGB 0x80 0 0x80
brown = fromRGB 0x99 0x66 0x33
red = fromRGB 0xFF 0 0
magenta = fromRGB 0xFF 0 0xFF
orange = fromRGB 0xFF 0x80 0
yellow = fromRGB 0xFF 0xFF 0
grey :: GreyColor a => a
grey = fromGrey 0xCC
| jwodder/hsgraphics | Raster/Palette.hs | mit | 529 | 0 | 6 | 154 | 181 | 102 | 79 | 15 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{- |
Module : Database.Couch.Response
Description : Utilities for extracting specific types from Database.Couch JSON values
Copyright : Copyright (c) 2015, Michael Alan Dorman
License : MIT
Maintainer : mdorman@jaunder.io
Stability : experimental
Portability : POSIX
Calls to CouchDB can return values that have well-defined structure beyond their simple JSON content, but that don't necessarily warrant full-blown data types with 'FromJSON' instances and the like. We want to provide convenient conversions when that is the case.
> result <- Database.compact cxt `ap` asBool
> if result
> then ...
> else ...
-}
module Database.Couch.Response where
import Control.Monad ((>>=))
import Data.Aeson (FromJSON, Value (Object), fromJSON)
import qualified Data.Aeson as Aeson (Result (Error, Success))
import Data.Bool (Bool)
import Data.Either (Either (Left, Right))
import Data.Function (($), (.))
import Data.Functor (fmap)
import Data.HashMap.Strict (lookup)
import Data.Maybe (Maybe (Just, Nothing), catMaybes, maybe)
import Data.String (fromString)
import Data.Text (Text, intercalate, splitAt)
import Data.Text.Encoding (encodeUtf8)
import Data.UUID (UUID, fromASCIIBytes)
import Database.Couch.Types (Error (NotFound, ParseFail), Result)
{- | Attempt to decode the value into anything with a FromJSON constraint.
This is really about translating 'Data.Aeson.Result' values into our 'Database.Couch.Types.Result' values. -}
asAnything :: FromJSON a => Result Value -> Result a
asAnything v =
case v of
Left x -> Left x
Right (a, b) -> case fromJSON a of
Aeson.Error e -> (Left . ParseFail . fromString) e
Aeson.Success s -> Right (s, b)
{- | Attempt to construct a 'Data.Bool.Bool' value.
This assumes the routine conforms to CouchDB's @{"ok": true}@ return convention. -}
asBool :: Result Value -> Result Bool
asBool = getKey "ok"
{- | Attempt to construct a list of 'Data.UUID.UUID' values.
CouchDB returns uuids as string values in a form that "Data.UUID" cannot consume directly, so we provide this standard conversion. -}
asUUID :: Result Value -> Result [UUID]
asUUID v =
case v of
Left x -> Left x
Right (Object o, b) -> maybe (Left (ParseFail "Couldn't convert to UUID type"))
(Right . (,b) . catMaybes . reformat) $ lookup "uuids" o
_ -> Left NotFound
where
reformat i =
case fromJSON i of
Aeson.Error _ -> []
Aeson.Success a -> fmap (fromASCIIBytes . encodeUtf8 . reformatUuid) a
reformatUuid s =
let (first, second') = splitAt 8 s
(second, third') = splitAt 4 second'
(third, fourth') = splitAt 4 third'
(fourth, fifth) = splitAt 4 fourth'
in intercalate "-" [first, second, third, fourth, fifth]
{- | Attempt to extract the value of a particular key. -}
getKey :: FromJSON a => Text -> Result Value -> Result a
getKey k v =
case v of
Left x -> Left x
Right (Object o, b) -> maybe (Left NotFound) (Right . (, b)) $ lookup k o >>= reformat
_ -> Left NotFound
where
reformat i =
case fromJSON i of
Aeson.Error _ -> Nothing
Aeson.Success a -> Just a
| mdorman/couch-simple | src/lib/Database/Couch/Response.hs | mit | 3,593 | 0 | 14 | 1,013 | 787 | 426 | 361 | 54 | 4 |
{-# htermination fmap :: (a -> b) -> (Maybe a -> Maybe b) #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_fmap_3.hs | mit | 62 | 0 | 2 | 14 | 3 | 2 | 1 | 1 | 0 |
{- Alec Snyder
- lab 1
- Sokal text generator
- suck.hs
- See README.md for instructions and additional documentation
- Github at: https://github.com/allonsy/sokal
-}
module Suck (runSuck) where
import Prelude hiding (map,filter)
import Network.HTTP
import Text.HTML.TagSoup
import Control.Monad
import Text.StringLike hiding (empty)
import Data.Char
import Data.Map.Strict hiding (findIndex)
import Data.List hiding (insert)
type PrimitiveModel = Map (String,String) [String]
type ProcessedModel = [(String, [(Int,Int)])]
runSuck :: String -> IO ()
runSuck file= do
contents <- readFile file
putStrLn "Successfully read in url file"
let ls = lines contents
putStrLn "Connecting to servers and downloading content..."
responses <- mapM grabResp ls --grab the HTML
putStrLn "Parsing in DOM..."
let souped = newMap parseTags responses --grab the tagsoup of the HTML
putStrLn "Filtering DOM..."
let process = processWords $ filterSoup souped --filter soup for p tags, turn it into text, and parse into words
putStrLn "Generating Models"
let prim = generatePrim process empty --generate primitive model
let final = primToProcess prim --turn to processed model
putStrLn "Printing Model to File"
let addNewLine w = (show w) ++ "\n" --add a new line after every entry of the list is displayed
writeFile "sokal.model" (concatMap (addNewLine) final) --write to file
grabResp :: String -> IO String --takes in a url and returns the HTML at that page
grabResp str = do
makeRequest <- simpleHTTP (getRequest str)
getResponseBody makeRequest
filterSoup :: (StringLike str, Show str) => [[Tag str]] -> [[str]] --takes in the p tags (and the stuff in between) and takes out any non-text tags
filterSoup ls = newMap ((newMap fromTagText) . (newFilter isTagText) . (flip extractP False)) ls
extractP :: StringLike str => [Tag str] -> Bool -> [Tag str] --takes in a list of tags and returns a list of tags but only those in between the <p> and </p> tags
extractP [] _ = []
extractP (x:xs) True --if true, then we are inside a p tag
  | isTagClose x && x ~== TagClose "p" = x : extractP xs False --closing the p tag so we append the final p and go to false
| otherwise = x : extractP xs True --inside a p tag
extractP (x:xs ) False --outside a p tag
| isTagOpen x && x ~== TagOpen "p" [] = x : extractP xs True --we open a p tag and therefore, go to true and append this tag to the list
| otherwise = extractP xs False --we ignore this tag and proceed
processWords :: [[String]] -> [String] --takes in a list of list of strings and formats it into a list of words, omitting any newline characters and unprintable characters (due to bad unicode -> ascii translating)
processWords ls = newMap ((newMap toLower) . (newFilter isPrint)) $ (words (concat (((newMap ((newFilter (/='\n')) . concat) ls))))) --I feel that this can be simplified into a foldr however I haven't the brainpower to get the compositions to type check yet
generatePrim :: [String] -> PrimitiveModel -> PrimitiveModel --takes in a list of words (from process words) and a start point primitive model and outputs a primitive model
generatePrim (x:y:[]) pm -- if these are the last two words
| member (x,y) pm = pm
| otherwise = insert (x,y) [] pm
generatePrim (x:y:z:xs) pm
| member (x,y) pm = generatePrim (y:z:xs) (adjust (++ [z]) (x,y) pm) --if we have seen these before, add what comes after to the list
| otherwise = generatePrim (y:z:xs) (insert (x,y) [z] pm) --otherwise, add this to the dictionary
getFreqs :: PrimitiveModel -> [((String,String), [(Int,String)])] --It takes in a primitive model and it looks at the list of words following this pair and creates a list of (occurrences of this word, word itself) tuples
getFreqs pm = freqs (toList pm) where --turn the map into a list
freqs [] = []
freqs ((strs,assocs):xs) = (strs,countOcc assocs (nub assocs)) : freqs xs --iterate over the nubbed list but count occurrences in the original list
countOcc _ [] =[]
countOcc orig (x:xs) = ((length (elemIndices x orig)),x) : countOcc orig xs --traverse the list and see how many we have of each item
interProcessed :: [((String,String), [(Int,String)])] -> [((String,String), [(Int,Int)])] --takes in the result from getFreqs and associates the words in the frequency list with the index of the corresponding pair in the soon to be built array
interProcessed ls = newMap grabIndex ls where
grabIndex ((a,b), corr) = ((a,b), changeCorr corr b)
keyList = newMap fst ls --grab the (x,y) pairs from ls and turn it into a list to be turned into a map (in the next line)
keyMap = fromList (zip keyList [0..]) --map of (x,y) word pairs to index
changeCorr [] _ = []
changeCorr ((num,word):xs) match = (num, keyMap ! (match,word)):changeCorr xs match --find the index in the dictionary
processModel :: [((String,String), [(Int,Int)])] -> ProcessedModel --takes in the interprocessed model and removes the first element of each (x,y) pair to make this a hidden second order markov model
processModel xs = newMap remFirst xs where
remFirst ((a,b), corr) = (b,corr)
primToProcess :: PrimitiveModel -> ProcessedModel --streamlines the primitive to processed model processing
primToProcess pm = processModel $ interProcessed $ getFreqs pm
newMap :: (a -> b) -> [a] -> [b] --included because map and filter conflicted with Data.Map's definition, identical to the prelude's definition
newMap _ [] = []
newMap f (x:xs) = f x : newMap f xs
newFilter :: (a -> Bool) -> [a] -> [a]
newFilter p [] = []
newFilter p (x:xs)
| p x = x : newFilter p xs
| otherwise = newFilter p xs
| allonsy/sokal | Suck.hs | mit | 5,673 | 0 | 18 | 1,089 | 1,571 | 838 | 733 | 77 | 3 |
module Test.Hspec.Discover.RunSpec (spec) where
import Helper
import Test.Mockery.Directory
import Test.Hspec.Discover.Run hiding (Spec)
import qualified Test.Hspec.Discover.Run as Run
spec :: Spec
spec = do
describe "run" $ around_ inTempDirectory $ do
it "generates a test driver" $ do
touch "test/FooSpec.hs"
touch "test/Foo/Bar/BazSpec.hs"
touch "test/Foo/BarSpec.hs"
run ["test/Spec.hs", "", "out"]
readFile "out" `shouldReturn` unlines [
"{-# LINE 1 \"test/Spec.hs\" #-}"
, "{-# LANGUAGE NoImplicitPrelude #-}"
, "{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}"
, "module Main where"
, "import qualified FooSpec"
, "import qualified Foo.BarSpec"
, "import qualified Foo.Bar.BazSpec"
, "import Test.Hspec.Discover"
, "main :: IO ()"
, "main = hspec spec"
, "spec :: Spec"
, "spec = " ++ unwords [
"describe \"Foo\" FooSpec.spec"
, ">> describe \"Foo.Bar\" Foo.BarSpec.spec"
, ">> describe \"Foo.Bar.Baz\" Foo.Bar.BazSpec.spec"
]
]
it "generates a test driver with hooks" $ do
touch "test/FooSpec.hs"
touch "test/Foo/Bar/BazSpec.hs"
touch "test/Foo/BarSpec.hs"
touch "test/Foo/SpecHook.hs"
touch "test/SpecHook.hs"
run ["test/Spec.hs", "", "out"]
readFile "out" `shouldReturn` unlines [
"{-# LINE 1 \"test/Spec.hs\" #-}"
, "{-# LANGUAGE NoImplicitPrelude #-}"
, "{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}"
, "module Main where"
, "import qualified SpecHook"
, "import qualified FooSpec"
, "import qualified Foo.SpecHook"
, "import qualified Foo.BarSpec"
, "import qualified Foo.Bar.BazSpec"
, "import Test.Hspec.Discover"
, "main :: IO ()"
, "main = hspec spec"
, "spec :: Spec"
, "spec = " ++ unwords [
"(SpecHook.hook $ describe \"Foo\" FooSpec.spec"
, ">> (Foo.SpecHook.hook $ describe \"Foo.Bar\" Foo.BarSpec.spec"
, ">> describe \"Foo.Bar.Baz\" Foo.Bar.BazSpec.spec))"
]
]
it "generates a test driver for an empty directory" $ do
touch "test/Foo/Bar/Baz/.placeholder"
run ["test/Spec.hs", "", "out"]
readFile "out" `shouldReturn` unlines [
"{-# LINE 1 \"test/Spec.hs\" #-}"
, "{-# LANGUAGE NoImplicitPrelude #-}"
, "{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}"
, "module Main where"
, "import Test.Hspec.Discover"
, "main :: IO ()"
, "main = hspec spec"
, "spec :: Spec"
, "spec = return ()"
]
describe "pathToModule" $ do
it "derives module name from a given path" $ do
pathToModule "test/Spec.hs" `shouldBe` "Spec"
describe "driverWithFormatter" $ do
it "generates a test driver that uses a custom formatter" $ do
driverWithFormatter "Some.Module.formatter" "" `shouldBe` unlines [
"import qualified Some.Module"
, "main :: IO ()"
, "main = hspecWithFormatter Some.Module.formatter spec"
]
describe "moduleNameFromId" $ do
it "returns the module name of a fully qualified identifier" $ do
moduleNameFromId "Some.Module.someId" `shouldBe` "Some.Module"
describe "importList" $ do
it "generates imports for a list of specs" $ do
importList (Just [Run.Spec "Foo", Run.Spec "Bar"]) "" `shouldBe` unlines [
"import qualified FooSpec"
, "import qualified BarSpec"
]
describe "discover" $ do
it "discovers spec files" $ do
inTempDirectory $ do
touch "test/Spec.hs"
touch "test/FooSpec.hs"
touch "test/BarSpec.hs"
discover "test/Spec.hs" `shouldReturn` Just (Forest WithoutHook [Leaf "Bar", Leaf "Foo"])
it "discovers nested spec files" $ do
inTempDirectory $ do
touch "test/Spec.hs"
touch "test/Foo/BarSpec.hs"
touch "test/Foo/BazSpec.hs"
discover "test/Spec.hs" `shouldReturn` Just (Forest WithoutHook [Node "Foo" (Forest WithoutHook [Leaf "Bar", Leaf "Baz"])])
it "discovers hooks" $ do
inTempDirectory $ do
touch "test/Spec.hs"
touch "test/FooSpec.hs"
touch "test/BarSpec.hs"
touch "test/SpecHook.hs"
discover "test/Spec.hs" `shouldReturn` Just (Forest WithHook [Leaf "Bar", Leaf "Foo"])
it "discovers nested hooks" $ do
inTempDirectory $ do
touch "test/Spec.hs"
touch "test/Foo/BarSpec.hs"
touch "test/Foo/BazSpec.hs"
touch "test/Foo/SpecHook.hs"
discover "test/Spec.hs" `shouldReturn` Just (Forest WithoutHook [Node "Foo" (Forest WithHook [Leaf "Bar", Leaf "Baz"])])
it "ignores invalid module names" $ do
inTempDirectory $ do
touch "test/Spec.hs"
touch "test/barSpec.hs"
discover "test/Spec.hs" `shouldReturn` Nothing
it "ignores empty directories" $ do
inTempDirectory $ do
touch "test/Spec.hs"
touch "test/Foo/.keep"
discover "test/Spec.hs" `shouldReturn` Nothing
it "ignores directories with extension" $ do
inTempDirectory $ do
touch "test/Spec.hs"
touch "test/Foo.hs/BarSpec.hs"
discover "test/Spec.hs" `shouldReturn` Nothing
| hspec/hspec | hspec-discover/test/Test/Hspec/Discover/RunSpec.hs | mit | 5,410 | 0 | 25 | 1,518 | 1,015 | 484 | 531 | 126 | 1 |
doubleMe x = x + x
doubleUs x y = doubleMe x + doubleMe y
doubleSmallNumber x = if x > 100
then x
else x * 2
doubleSmallNumber' x = (if x > 100 then x else x * 2) + 1
boomBangs xs = [ if x < 10 then "BOOM!" else "BANG!" | x <- xs, odd x]
length' xs = sum [1 | _<-xs]
removeNonUpperCase :: [Char] -> String
removeNonUpperCase st = [c | c <- st, c `elem` ['A'..'Z']]
nestedList xxs = [[x | x <- xs, odd x] | xs <- xxs]
rightTriangle = [(a,b,c) | c <- [1..10], b <- [1..c], a <- [1..b], a^2 + b^2 == c^2, a + b + c == 24]
addThree :: Int -> Int -> Int -> Int
addThree x y z = x + y + z
lucky :: (Integral a) => a -> String
lucky 7 = "Lucky number seven"
lucky x = "Sorry, you're out of luck pal"
factorial :: (Integral a) => a -> a
factorial 0 = 1
factorial n = n * factorial (n-1)
addVectors :: (Num a) => (a, a) -> (a, a) -> (a, a)
addVectors (x1, y1) (x2, y2) = (x1 + x2, y1 + y2)
addVectors' :: (Num a) => (a, a) -> (a, a) -> (a, a)
addVectors' a b = (fst a + fst b, snd a + snd b)
first :: (a, b, c) -> a
first (x, _, _) = x
second :: (a, b, c) -> b
second (_, y, _) = y
third :: (a, b, c) -> c
third (_, _, z) = z
head' :: [a] -> a
head' [] = error "Can't call head on empty list, dummy!"
head' (x:_) = x
head'' :: [a] -> a
head'' xs = case xs of [] -> error "No head of empty list"
(x:_) -> x
tell :: (Show a) => [a] -> String
tell [] = "The list is empty"
tell (x:[]) = "The list has one element: " ++ show x
tell (x:y:[]) = "The list has two elements: " ++ show x ++ " and " ++ show y
tell (x:y:_) = "The list is long. The first two elements are: " ++ show x ++ " and " ++ show y
length'' :: (Num b) => [a] -> b
length'' [] = 0
length'' (_:xs) = 1 + length'' xs
sum' :: (Num a) => [a] -> a
sum' [] = 0
sum' (x:xs) = x + sum' xs
capital' :: String -> String
capital' "" = "Empty String, whoops!"
capital' all@(x:xs) = "The first letter of " ++ all ++ " is " ++ [x]
bmiTell :: (RealFloat a) => a -> a -> String
bmiTell weight height
| bmi <= skinny = "You're underweight, you emo, you!"
| bmi <= normal = "You're supposedly normal. Pfft, I bet you're ugly!"
| bmi <= fat = "You're fat! Lose some weight, fatty!"
| otherwise = "You're a whale, congratulation!"
where bmi = weight / height ^ 2
(skinny, normal, fat) = (18.5, 25.0, 30.0)
calBmis :: (RealFloat a) => [(a,a)] -> [a]
calBmis xs = [bmi w h | (w,h) <- xs]
where bmi weight height = weight / height ^ 2
max' :: (Ord a) => a -> a -> a
max' a b
| a > b = a
| otherwise = b
myCompare :: (Ord a) => a -> a -> Ordering
a `myCompare` b
| a > b = GT
| a == b = EQ
| otherwise = LT
initials :: String -> String -> String
initials firstname lastname = [f] ++ ". " ++ [l] ++ "."
where (f:_) = firstname
(l:_) = lastname
cylinder :: (RealFloat a) => a -> a -> a
cylinder r h =
let sideArea = 2 * pi * r * h
topArea = pi * r ^ 2
in sideArea + 2 * topArea
calBmis' :: (RealFloat a) => [(a, a)] -> [a]
calBmis' xs = [bmi | (w, h) <- xs, let bmi = w / h ^ 2]
describeList :: [a] -> String
describeList xs = "The list is " ++ case xs of [] -> "empty."
[x] -> "a singleton list."
xs -> "a longer list."
describeList' :: [a] -> String
describeList' xs = "The list is " ++ what xs
where what [] = "empty."
what [x] = "a singleton."
what xs = "a longer list."
maximum' :: (Ord a) => [a] -> a
maximum' [] = error "maximum of an empty list"
maximum' [x] = x
maximum' (x:xs)
| x > maxTail = x
| otherwise = maxTail
where maxTail = maximum' xs
maximum'' :: (Ord a) => [a] -> a
maximum'' [] = error "maximum of an empty list"
maximum'' [x] = x
maximum'' (x:xs) = max x (maximum'' xs)
replicate' :: (Num i, Ord i) => i -> a -> [a]
replicate' n x
| n <= 0 = []
| otherwise = x:replicate' (n-1) x
take' :: (Num i, Ord i) => i -> [a] -> [a]
take' n _
| n <= 0 = []
take' _ [] = []
take' n (x:xs) = x : take' (n-1) xs
reverse' :: [a] -> [a]
reverse' [] = []
reverse' (x:xs) = reverse' xs ++ [x]
repeat' :: a -> [a]
repeat' x = x : repeat' x
zip' :: [a] -> [b] -> [(a,b)]
zip' _ [] = []
zip' [] _ = []
zip' (x:xs) (y:ys) = (x,y) : zip' xs ys
elem' :: (Eq a) => a -> [a] -> Bool
elem' a [] = False
elem' a (x:xs)
| a == x = True
| otherwise = a `elem'` xs
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (x:xs) =
let smallerSorted = quicksort [a | a <- xs, a <= x]
biggerSorted = quicksort [a | a <- xs, a > x]
in smallerSorted ++ [x] ++ biggerSorted
multiThree :: (Num a) => a -> a -> a -> a
multiThree x y z = x * y * z
compareWithHundred :: (Num a, Ord a) => a -> Ordering
compareWithHundred x = compare 100 x
compareHundred :: (Num a, Ord a) => a -> Ordering
compareHundred = compare 100
divideByTen :: (Floating a) => a -> a
divideByTen = (/10)
isUpperAlphanum :: Char -> Bool
isUpperAlphanum = (`elem` ['A'..'Z'])
applyTwice :: (a -> a) -> a -> a
applyTwice f x = f (f x)
zipWith' :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWith' _ [] _ = []
zipWith' _ _ [] = []
zipWith' f (x:xs) (y:ys) = f x y : zipWith' f xs ys
flip' :: (a -> b -> c) -> (b -> a -> c)
flip' f = g
where g x y = f y x
filter' :: (a -> Bool) -> [a] -> [a]
filter' _ [] = []
filter' p (x:xs)
| p x = x : filter' p xs
| otherwise = filter' p xs
largestDivisible :: (Integral a) => a
largestDivisible = head (filter p [100000, 99999..])
where p x = x `mod` 3829 == 0
chain :: (Integral a) => a -> [a]
chain 1 = [1]
chain n
| even n = n: chain (n `div` 2)
| odd n = n: chain (n*3 +1)
numLongChains :: Int
numLongChains = length (filter isLong (map chain [1..100]))
where isLong xs = length xs > 15
numLongChains' :: Int
numLongChains' = length (filter (\xs -> length xs > 15) (map chain [1..100]))
sumFold :: (Num a) => [a] -> a
sumFold xs = foldl (\acc x -> acc + x) 0 xs
elemFold :: (Eq a) => a -> [a] -> Bool
elemFold y ys = foldl (\acc x -> if x == y then True else acc) False ys
elemFold' :: (Eq a) => a -> [a] -> Bool
elemFold' y ys = foldl (\acc x -> if x == y then True else acc) False ys
map' :: (a -> b) -> [a] -> [b]
map' f xs = foldr (\x acc -> f x: acc) [] xs
mapLeftFold :: (a -> b) -> [a] -> [b]
mapLeftFold f xs = foldl (\acc x -> acc ++ [f x]) [] xs
maxFold :: (Ord a) => [a] -> a
maxFold = foldl1 (\acc x -> if x > acc then x else acc)
reverseFold :: [a] -> [a]
reverseFold = foldl (\acc x -> x: acc) []
productFold :: (Num a) => [a] -> a
productFold = foldl1 (*)
filterFold :: (a -> Bool) -> [a] -> [a]
filterFold p = foldr (\x acc -> if p x then x : acc else acc) []
headFold :: [a] -> a
headFold = foldr1 (\x _ -> x)
lastFold :: [a] -> a
lastFold = foldl1 (\_ x -> x)
sqrtSums :: Int
sqrtSums = length (takeWhile (< 1000) (scanl1 (+) (map sqrt [1..]))) + 1
| aksswami/learning-haskell | baby.hs | mit | 6,850 | 64 | 13 | 1,872 | 3,816 | 2,027 | 1,789 | 191 | 3 |
module Config.AppConfig
( AppConfig (..)
, getAppConfig
) where
import Config.Environment (Environment (..))
import Config.RabbitMQ (RabbitMQConfig, readRabbitMQConfig)
import Data.Text as T
import LoadEnv
import System.Directory (getAppUserDataDirectory)
import System.FilePath.Posix ((</>), (<.>))
data AppConfig =
AppConfig { getAppDataDirectory :: FilePath
, getAppName :: AppName
, getEnv :: Environment
, getRabbitMQConfig :: RabbitMQConfig
}
type AppName = Text
getAppConfig :: AppName -> Environment -> IO AppConfig
getAppConfig appName env = do
dataDirectory <- loadEnvVars appName env
rabbitMQConfig <- readRabbitMQConfig
return $ AppConfig
{ getAppDataDirectory = dataDirectory
, getAppName = appName
, getEnv = env
, getRabbitMQConfig = rabbitMQConfig
}
-- loadEnvVars will look for configuration files matching the lowercase
-- environment name in the user's data directory
-- Ex. if the app name is 'cool-app' and the environment is Production,
-- the env vars will be loaded from ~/.cool-app/production.env
-- loadEnvVars will NOT raise an exception if the environment file is not found
loadEnvVars :: AppName -> Environment -> IO FilePath
loadEnvVars appName env = dataDirectory appName >>= \dataDir ->
let filePath = dataDir </> envName env <.> "env"
in loadEnvFrom filePath >> return dataDir
where
envName :: Environment -> FilePath
envName = T.unpack . toLower . T.pack . show
dataDirectory :: AppName -> IO FilePath
dataDirectory = getAppUserDataDirectory . T.unpack
| gust/feature-creature | features-worker/src/Config/AppConfig.hs | mit | 1,649 | 0 | 13 | 370 | 330 | 188 | 142 | 32 | 1 |
import Text.Printf
import Data.List
import Data.Ord
main :: IO ()
main = interact (printf "%.3f" . run . map read . words)
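-- Editor's note (inferred from `run` below, not stated in the original): the
-- input is whitespace-separated numbers "n W c1 w1 c2 w2 ...", where the item
-- count n is ignored, W is the knapsack capacity, and items are taken greedily
-- by descending value/weight ratio (fractional knapsack).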
run :: [Double] -> Double
run (_ : w : xs) = solve w (sortBy (comparing (\(ci, wi) -> - ci / wi)) (pairs xs [])) 0 where
-- (ci, wi) pairs
pairs [] acc = acc
pairs (x : y : ys) acc = pairs ys ((x, y) : acc)
-- solution
solve 0 _ acc = acc
solve _ [] acc = acc
solve wc ((ci, wi) : ys) acc | wc <= wi = solve 0 ys (acc + ci * wc / wi)
| otherwise = solve (wc - wi) ys (acc + ci)
| da-eto-ya/trash | haskell/algos/continuous-knapsack/Main.hs | mit | 588 | 0 | 13 | 207 | 305 | 159 | 146 | 13 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ec2-transitgatewayroutetableassociation.html
module Stratosphere.Resources.EC2TransitGatewayRouteTableAssociation where
import Stratosphere.ResourceImports
-- | Full data type definition for EC2TransitGatewayRouteTableAssociation. See
-- 'ec2TransitGatewayRouteTableAssociation' for a more convenient
-- constructor.
data EC2TransitGatewayRouteTableAssociation =
EC2TransitGatewayRouteTableAssociation
{ _eC2TransitGatewayRouteTableAssociationTransitGatewayAttachmentId :: Val Text
, _eC2TransitGatewayRouteTableAssociationTransitGatewayRouteTableId :: Val Text
} deriving (Show, Eq)
instance ToResourceProperties EC2TransitGatewayRouteTableAssociation where
toResourceProperties EC2TransitGatewayRouteTableAssociation{..} =
ResourceProperties
{ resourcePropertiesType = "AWS::EC2::TransitGatewayRouteTableAssociation"
, resourcePropertiesProperties =
hashMapFromList $ catMaybes
[ (Just . ("TransitGatewayAttachmentId",) . toJSON) _eC2TransitGatewayRouteTableAssociationTransitGatewayAttachmentId
, (Just . ("TransitGatewayRouteTableId",) . toJSON) _eC2TransitGatewayRouteTableAssociationTransitGatewayRouteTableId
]
}
-- | Constructor for 'EC2TransitGatewayRouteTableAssociation' containing
-- required fields as arguments.
ec2TransitGatewayRouteTableAssociation
:: Val Text -- ^ 'ectgrtaTransitGatewayAttachmentId'
-> Val Text -- ^ 'ectgrtaTransitGatewayRouteTableId'
-> EC2TransitGatewayRouteTableAssociation
ec2TransitGatewayRouteTableAssociation transitGatewayAttachmentIdarg transitGatewayRouteTableIdarg =
EC2TransitGatewayRouteTableAssociation
{ _eC2TransitGatewayRouteTableAssociationTransitGatewayAttachmentId = transitGatewayAttachmentIdarg
, _eC2TransitGatewayRouteTableAssociationTransitGatewayRouteTableId = transitGatewayRouteTableIdarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ec2-transitgatewayroutetableassociation.html#cfn-ec2-transitgatewayroutetableassociation-transitgatewayattachmentid
ectgrtaTransitGatewayAttachmentId :: Lens' EC2TransitGatewayRouteTableAssociation (Val Text)
ectgrtaTransitGatewayAttachmentId = lens _eC2TransitGatewayRouteTableAssociationTransitGatewayAttachmentId (\s a -> s { _eC2TransitGatewayRouteTableAssociationTransitGatewayAttachmentId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ec2-transitgatewayroutetableassociation.html#cfn-ec2-transitgatewayroutetableassociation-transitgatewayroutetableid
ectgrtaTransitGatewayRouteTableId :: Lens' EC2TransitGatewayRouteTableAssociation (Val Text)
ectgrtaTransitGatewayRouteTableId = lens _eC2TransitGatewayRouteTableAssociationTransitGatewayRouteTableId (\s a -> s { _eC2TransitGatewayRouteTableAssociationTransitGatewayRouteTableId = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/EC2TransitGatewayRouteTableAssociation.hs | mit | 3,007 | 0 | 15 | 258 | 280 | 161 | 119 | 31 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
module Data.InvertibleGrammar.TH where
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Data.InvertibleGrammar
import Data.Maybe
import Data.Text (pack)
import Language.Haskell.TH as TH
{- | Build a prism and the corresponding grammar that will match on the
given constructor and convert it to reverse sequence of :- stacks.
E.g. consider a data type:
> data FooBar a b c = Foo a b c | Bar
For constructor Foo
> fooGrammar = $(grammarFor 'Foo)
will expand into
> fooGrammar = PartialIso "Foo"
> (\(c :- b :- a :- t) -> Foo a b c :- t)
> (\case { Foo a b c :- t -> Just $ c :- b :- a :- t; _ -> Nothing })
Note the order of elements on the stack:
> ghci> :t fooGrammar
> fooGrammar :: Grammar g (c :- (b :- (a :- t))) (FooBar a b c :- t)
-}
grammarFor :: Name -> ExpQ
grammarFor constructorName = do
DataConI realConstructorName _typ parentName _fixity <- reify constructorName
TyConI dataDef <- reify parentName
let Just (single, constructorInfo) = do
(single, allConstr) <- constructors dataDef
constr <- findConstructor realConstructorName allConstr
return (single, constr)
let ts = fieldTypes constructorInfo
vs <- mapM (const $ newName "x") ts
t <- newName "t"
let matchStack [] = varP t
matchStack (_v:vs) = [p| $(varP _v) :- $_vs' |]
where
_vs' = matchStack vs
fPat = matchStack vs
buildConstructor = foldr (\v acc -> appE acc (varE v)) (conE realConstructorName) vs
fBody = [e| $buildConstructor :- $(varE t) |]
fFunc = lamE [fPat] fBody
let gPat = [p| $_matchConsructor :- $(varP t) |]
where
_matchConsructor = conP realConstructorName (map varP (reverse vs))
gBody = foldr (\v acc -> [e| $(varE v) :- $acc |]) (varE t) vs
gFunc = lamCaseE $ catMaybes
[ Just $ TH.match gPat (normalB [e| Right ($gBody) |]) []
, if single
then Nothing
else Just $ TH.match wildP (normalB [e| Left (expected . pack $ $(stringE (show constructorName))) |]) []
]
[e| PartialIso $(stringE (show constructorName)) $fFunc $gFunc |]
{- | Build prisms and corresponding grammars for all data constructors of given
type. Expects grammars to zip built ones with.
> $(match ''Maybe)
Will expand into a lambda:
> (\nothingG justG -> ($(grammarFor 'Nothing) . nothingG) <>
> ($(grammarFor 'Just) . justG))
-}
match :: Name -> ExpQ
match tyName = do
names <- map constructorName . extractConstructors <$> reify tyName
argTys <- mapM (\_ -> newName "a") names
let grammars = map (\(con, arg) -> [e| $(varE arg) $(grammarFor con) |]) (zip names argTys)
lamE (map varP argTys) (foldr1 (\e1 e2 -> [e| $e1 :<>: $e2 |]) grammars)
where
extractConstructors :: Info -> [Con]
extractConstructors info =
case info of
TyConI (DataD _ _ _ cons _) -> cons
TyConI (NewtypeD _ _ _ con _) -> [con]
_ -> error "Type name is expected"
----------------------------------------------------------------------
-- Utils
constructors :: Dec -> Maybe (Bool, [Con])
constructors (DataD _ _ _ cs _) = Just (length cs == 1, cs)
constructors (NewtypeD _ _ _ c _) = Just (True, [c])
constructors _ = Nothing
findConstructor :: Name -> [Con] -> Maybe Con
findConstructor _ [] = Nothing
findConstructor name (c:cs)
| constructorName c == name = Just c
| otherwise = findConstructor name cs
constructorName :: Con -> Name
constructorName con =
case con of
NormalC name _ -> name
RecC name _ -> name
InfixC _ name _ -> name
ForallC _ _ con' -> constructorName con'
fieldTypes :: Con -> [Type]
fieldTypes (NormalC _ fieldTypes) = map snd fieldTypes
fieldTypes (RecC _ fieldTypes) = map (\(_, _, t) ->t) fieldTypes
fieldTypes (InfixC (_,a) _b (_,b)) = [a, b]
fieldTypes (ForallC _ _ con') = fieldTypes con'
| ricardopenyamari/ir2haskell | clir-parser-haskell-master/lib/sexp-grammar/src/Data/InvertibleGrammar/TH.hs | gpl-2.0 | 4,068 | 0 | 17 | 1,039 | 1,071 | 561 | 510 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-
Copyright (C) 2008-9 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- Handlers for wiki functions.
-}
module Network.Gitit.Handlers (
handleAny
, debugHandler
, randomPage
, discussPage
, createPage
, showActivity
, goToPage
, searchResults
, uploadForm
, uploadFile
, indexPage
, categoryPage
, categoryListPage
, preview
, showRawPage
, showFileAsText
, showPageHistory
, showFileHistory
, showPage
, showPageDiff
, showFileDiff
, exportPage
, updatePage
, editPage
, deletePage
, confirmDelete
, showHighlightedSource
, expireCache
, feedHandler
)
where
import Safe
import Network.Gitit.Server
import Network.Gitit.Framework
import Network.Gitit.Layout
import Network.Gitit.Types
import Network.Gitit.Feed (filestoreToXmlFeed, FeedConfig(..))
import Network.Gitit.Util (orIfNull)
import Network.Gitit.Cache (expireCachedFile, lookupCache, cacheContents)
import Network.Gitit.ContentTransformer (showRawPage, showFileAsText, showPage,
exportPage, showHighlightedSource, preview, applyPreCommitPlugins)
import Network.Gitit.Page (readCategories)
import qualified Control.Exception as E
import System.FilePath
import Network.Gitit.State
import Text.XHtml hiding ( (</>), dir, method, password, rev )
import qualified Text.XHtml as X ( method )
import Data.List (intercalate, intersperse, delete, nub, sortBy, find, isPrefixOf, inits, sort, (\\))
import Data.List.Split (wordsBy)
import Data.Maybe (fromMaybe, mapMaybe, isJust, catMaybes)
import Data.Ord (comparing)
import Data.Char (toLower, isSpace)
import Control.Monad.Reader
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString as S
import Network.HTTP (urlEncodeVars)
import Data.Time (getCurrentTime, addUTCTime)
import Data.Time.Clock (diffUTCTime, UTCTime(..))
import Data.FileStore
import System.Log.Logger (logM, Priority(..))
handleAny :: Handler
handleAny = withData $ \(params :: Params) -> uriRest $ \uri ->
let path' = uriPath uri
in do fs <- getFileStore
let rev = pRevision params
mimetype <- getMimeTypeForExtension
(takeExtension path')
res <- liftIO $ E.try
(retrieve fs path' rev :: IO B.ByteString)
case res of
Right contents -> ignoreFilters >> -- don't compress
(ok $ setContentType mimetype $
(toResponse noHtml) {rsBody = contents})
-- ugly hack
Left NotFound -> mzero
Left e -> error (show e)
debugHandler :: Handler
debugHandler = withData $ \(params :: Params) -> do
req <- askRq
liftIO $ logM "gitit" DEBUG (show req)
page <- getPage
liftIO $ logM "gitit" DEBUG $ "Page = '" ++ page ++ "'\n" ++
show params
mzero
randomPage :: Handler
randomPage = do
fs <- getFileStore
base' <- getWikiBase
prunedFiles <- liftIO (index fs) >>= filterM isPageFile >>= filterM isNotDiscussPageFile
let pages = map dropExtension prunedFiles
if null pages
then error "No pages found!"
else do
secs <- liftIO (fmap utctDayTime getCurrentTime)
let newPage = pages !!
(truncate (secs * 1000000) `mod` length pages)
seeOther (base' ++ urlForPage newPage) $ toResponse $
p << "Redirecting to a random page"
discussPage :: Handler
discussPage = do
page <- getPage
base' <- getWikiBase
seeOther (base' ++ urlForPage (if isDiscussPage page then page else ('@':page))) $
toResponse "Redirecting to discussion page"
createPage :: Handler
createPage = do
page <- getPage
base' <- getWikiBase
case page of
('_':_) -> mzero -- don't allow creation of _index, etc.
_ -> formattedPage defaultPageLayout{
pgPageName = page
, pgTabs = []
, pgTitle = "Create " ++ page ++ "?"
} $
(p << stringToHtml
("There is no page named '" ++ page ++ "'. You can:"))
+++
(unordList $
[ anchor !
[href $ base' ++ "/_edit" ++ urlForPage page] <<
("Create the page '" ++ page ++ "'")
, anchor !
[href $ base' ++ "/_search?" ++
(urlEncodeVars [("patterns", page)])] <<
("Search for pages containing the text '" ++
page ++ "'")])
uploadForm :: Handler
uploadForm = withData $ \(params :: Params) -> do
let origPath = pFilename params
let wikiname = pWikiname params `orIfNull` takeFileName origPath
let logMsg = pLogMsg params
let upForm = form ! [X.method "post", enctype "multipart/form-data"] <<
fieldset <<
[ p << [label ! [thefor "file"] << "File to upload:"
, br
, afile "file" ! [value origPath] ]
, p << [ label ! [thefor "wikiname"] << "Name on wiki, including extension"
, noscript << " (leave blank to use the same filename)"
, stringToHtml ":"
, br
, textfield "wikiname" ! [value wikiname]
, primHtmlChar "nbsp"
, checkbox "overwrite" "yes"
, label ! [thefor "overwrite"] << "Overwrite existing file" ]
, p << [ label ! [thefor "logMsg"] << "Description of content or changes:"
, br
, textfield "logMsg" ! [size "60", value logMsg]
, submit "upload" "Upload" ]
]
formattedPage defaultPageLayout{
pgMessages = pMessages params,
pgScripts = ["uploadForm.js"],
pgShowPageTools = False,
pgTabs = [],
pgTitle = "Upload a file"} upForm
uploadFile :: Handler
uploadFile = withData $ \(params :: Params) -> do
let origPath = pFilename params
let filePath = pFilePath params
let wikiname = normalise
$ dropWhile (=='/')
$ pWikiname params `orIfNull` takeFileName origPath
let logMsg = pLogMsg params
cfg <- getConfig
wPF <- isPageFile wikiname
mbUser <- getLoggedInUser
(user, email) <- case mbUser of
Nothing -> return ("Anonymous", "")
Just u -> return (uUsername u, uEmail u)
let overwrite = pOverwrite params
fs <- getFileStore
exists <- liftIO $ E.catch (latest fs wikiname >> return True) $ \e ->
if e == NotFound
then return False
else E.throwIO e >> return True
let inStaticDir = staticDir cfg `isPrefixOf` (repositoryPath cfg </> wikiname)
let inTemplatesDir = templatesDir cfg `isPrefixOf` (repositoryPath cfg </> wikiname)
let dirs' = splitDirectories $ takeDirectory wikiname
let imageExtensions = [".png", ".jpg", ".gif"]
let errors = validate
[ (null . filter (not . isSpace) $ logMsg,
"Description cannot be empty.")
, (".." `elem` dirs', "Wikiname cannot contain '..'")
, (null origPath, "File not found.")
, (inStaticDir, "Destination is inside static directory.")
, (inTemplatesDir, "Destination is inside templates directory.")
, (not overwrite && exists, "A file named '" ++ wikiname ++
"' already exists in the repository: choose a new name " ++
"or check the box to overwrite the existing file.")
, (wPF,
"This file extension is reserved for wiki pages.")
]
if null errors
then do
expireCachedFile wikiname `mplus` return ()
fileContents <- liftIO $ B.readFile filePath
let len = B.length fileContents
liftIO $ save fs wikiname (Author user email) logMsg fileContents
let contents = thediv <<
[ h2 << ("Uploaded " ++ show len ++ " bytes")
, if takeExtension wikiname `elem` imageExtensions
then p << "To add this image to a page, use:" +++
pre << ("")
else p << "To link to this resource from a page, use:" +++
pre << ("[link label](/" ++ wikiname ++ ")") ]
formattedPage defaultPageLayout{
pgMessages = pMessages params,
pgShowPageTools = False,
pgTabs = [],
pgTitle = "Upload successful"}
contents
else withMessages errors uploadForm
goToPage :: Handler
goToPage = withData $ \(params :: Params) -> do
let gotopage = pGotoPage params
fs <- getFileStore
pruned_files <- liftIO (index fs) >>= filterM isPageFile
let allPageNames = map dropExtension pruned_files
let findPage f = find f allPageNames
let exactMatch f = gotopage == f
let insensitiveMatch f = (map toLower gotopage) == (map toLower f)
let prefixMatch f = (map toLower gotopage) `isPrefixOf` (map toLower f)
base' <- getWikiBase
case findPage exactMatch of
Just m -> seeOther (base' ++ urlForPage m) $ toResponse
"Redirecting to exact match"
Nothing -> case findPage insensitiveMatch of
Just m -> seeOther (base' ++ urlForPage m) $ toResponse
"Redirecting to case-insensitive match"
Nothing -> case findPage prefixMatch of
Just m -> seeOther (base' ++ urlForPage m) $
toResponse $ "Redirecting" ++
" to partial match"
Nothing -> searchResults
searchResults :: Handler
searchResults = withData $ \(params :: Params) -> do
let patterns = pPatterns params `orIfNull` [pGotoPage params]
fs <- getFileStore
matchLines <- if null patterns
then return []
else liftIO $ E.catch (search fs SearchQuery{
queryPatterns = patterns
, queryWholeWords = True
, queryMatchAll = True
, queryIgnoreCase = True })
-- catch error, because newer versions of git
-- return 1 on no match, and filestore <=0.3.3
-- doesn't handle this properly:
(\(_ :: FileStoreError) -> return [])
let contentMatches = map matchResourceName matchLines
allPages <- liftIO (index fs) >>= filterM isPageFile
let slashToSpace = map (\c -> if c == '/' then ' ' else c)
let inPageName pageName' x = x `elem` (words $ slashToSpace $ dropExtension pageName')
let matchesPatterns pageName' = not (null patterns) &&
all (inPageName (map toLower pageName')) (map (map toLower) patterns)
let pageNameMatches = filter matchesPatterns allPages
prunedFiles <- filterM isPageFile (contentMatches ++ pageNameMatches)
let allMatchedFiles = nub $ prunedFiles
let matchesInFile f = mapMaybe (\x -> if matchResourceName x == f
then Just (matchLine x)
else Nothing) matchLines
let matches = map (\f -> (f, matchesInFile f)) allMatchedFiles
let relevance (f, ms) = length ms + if f `elem` pageNameMatches
then 100
else 0
let preamble = if null patterns
then h3 << ["Please enter a search term."]
else h3 << [ stringToHtml (show (length matches) ++ " matches found for ")
, thespan ! [identifier "pattern"] << unwords patterns]
base' <- getWikiBase
let toMatchListItem (file, contents) = li <<
[ anchor ! [href $ base' ++ urlForPage (dropExtension file)] << dropExtension file
, stringToHtml (" (" ++ show (length contents) ++ " matching lines)")
, stringToHtml " "
, anchor ! [href "#", theclass "showmatch",
thestyle "display: none;"] << if length contents > 0
then "[show matches]"
else ""
, pre ! [theclass "matches"] << unlines contents]
let htmlMatches = preamble +++
olist << map toMatchListItem
(reverse $ sortBy (comparing relevance) matches)
formattedPage defaultPageLayout{
pgMessages = pMessages params,
pgShowPageTools = False,
pgTabs = [],
pgScripts = ["search.js"],
pgTitle = "Search results"}
htmlMatches
showPageHistory :: Handler
showPageHistory = withData $ \(params :: Params) -> do
page <- getPage
cfg <- getConfig
showHistory (pathForPage page $ defaultExtension cfg) page params
showFileHistory :: Handler
showFileHistory = withData $ \(params :: Params) -> do
file <- getPage
showHistory file file params
showHistory :: String -> String -> Params -> Handler
showHistory file page params = do
fs <- getFileStore
hist <- liftIO $ history fs [file] (TimeRange Nothing Nothing)
(Just $ pLimit params)
base' <- getWikiBase
let versionToHtml rev pos = li ! [theclass "difflink", intAttr "order" pos,
strAttr "revision" (revId rev),
strAttr "diffurl" (base' ++ "/_diff/" ++ page)] <<
[ thespan ! [theclass "date"] << (show $ revDateTime rev)
, stringToHtml " ("
, thespan ! [theclass "author"] << anchor ! [href $ base' ++ "/_activity?" ++
urlEncodeVars [("forUser", authorName $ revAuthor rev)]] <<
(authorName $ revAuthor rev)
, stringToHtml "): "
, anchor ! [href (base' ++ urlForPage page ++ "?revision=" ++ revId rev)] <<
thespan ! [theclass "subject"] << revDescription rev
, noscript <<
([ stringToHtml " [compare with "
, anchor ! [href $ base' ++ "/_diff" ++ urlForPage page ++ "?to=" ++ revId rev] <<
"previous" ] ++
(if pos /= 1
then [ primHtmlChar "nbsp"
, primHtmlChar "bull"
, primHtmlChar "nbsp"
, anchor ! [href $ base' ++ "/_diff" ++ urlForPage page ++ "?from=" ++
revId rev] << "current"
]
else []) ++
[stringToHtml "]"])
]
let contents = if null hist
then noHtml
else ulist ! [theclass "history"] <<
zipWith versionToHtml hist
[length hist, (length hist - 1)..1]
let more = if length hist == pLimit params
then anchor ! [href $ base' ++ "/_history" ++ urlForPage page
++ "?limit=" ++ show (pLimit params + 100)] <<
"Show more..."
else noHtml
let tabs = if file == page -- source file, not wiki page
then [ViewTab,HistoryTab]
else pgTabs defaultPageLayout
formattedPage defaultPageLayout{
pgPageName = page,
pgMessages = pMessages params,
pgScripts = ["dragdiff.js"],
pgTabs = tabs,
pgSelectedTab = HistoryTab,
pgTitle = ("Changes to " ++ page)
} $ contents +++ more
showActivity :: Handler
showActivity = withData $ \(params :: Params) -> do
cfg <- getConfig
currTime <- liftIO getCurrentTime
let defaultDaysAgo = fromIntegral (recentActivityDays cfg)
let daysAgo = addUTCTime (defaultDaysAgo * (-60) * 60 * 24) currTime
let since = case pSince params of
Nothing -> Just daysAgo
Just t -> Just t
let forUser = pForUser params
fs <- getFileStore
hist <- liftIO $ history fs [] (TimeRange since Nothing)
(Just $ pLimit params)
let hist' = case forUser of
Nothing -> hist
Just u -> filter (\r -> authorName (revAuthor r) == u) hist
let fileFromChange (Added f) = f
fileFromChange (Modified f) = f
fileFromChange (Deleted f) = f
base' <- getWikiBase
let fileAnchor revis file = if takeExtension file == "." ++ (defaultExtension cfg)
then anchor ! [href $ base' ++ "/_diff" ++ urlForPage (dropExtension file) ++ "?to=" ++ revis] << dropExtension file
else anchor ! [href $ base' ++ urlForPage file ++ "?revision=" ++ revis] << file
let filesFor changes revis = intersperse (stringToHtml " ") $
map (fileAnchor revis . fileFromChange) changes
let heading = h1 << ("Recent changes by " ++ fromMaybe "all users" forUser)
let revToListItem rev = li <<
[ thespan ! [theclass "date"] << (show $ revDateTime rev)
, stringToHtml " ("
, thespan ! [theclass "author"] <<
anchor ! [href $ base' ++ "/_activity?" ++
urlEncodeVars [("forUser", authorName $ revAuthor rev)]] <<
(authorName $ revAuthor rev)
, stringToHtml "): "
, thespan ! [theclass "subject"] << revDescription rev
, stringToHtml " ("
, thespan ! [theclass "files"] << filesFor (revChanges rev) (revId rev)
, stringToHtml ")"
]
let contents = ulist ! [theclass "history"] << map revToListItem hist'
formattedPage defaultPageLayout{
pgMessages = pMessages params,
pgShowPageTools = False,
pgTabs = [],
pgTitle = "Recent changes"
} (heading +++ contents)
showPageDiff :: Handler
showPageDiff = withData $ \(params :: Params) -> do
page <- getPage
cfg <- getConfig
showDiff (pathForPage page $ defaultExtension cfg) page params
showFileDiff :: Handler
showFileDiff = withData $ \(params :: Params) -> do
page <- getPage
showDiff page page params
showDiff :: String -> String -> Params -> Handler
showDiff file page params = do
let from = pFrom params
let to = pTo params
-- 'to' or 'from' must be given
when (from == Nothing && to == Nothing) mzero
fs <- getFileStore
-- if 'to' is not specified, defaults to current revision
-- if 'from' is not specified, defaults to revision immediately before 'to'
from' <- case (from, to) of
(Just _, _) -> return from
(Nothing, Nothing) -> return from
(Nothing, Just t) -> do
pageHist <- liftIO $ history fs [file]
(TimeRange Nothing Nothing)
Nothing
let (_, upto) = break (\r -> idsMatch fs (revId r) t)
pageHist
return $ if length upto >= 2
-- immediately preceding revision
then Just $ revId $ upto !! 1
else Nothing
result' <- liftIO $ E.try $ getDiff fs file from' to
case result' of
Left NotFound -> mzero
Left e -> liftIO $ E.throwIO e
Right htmlDiff -> formattedPage defaultPageLayout{
pgPageName = page,
pgRevision = from' `mplus` to,
pgMessages = pMessages params,
pgTabs = DiffTab :
pgTabs defaultPageLayout,
pgSelectedTab = DiffTab,
pgTitle = page
}
htmlDiff
getDiff :: FileStore -> FilePath -> Maybe RevisionId -> Maybe RevisionId
-> IO Html
getDiff fs file from to = do
rawDiff <- diff fs file from to
let diffLineToHtml (Both xs _) = thespan << unlines xs
diffLineToHtml (First xs) = thespan ! [theclass "deleted"] << unlines xs
diffLineToHtml (Second xs) = thespan ! [theclass "added"] << unlines xs
return $ h2 ! [theclass "revision"] <<
("Changes from " ++ fromMaybe "beginning" from ++
" to " ++ fromMaybe "current" to) +++
pre ! [theclass "diff"] << map diffLineToHtml rawDiff
editPage :: Handler
editPage = withData editPage'
editPage' :: Params -> Handler
editPage' params = do
let rev = pRevision params -- if this is set, we're doing a revert
fs <- getFileStore
page <- getPage
cfg <- getConfig
let getRevisionAndText = E.catch
(do c <- liftIO $ retrieve fs (pathForPage page $ defaultExtension cfg) rev
-- even if pRevision is set, we return revId of latest
-- saved version (because we're doing a revert and
-- we don't want gitit to merge the changes with the
-- latest version)
r <- liftIO $ latest fs (pathForPage page $ defaultExtension cfg) >>= revision fs
return (Just $ revId r, c))
(\e -> if e == NotFound
then return (Nothing, "")
else E.throwIO e)
(mbRev, raw) <- case pEditedText params of
Nothing -> liftIO getRevisionAndText
Just t -> let r = if null (pSHA1 params)
then Nothing
else Just (pSHA1 params)
in return (r, t)
let messages = pMessages params
let logMsg = pLogMsg params
let sha1Box = case mbRev of
Just r -> textfield "sha1" ! [thestyle "display: none",
value r]
Nothing -> noHtml
let readonly = if isJust (pRevision params)
-- disable editing of text box if it's a revert
then [strAttr "readonly" "yes",
strAttr "style" "color: gray"]
else []
base' <- getWikiBase
let editForm = gui (base' ++ urlForPage page) ! [identifier "editform"] <<
[ sha1Box
, textarea ! (readonly ++ [cols "80", name "editedText",
identifier "editedText"]) << raw
, br
, label ! [thefor "logMsg"] << "Description of changes:"
, br
, textfield "logMsg" ! (readonly ++ [value (logMsg `orIfNull` defaultSummary cfg) ])
, submit "update" "Save"
, primHtmlChar "nbsp"
, submit "cancel" "Discard"
, primHtmlChar "nbsp"
, input ! [thetype "button", theclass "editButton",
identifier "previewButton",
strAttr "onClick" "updatePreviewPane();",
strAttr "style" "display: none;",
value "Preview" ]
, thediv ! [ identifier "previewpane" ] << noHtml
]
let pgScripts' = ["preview.js"]
let pgScripts'' = case mathMethod cfg of
JsMathScript -> "jsMath/easy/load.js" : pgScripts'
MathML -> "MathMLinHTML.js" : pgScripts'
MathJax url -> url : pgScripts'
_ -> pgScripts'
formattedPage defaultPageLayout{
pgPageName = page,
pgMessages = messages,
pgRevision = rev,
pgShowPageTools = False,
pgShowSiteNav = False,
pgMarkupHelp = Just $ markupHelp cfg,
pgSelectedTab = EditTab,
pgScripts = pgScripts'',
pgTitle = ("Editing " ++ page)
} editForm
confirmDelete :: Handler
confirmDelete = do
page <- getPage
fs <- getFileStore
cfg <- getConfig
-- determine whether there is a corresponding page, and if not whether there
-- is a corresponding file
pageTest <- liftIO $ E.try $ latest fs (pathForPage page $ defaultExtension cfg)
fileToDelete <- case pageTest of
Right _ -> return $ pathForPage page $ defaultExtension cfg -- a page
Left NotFound -> do
fileTest <- liftIO $ E.try $ latest fs page
case fileTest of
Right _ -> return page -- a source file
Left NotFound -> return ""
Left e -> fail (show e)
Left e -> fail (show e)
let confirmForm = gui "" <<
[ p << "Are you sure you want to delete this page?"
, input ! [thetype "text", name "filetodelete",
strAttr "style" "display: none;", value fileToDelete]
, submit "confirm" "Yes, delete it!"
, stringToHtml " "
, submit "cancel" "No, keep it!"
, br ]
formattedPage defaultPageLayout{ pgTitle = "Delete " ++ page ++ "?" } $
if null fileToDelete
then ulist ! [theclass "messages"] << li <<
"There is no file or page by that name."
else confirmForm
deletePage :: Handler
deletePage = withData $ \(params :: Params) -> do
page <- getPage
cfg <- getConfig
let file = pFileToDelete params
mbUser <- getLoggedInUser
(user, email) <- case mbUser of
Nothing -> return ("Anonymous", "")
Just u -> return (uUsername u, uEmail u)
let author = Author user email
let descrip = deleteSummary cfg
base' <- getWikiBase
if pConfirm params && (file == page || file == page <.> (defaultExtension cfg))
then do
fs <- getFileStore
liftIO $ Data.FileStore.delete fs file author descrip
seeOther (base' ++ "/") $ toResponse $ p << "File deleted"
else seeOther (base' ++ urlForPage page) $ toResponse $ p << "Not deleted"
updatePage :: Handler
updatePage = withData $ \(params :: Params) -> do
page <- getPage
cfg <- getConfig
mbUser <- getLoggedInUser
(user, email) <- case mbUser of
Nothing -> return ("Anonymous", "")
Just u -> return (uUsername u, uEmail u)
editedText <- case pEditedText params of
Nothing -> error "No body text in POST request"
Just b -> applyPreCommitPlugins b
let logMsg = pLogMsg params `orIfNull` defaultSummary cfg
let oldSHA1 = pSHA1 params
fs <- getFileStore
base' <- getWikiBase
if null . filter (not . isSpace) $ logMsg
then withMessages ["Description cannot be empty."] editPage
else do
when (length editedText > fromIntegral (maxPageSize cfg)) $
error "Page exceeds maximum size."
-- check SHA1 in case page has been modified, merge
modifyRes <- if null oldSHA1
then liftIO $ create fs (pathForPage page $ defaultExtension cfg)
(Author user email) logMsg editedText >>
return (Right ())
else do
expireCachedFile (pathForPage page $ defaultExtension cfg) `mplus` return ()
liftIO $ E.catch (modify fs (pathForPage page $ defaultExtension cfg)
oldSHA1 (Author user email) logMsg
editedText)
(\e -> if e == Unchanged
then return (Right ())
else E.throwIO e)
case modifyRes of
Right () -> seeOther (base' ++ urlForPage page) $ toResponse $ p << "Page updated"
Left (MergeInfo mergedWithRev conflicts mergedText) -> do
let mergeMsg = "The page has been edited since you checked it out. " ++
"Changes from revision " ++ revId mergedWithRev ++
" have been merged into your edits below. " ++
if conflicts
then "Please resolve conflicts and Save."
else "Please review and Save."
editPage' $
params{ pEditedText = Just mergedText,
pSHA1 = revId mergedWithRev,
pMessages = [mergeMsg] }
indexPage :: Handler
indexPage = do
path' <- getPath
base' <- getWikiBase
cfg <- getConfig
let ext = defaultExtension cfg
let prefix' = if null path' then "" else path' ++ "/"
fs <- getFileStore
listing <- liftIO $ directory fs prefix'
let isNotDiscussionPage (FSFile f) = isNotDiscussPageFile f
isNotDiscussionPage (FSDirectory _) = return True
prunedListing <- filterM isNotDiscussionPage listing
let htmlIndex = fileListToHtml base' prefix' ext prunedListing
formattedPage defaultPageLayout{
pgPageName = prefix',
pgShowPageTools = False,
pgTabs = [],
pgScripts = [],
pgTitle = "Contents"} htmlIndex
fileListToHtml :: String -> String -> String -> [Resource] -> Html
fileListToHtml base' prefix ext files =
let fileLink (FSFile f) | takeExtension f == "." ++ ext =
li ! [theclass "page" ] <<
anchor ! [href $ base' ++ urlForPage (prefix ++ dropExtension f)] <<
dropExtension f
fileLink (FSFile f) = li ! [theclass "upload"] << concatHtml
[ anchor ! [href $ base' ++ urlForPage (prefix ++ f)] << f
, anchor ! [href $ base' ++ "_delete" ++ urlForPage (prefix ++ f)] << "(delete)"
]
fileLink (FSDirectory f) =
li ! [theclass "folder"] <<
anchor ! [href $ base' ++ urlForPage (prefix ++ f) ++ "/"] << f
updirs = drop 1 $ inits $ splitPath $ '/' : prefix
uplink = foldr (\d accum ->
concatHtml [ anchor ! [theclass "updir",
href $ if length d <= 1
then base' ++ "/_index"
else base' ++
urlForPage (joinPath $ drop 1 d)] <<
lastNote "fileListToHtml" d, accum]) noHtml updirs
in uplink +++ ulist ! [theclass "index"] << map fileLink files
-- NOTE: The current implementation of categoryPage does not go via the
-- filestore abstraction. That is bad, but can only be fixed if we add
-- more sophisticated searching options to filestore.
categoryPage :: Handler
categoryPage = do
path' <- getPath
cfg <- getConfig
let pcategories = wordsBy (==',') path'
let repoPath = repositoryPath cfg
let categoryDescription = "Category: " ++ (intercalate " + " pcategories)
fs <- getFileStore
pages <- liftIO (index fs) >>= filterM isPageFile >>= filterM isNotDiscussPageFile
matches <- liftM catMaybes $
forM pages $ \f -> do
categories <- liftIO $ readCategories $ repoPath </> f
return $ if all ( `elem` categories) pcategories
then Just (f, categories \\ pcategories)
else Nothing
base' <- getWikiBase
let toMatchListItem file = li <<
[ anchor ! [href $ base' ++ urlForPage (dropExtension file)] << dropExtension file ]
let toRemoveListItem cat = li <<
[ anchor ! [href $ base' ++
(if null (tail pcategories)
then "/_categories"
else "/_category" ++ urlForPage (intercalate "," $ Data.List.delete cat pcategories)) ]
<< ("-" ++ cat) ]
let toAddListItem cat = li <<
[ anchor ! [href $ base' ++
"/_category" ++ urlForPage (path' ++ "," ++ cat) ]
<< ("+" ++ cat) ]
let matchList = ulist << map toMatchListItem (fst $ unzip matches) +++
thediv ! [ identifier "categoryList" ] <<
ulist << (++) (map toAddListItem (nub $ concat $ snd $ unzip matches))
(map toRemoveListItem pcategories)
formattedPage defaultPageLayout{
pgPageName = categoryDescription,
pgShowPageTools = False,
pgTabs = [],
pgScripts = ["search.js"],
pgTitle = categoryDescription }
matchList
categoryListPage :: Handler
categoryListPage = do
cfg <- getConfig
let repoPath = repositoryPath cfg
fs <- getFileStore
pages <- liftIO (index fs) >>= filterM isPageFile >>= filterM isNotDiscussPageFile
categories <- liftIO $ liftM (nub . sort . concat) $ forM pages $ \f ->
readCategories (repoPath </> f)
base' <- getWikiBase
let toCatLink ctg = li <<
[ anchor ! [href $ base' ++ "/_category" ++ urlForPage ctg] << ctg ]
let htmlMatches = ulist << map toCatLink categories
formattedPage defaultPageLayout{
pgPageName = "Categories",
pgShowPageTools = False,
pgTabs = [],
pgScripts = ["search.js"],
pgTitle = "Categories" } htmlMatches
expireCache :: Handler
expireCache = do
page <- getPage
cfg <- getConfig
-- try it as a page first, then as an uploaded file
expireCachedFile (pathForPage page $ defaultExtension cfg)
expireCachedFile page
ok $ toResponse ()
feedHandler :: Handler
feedHandler = do
cfg <- getConfig
when (not $ useFeed cfg) mzero
base' <- getWikiBase
feedBase <- if null (baseUrl cfg) -- if baseUrl blank, try to get it from Host header
then do
mbHost <- getHost
case mbHost of
Nothing -> error "Could not determine base URL"
Just hn -> return $ "http://" ++ hn ++ base'
else case baseUrl cfg ++ base' of
w@('h':'t':'t':'p':'s':':':'/':'/':_) -> return w
x@('h':'t':'t':'p':':':'/':'/':_) -> return x
y -> return $ "http://" ++ y
let fc = FeedConfig{
fcTitle = wikiTitle cfg
, fcBaseUrl = feedBase
, fcFeedDays = feedDays cfg }
path' <- getPath -- e.g. "foo/bar" if they hit /_feed/foo/bar
let file = (path' `orIfNull` "_site") <.> "feed"
let mbPath = if null path' then Nothing else Just path'
-- first, check for a cached version that is recent enough
now <- liftIO getCurrentTime
let isRecentEnough t = truncate (diffUTCTime now t) < 60 * feedRefreshTime cfg
mbCached <- lookupCache file
case mbCached of
Just (modtime, contents) | isRecentEnough modtime -> do
let emptyResponse = setContentType "application/atom+xml; charset=utf-8" . toResponse $ ()
ok $ emptyResponse{rsBody = B.fromChunks [contents]}
_ -> do
fs <- getFileStore
resp' <- liftM toResponse $ liftIO (filestoreToXmlFeed fc fs mbPath)
cacheContents file $ S.concat $ B.toChunks $ rsBody resp'
ok . setContentType "application/atom+xml; charset=UTF-8" $ resp'
|
bergmannf/gitit
|
src/Network/Gitit/Handlers.hs
|
gpl-2.0
| 37,244
| 0
| 26
| 13,997
| 9,552
| 4,824
| 4,728
| 735
| 9
|
import Graphics.Gloss
main :: IO ()
main = display (InWindow "Dibujo" (300,300) (20,20)) white cuadrado
cuadrado :: Picture
cuadrado = Polygon [(72,72),(144,72),(144,144),(72,144),(72,72)]
|
jaalonso/I1M-Cod-Temas
|
src/Tema_25/poligono.hs
|
gpl-2.0
| 191
| 0
| 8
| 24
| 108
| 64
| 44
| 5
| 1
|
import Control.Applicative
import Data.List
modulo :: Integer
modulo = 10^9 + 7
processQueries
:: [(Char, Int, Int)] -> SegTree -> [Integer] -> [Integer]
processQueries [] _ acc = reverse acc
processQueries (('Q', l, r):q) root acc =
let ans = (query (l, r) root) `mod` modulo
in processQueries q root (ans : acc)
processQueries ((_, idx, value):q) root acc =
let oldVal = query (idx, idx) root
newVal = oldVal * (fromIntegral value)
nroot = update idx newVal root
in processQueries q nroot acc
solve :: Int -> [Integer] -> [(Char, Int, Int)] -> [Integer]
solve n arr queries = processQueries queries tree []
where
tree = foldl' (\ root (idx, v) -> update idx v root)
(initSegTree n)
(zip [0..] arr)
main :: IO ()
main = do
n <- (read :: String -> Int) <$> getLine
arr <- map (read :: String -> Integer) . words <$> getLine
_ <- (read :: String -> Int) <$> getLine
queryString <- lines <$> getContents
let queries = map (\ x -> (\ [[p], q, r] ->(p, read q :: Int, read r :: Int))
(words x)) queryString
mapM_ print $ solve n arr queries
-- | The following code implements the segment tree used above
data SegTree =
Node {
val :: Integer
, left, right :: Int
, leftChild, rightChild :: SegTree
} |
Leaf {
val :: Integer
, left, right :: Int
}
initSegTree :: Int -> SegTree
initSegTree n = aux 0 (n - 1)
where aux l r
| l == r = Leaf {val = -1, left = l, right = r}
| otherwise =
let mid = (l + r) `div` 2
in Node { val = -1, left = l, right = r
, leftChild = aux l mid
, rightChild = aux (succ mid) r
}
query :: (Int, Int) -> SegTree -> Integer
query range@(l, r) root
| r < left root = 1
| l > right root = 1
| l <= left root && right root <= r = val root
| otherwise =
lcm (query range (leftChild root)) (query range (rightChild root))
update :: Int -> Integer -> SegTree -> SegTree
update idx newVal root
| left root <= idx && idx <= right root =
case root of
Leaf {} -> root {val = newVal }
_ -> root {val = lcm newVal (val root),
leftChild = lChild, rightChild = rChild }
| otherwise = root
where
lChild = update idx newVal $ leftChild root
rChild = update idx newVal $ rightChild root
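-- A small worked example (illustrative only, not part of the original
-- submission): for the array [2,3,4] the query ('Q',0,2) yields
-- lcm 2 3 4 = 12; the update ('U',1,5) multiplies index 1 by 5, after which
-- the same range query yields lcm 2 15 4 = 60 (both already below the modulus).
_exampleAnswers :: [Integer]
_exampleAnswers = solve 3 [2, 3, 4] [('Q', 0, 2), ('U', 1, 5), ('Q', 0, 2)]
-- expected: [12, 60]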
|
m00nlight/hackerrank
|
functional/Persistent-Structure/Minimum-Multiple/main2.hs
|
gpl-2.0
| 2,532
| 0
| 17
| 879
| 1,036
| 551
| 485
| 63
| 2
|
-- |This is a program which uses OpenGL and the PupEvents framework to
-- demonstrate an example usage pattern. It simply takes a click in
-- the window, sends it to the server (which negates the coordinates),
-- and then paints the original and new Click coordinates with a point.
-- This uses the -threaded option in GHC.
module Main where
import Graphics.Rendering.OpenGL.GL (($=), ($=!), makeGettableStateVar)
import qualified Graphics.Rendering.OpenGL.GL as GL
import qualified Graphics.UI.GLUT as GLUT
import qualified PupEventsPQueue as PQ
import qualified EventsClient as Events
import Control.Concurrent.STM
import Control.Concurrent
import Control.Monad
import System.Environment
-- |Initial OpenGL setup function. The interesting thing here is that
-- we fork a thread using 'checkEvents' to check for events.
main :: IO ()
main =
do args <- getArgs
let ip = args !! 0
let priorities = read (args !! 1) :: Int
(progname, _) <- GLUT.getArgsAndInitialize
(outqueue, inqueue) <- Events.doClient (Just ip) priorities
GLUT.createWindow "Hello World"
GLUT.initialDisplayMode $= [GLUT.DoubleBuffered, GLUT.RGBAMode]
GLUT.keyboardMouseCallback $=! Just (keyboardMouse inqueue outqueue)
GLUT.displayCallback $=! render
forkOS $ checkEvents inqueue
GL.clearColor $= GL.Color4 0 0 0 1
GL.blend $= GL.Enabled
GL.blendFunc $= (GL.SrcAlpha, GL.OneMinusSrcAlpha)
GL.lineSmooth $= GL.Enabled
GL.pointSmooth $= GL.Enabled
GL.polygonSmooth $= GL.Enabled
GL.clear [GL.ColorBuffer]
GL.normalize $= GL.Enabled
GLUT.mainLoop
-- |This is very simple, since all of the drawing is done by Event
-- handlers. We set this function as the display callback and call it
-- after each event has been handled.
render :: IO ()
render = GLUT.swapBuffers
-- |This checks the inqueue returned by the Client module for Events,
-- blocking if it can't find one. We fork this off because if the
-- functionality here resided in the display callback it would block
-- the entire application from doing anything while it was waiting for
-- an event, including calling the display callback. Using this method
-- we handle Events as they come in and call the display callback to
-- swap the buffers when it's done.
checkEvents :: PQ.PQueue Events.Event -- ^ The queue to check on
-> IO b
checkEvents pqueue = forever $
do event <- atomically $
do e <- PQ.getThing pqueue
case e of
Nothing -> retry
Just event -> return event
putStrLn $ "Got event"
(Events.lookupHandlerClient event) event
GLUT.postRedisplay Nothing
-- |This is the keyboardMouse callback that gets used by OpenGL. We
-- first normalize the coordinates to a -1,1 range, then we push a
-- Click event to the incoming queue (not all events on the queue have
-- to come from the Server!) and also on the outgoing queue so the
-- server can handle it.
keyboardMouse :: PQ.PQueue Events.Event -- ^ The incoming events queue
-> PQ.PQueue Events.Event -- ^ The outgoing events queue
-> GLUT.Key
-> GLUT.KeyState
-> t
-> GLUT.Position
-> IO ()
keyboardMouse iqueue oqueue (GLUT.MouseButton GLUT.LeftButton) GLUT.Down _ (GLUT.Position x y) =
do (p1, p2) <- normalizeXY (fromIntegral x) (fromIntegral y)
putStrLn $ "click coords: " ++ show x ++ ", " ++ show y
putStrLn $ "normalized: " ++ show p1 ++ ", " ++ show p2
let event = Events.Click p1 p2
putStrLn $ "KeyboardMouse callback"
atomically $ PQ.writeThing oqueue (Events.lookupPriorityClient event) event
atomically $ PQ.writeThing iqueue (Events.lookupPriorityClient event) event
putStrLn $ "wrote things"
keyboardMouse _ _ _ _ _ _ = return ()
-- |This is a function to normalize coordinates that are given with
-- respect to window dimensions. OpenGL likes its primitives to have
-- drawing coordinates ranging from -1 to 1 on the x or y axis.
normalizeXY :: (Fractional t, Fractional t1) =>
t -- ^ X coordinate
-> t1 -- ^ Y coordinate
-> IO (t, t1) -- ^ Normalized coordinates (x, y)
normalizeXY x y =
do (_, (GLUT.Size width height)) <- GLUT.get GLUT.viewport
let w = fromIntegral width
let h = fromIntegral height
return ((x-w/2.0)/(w/2.0), (y-h/2.0)/((-h)/2.0))
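-- | A pure restatement of the same formula (an illustrative addition, not
-- used by the demo): with an 800x600 viewport a click at the centre (400,300)
-- maps to (0,0) and the top-right corner (800,0) maps to (1,1).
normalizeXYPure :: Double -> Double -> Double -> Double -> (Double, Double)
normalizeXYPure w h x y = ((x - w/2.0) / (w/2.0), (y - h/2.0) / ((-h)/2.0))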
|
RocketPuppy/PupEvents
|
Pup-Events-Demo/EventsClient/Demo.hs
|
gpl-3.0
| 4,581
| 0
| 15
| 1,170
| 947
| 488
| 459
| 70
| 2
|
pattern $ \db ->
pseq (fmap pevent (query (fileNameMatch "*") db)) 1
|
kaoskorobase/mescaline
|
tests/patterns/sequence.hs
|
gpl-3.0
| 73
| 2
| 12
| 16
| 39
| 19
| 20
| -1
| -1
|
{-----------------------------------------------------------------
(c) 2008-2009 Markus Dittrich
This program is free software; you can redistribute it
and/or modify it under the terms of the GNU General Public
License Version 3 as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License Version 3 for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free
Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
--------------------------------------------------------------------}
-- | functionality related to parsing tokens
module TokenParser ( module Control.Applicative
, module Text.ParserCombinators.Parsec
, braces
, brackets
, builtinFunctions
, colon
, comma
, commaSep
, charLiteral
, float
, identifier
, integer
, lineToken
, parens
, keywords
, lexer
, naturalOrFloat
, operator
, operators
, reservedOp
, reserved
, semi
, stringLiteral
, symbol
, whiteSpace
) where
-- imports
import Control.Applicative
import Control.Monad (ap, MonadPlus (..))
import Prelude
import Text.ParserCombinators.Parsec hiding (many,optional, (<|>))
import qualified Text.ParserCombinators.Parsec.Token as PT
import Text.ParserCombinators.Parsec.Language (haskellDef
, reservedOpNames
, reservedNames )
-- local imports
import ExtraFunctions
{- Definitions for Applicative Parsec instance -}
{--
-- | Applicative instance for Monad
instance Applicative (GenParser s a) where
pure = return
(<*>) = ap
-- |Alternative instance for MonadPlus
instance Alternative (GenParser s a) where
empty = mzero
(<|>) = mplus
--}
{- set up the Token Parser -}
-- | builtin functions of the form (Double -> Double)
builtinFunctions :: [(String, Double -> Double)]
builtinFunctions = [ ("sqrt",sqrt)
, ("exp",exp)
, ("log",log)
, ("log2", logBase 2)
, ("log10", logBase 10)
, ("sin",sin)
, ("cos",cos)
, ("tan",tan)
, ("asin", asin)
, ("acos", acos)
, ("atan", atan)
, ("sinh", sinh)
, ("cosh", cosh)
, ("tanh", tanh)
                   , ("asinh", asinh)
                   , ("acosh", acosh)
, ("atanh", atanh)
, ("erf", erf)
, ("erfc", erfc)
, ("abs", abs)
]
-- | all other keywords that are not regular functions
keywords :: [String]
keywords = [ "end", "events", "molecules", "output", "outputBuffer"
           , "outputFile", "outputFreq", "parameters", "reactions"
           , "systemVol", "time", "variables"]
operators :: [String]
operators = ["+", "->", "::", "=", "{", "}", ">=", "==", "<="
, "<", ">", "*", "/", "-", "&&"]
-- | function generating a token parser based on a
-- lexical parser combined with a language record definition
lexer :: PT.TokenParser st
lexer = PT.makeTokenParser
( haskellDef { reservedOpNames = operators
, reservedNames = keywords
++ map fst builtinFunctions
} )
-- | parser for a parser sandwiched between line
-- symbols '|'
lineToken :: CharParser st a -> CharParser st a
lineToken = between (symbol "|") (symbol "|")
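-- A small usage sketch (illustrative, not part of the original module):
-- 'lineToken identifier' accepts an identifier written between '|' symbols,
-- e.g. parse (lineToken identifier) "" "| rate |" evaluates to Right "rate".
_lineTokenExample :: Either ParseError String
_lineTokenExample = parse (lineToken identifier) "" "| rate |"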
-- | token parser for parentheses
parens :: CharParser st a -> CharParser st a
parens = PT.parens lexer
-- | token parser for braces
braces :: CharParser st a -> CharParser st a
braces = PT.braces lexer
-- | token parser for brackets
brackets :: CharParser st a -> CharParser st a
brackets = PT.brackets lexer
-- | token parser for Integer
integer :: CharParser st Integer
integer = PT.integer lexer
-- | token parser for String
stringLiteral :: CharParser st String
stringLiteral = PT.stringLiteral lexer
-- | token parser for Char
charLiteral :: CharParser st Char
charLiteral = PT.charLiteral lexer
-- | token parser for Double
float :: CharParser st Double
float = PT.float lexer
-- | token parser for Either Integer Double
naturalOrFloat :: CharParser st (Either Integer Double)
naturalOrFloat = PT.naturalOrFloat lexer
-- | token parser for reserved operators
reservedOp :: String -> CharParser st ()
reservedOp = PT.reservedOp lexer
-- | token parser for keywords
reserved :: String -> CharParser st ()
reserved = PT.reserved lexer
-- | token parser for whitespace
whiteSpace :: CharParser st ()
whiteSpace = PT.whiteSpace lexer
-- | token parser for colon
colon :: CharParser st String
colon = PT.colon lexer
-- | token parser for semicolon
semi :: CharParser st String
semi = PT.semi lexer
-- | token parser for comma
comma :: CharParser st String
comma = PT.comma lexer
-- | token parser for comma separated list of items
commaSep :: CharParser st a -> CharParser st [a]
commaSep = PT.commaSep lexer
-- | token parser for symbol
symbol :: String -> CharParser st String
symbol = PT.symbol lexer
-- | token parser for identifiers
identifier :: CharParser st String
identifier = PT.identifier lexer
-- | token parser for operators
operator :: CharParser st String
operator = PT.operator lexer
|
haskelladdict/simgi
|
src/TokenParser.hs
|
gpl-3.0
| 6,070
| 0
| 10
| 1,943
| 1,026
| 606
| 420
| 105
| 1
|
{-
The Delve Programming Language
Copyright 2009 John Morrice
Distributed under the terms of the GNU General Public License v3, or ( at your option ) any later version.
This file is part of Delve.
Delve is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Delve is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Delve. If not, see <http://www.gnu.org/licenses/>.
-}
module LocalCore where
import Core
-- A simplified version of core where only simple expressions can be applied to functions and the scope used is well defined for local interactions
type LCore =
[ LStmt ]
type LExecCore =
[ LExecStmt ]
data LExecStmt =
LStmt LStmt
| LExecExpr LExecExpr
deriving ( Show , Eq )
data LStmt =
LSetHere Symbol LExecStmt
| LSetLocal Int [ Symbol ] LExecStmt
| LSetObject [ Symbol ] LExecStmt
| LStandalone LCoreExpr
| LBegin LExecCore
| LCoreSpecial Symbol String
deriving ( Show , Eq )
data LCoreExpr =
LApp LVar [ LVar ]
| LCoreMatch LVar LAlternatives
| LSimple LSimpleExpr
deriving ( Show , Eq )
data LVar =
LLocalVar Int [ Symbol ]
| LObjVar [ Symbol ]
deriving ( Show , Eq )
type LAlternatives =
[ LCoreAlternative ]
type LCoreAlternative =
( Symbol , LExecCore )
data LSimpleExpr =
LLit Lit
| LReserved Reserved
| LVar LVar
deriving ( Show , Eq )
data LExecExpr =
LFunction [ Symbol ] LExecCore
| LMethod [ Symbol ] LExecCore
deriving ( Show , Eq )
|
elginer/Delve
|
src/LocalCore.hs
|
gpl-3.0
| 1,955
| 0
| 7
| 497
| 269
| 159
| 110
| 40
| 0
|
module Util where
-- | Determine whether all elements of a list are unique.
allDistinct :: Eq a => [a] -> Bool
allDistinct [] = True
allDistinct (x:xs) = all (x/=) xs && allDistinct xs
-- | Determine whether a positive integer is a power of 2.
powerOf2 :: Int -> Bool
powerOf2 1 = True
powerOf2 n = (n `mod` 2 == 0) && (powerOf2 (n `div` 2) || n `div` 2 == 0)
-- | Compute the floor of base-2 log of a positive integer.
flg :: Int -> Int
flg = floor . logBase 2 . fromIntegral
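-- A few illustrative evaluations (not part of the original module):
-- allDistinct "abca" is False, powerOf2 64 is True, and flg 100 is 6.
_utilExamples :: (Bool, Bool, Int)
_utilExamples = (allDistinct "abca", powerOf2 64, flg 100)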
|
zcesur/h99
|
src/Util.hs
|
gpl-3.0
| 480
| 0
| 12
| 103
| 163
| 90
| 73
| 9
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Spectrum.Paws.GetSpectrumBatch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- The Google Spectrum Database does not support batch requests, so this
-- method always yields an UNIMPLEMENTED error.
--
-- /See:/ <http://developers.google.com/spectrum Google Spectrum Database API Reference> for @spectrum.paws.getSpectrumBatch@.
module Network.Google.Resource.Spectrum.Paws.GetSpectrumBatch
(
-- * REST Resource
PawsGetSpectrumBatchResource
-- * Creating a Request
, pawsGetSpectrumBatch
, PawsGetSpectrumBatch
-- * Request Lenses
, pgsbPayload
) where
import Network.Google.Prelude
import Network.Google.Spectrum.Types
-- | A resource alias for @spectrum.paws.getSpectrumBatch@ method which the
-- 'PawsGetSpectrumBatch' request conforms to.
type PawsGetSpectrumBatchResource =
"spectrum" :>
"v1explorer" :>
"paws" :>
"getSpectrumBatch" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] PawsGetSpectrumBatchRequest :>
Post '[JSON] PawsGetSpectrumBatchResponse
-- | The Google Spectrum Database does not support batch requests, so this
-- method always yields an UNIMPLEMENTED error.
--
-- /See:/ 'pawsGetSpectrumBatch' smart constructor.
newtype PawsGetSpectrumBatch =
PawsGetSpectrumBatch'
{ _pgsbPayload :: PawsGetSpectrumBatchRequest
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PawsGetSpectrumBatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pgsbPayload'
pawsGetSpectrumBatch
:: PawsGetSpectrumBatchRequest -- ^ 'pgsbPayload'
-> PawsGetSpectrumBatch
pawsGetSpectrumBatch pPgsbPayload_ =
PawsGetSpectrumBatch' {_pgsbPayload = pPgsbPayload_}
-- | Multipart request metadata.
pgsbPayload :: Lens' PawsGetSpectrumBatch PawsGetSpectrumBatchRequest
pgsbPayload
= lens _pgsbPayload (\ s a -> s{_pgsbPayload = a})
instance GoogleRequest PawsGetSpectrumBatch where
type Rs PawsGetSpectrumBatch =
PawsGetSpectrumBatchResponse
type Scopes PawsGetSpectrumBatch = '[]
requestClient PawsGetSpectrumBatch'{..}
= go (Just AltJSON) _pgsbPayload spectrumService
where go
= buildClient
(Proxy :: Proxy PawsGetSpectrumBatchResource)
mempty
|
brendanhay/gogol
|
gogol-spectrum/gen/Network/Google/Resource/Spectrum/Paws/GetSpectrumBatch.hs
|
mpl-2.0
| 3,107
| 0
| 13
| 647
| 309
| 190
| 119
| 50
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.IAM.DetachRolePolicy
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Removes the specified managed policy from the specified role.
--
-- A role can also have inline policies embedded with it. To delete an
-- inline policy, use the DeleteRolePolicy API. For information about
-- policies, refer to
-- <http://docs.aws.amazon.com/IAM/latest/UserGuide/policies-managed-vs-inline.html Managed Policies and Inline Policies>
-- in the /IAM User Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_DetachRolePolicy.html AWS API Reference> for DetachRolePolicy.
module Network.AWS.IAM.DetachRolePolicy
(
-- * Creating a Request
detachRolePolicy
, DetachRolePolicy
-- * Request Lenses
, drpRoleName
, drpPolicyARN
-- * Destructuring the Response
, detachRolePolicyResponse
, DetachRolePolicyResponse
) where
import Network.AWS.IAM.Types
import Network.AWS.IAM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'detachRolePolicy' smart constructor.
data DetachRolePolicy = DetachRolePolicy'
{ _drpRoleName :: !Text
, _drpPolicyARN :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DetachRolePolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'drpRoleName'
--
-- * 'drpPolicyARN'
detachRolePolicy
:: Text -- ^ 'drpRoleName'
-> Text -- ^ 'drpPolicyARN'
-> DetachRolePolicy
detachRolePolicy pRoleName_ pPolicyARN_ =
DetachRolePolicy'
{ _drpRoleName = pRoleName_
, _drpPolicyARN = pPolicyARN_
}
-- | The name (friendly name, not ARN) of the role to detach the policy from.
drpRoleName :: Lens' DetachRolePolicy Text
drpRoleName = lens _drpRoleName (\ s a -> s{_drpRoleName = a});
-- | Undocumented member.
drpPolicyARN :: Lens' DetachRolePolicy Text
drpPolicyARN = lens _drpPolicyARN (\ s a -> s{_drpPolicyARN = a});
instance AWSRequest DetachRolePolicy where
type Rs DetachRolePolicy = DetachRolePolicyResponse
request = postQuery iAM
response = receiveNull DetachRolePolicyResponse'
instance ToHeaders DetachRolePolicy where
toHeaders = const mempty
instance ToPath DetachRolePolicy where
toPath = const "/"
instance ToQuery DetachRolePolicy where
toQuery DetachRolePolicy'{..}
= mconcat
["Action" =: ("DetachRolePolicy" :: ByteString),
"Version" =: ("2010-05-08" :: ByteString),
"RoleName" =: _drpRoleName,
"PolicyArn" =: _drpPolicyARN]
-- | /See:/ 'detachRolePolicyResponse' smart constructor.
data DetachRolePolicyResponse =
DetachRolePolicyResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DetachRolePolicyResponse' with the minimum fields required to make a request.
--
detachRolePolicyResponse
:: DetachRolePolicyResponse
detachRolePolicyResponse = DetachRolePolicyResponse'
|
olorin/amazonka
|
amazonka-iam/gen/Network/AWS/IAM/DetachRolePolicy.hs
|
mpl-2.0
| 3,688
| 0
| 9
| 723
| 444
| 271
| 173
| 62
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DeploymentManager.Deployments.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a deployment and all of the resources described by the
-- deployment manifest.
--
-- /See:/ <https://cloud.google.com/deployment-manager Cloud Deployment Manager V2 API Reference> for @deploymentmanager.deployments.insert@.
module Network.Google.Resource.DeploymentManager.Deployments.Insert
(
-- * REST Resource
DeploymentsInsertResource
-- * Creating a Request
, deploymentsInsert
, DeploymentsInsert
-- * Request Lenses
, diCreatePolicy
, diXgafv
, diUploadProtocol
, diProject
, diAccessToken
, diUploadType
, diPayload
, diPreview
, diCallback
) where
import Network.Google.DeploymentManager.Types
import Network.Google.Prelude
-- | A resource alias for @deploymentmanager.deployments.insert@ method which the
-- 'DeploymentsInsert' request conforms to.
type DeploymentsInsertResource =
"deploymentmanager" :>
"v2" :>
"projects" :>
Capture "project" Text :>
"global" :>
"deployments" :>
QueryParam "createPolicy"
DeploymentsInsertCreatePolicy
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "preview" Bool :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Deployment :>
Post '[JSON] Operation
-- | Creates a deployment and all of the resources described by the
-- deployment manifest.
--
-- /See:/ 'deploymentsInsert' smart constructor.
data DeploymentsInsert =
DeploymentsInsert'
{ _diCreatePolicy :: !DeploymentsInsertCreatePolicy
, _diXgafv :: !(Maybe Xgafv)
, _diUploadProtocol :: !(Maybe Text)
, _diProject :: !Text
, _diAccessToken :: !(Maybe Text)
, _diUploadType :: !(Maybe Text)
, _diPayload :: !Deployment
, _diPreview :: !(Maybe Bool)
, _diCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DeploymentsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'diCreatePolicy'
--
-- * 'diXgafv'
--
-- * 'diUploadProtocol'
--
-- * 'diProject'
--
-- * 'diAccessToken'
--
-- * 'diUploadType'
--
-- * 'diPayload'
--
-- * 'diPreview'
--
-- * 'diCallback'
deploymentsInsert
:: Text -- ^ 'diProject'
-> Deployment -- ^ 'diPayload'
-> DeploymentsInsert
deploymentsInsert pDiProject_ pDiPayload_ =
DeploymentsInsert'
{ _diCreatePolicy = DICPCreateOrAcquire
, _diXgafv = Nothing
, _diUploadProtocol = Nothing
, _diProject = pDiProject_
, _diAccessToken = Nothing
, _diUploadType = Nothing
, _diPayload = pDiPayload_
, _diPreview = Nothing
, _diCallback = Nothing
}
-- | Sets the policy to use for creating new resources.
diCreatePolicy :: Lens' DeploymentsInsert DeploymentsInsertCreatePolicy
diCreatePolicy
= lens _diCreatePolicy
(\ s a -> s{_diCreatePolicy = a})
-- | V1 error format.
diXgafv :: Lens' DeploymentsInsert (Maybe Xgafv)
diXgafv = lens _diXgafv (\ s a -> s{_diXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
diUploadProtocol :: Lens' DeploymentsInsert (Maybe Text)
diUploadProtocol
= lens _diUploadProtocol
(\ s a -> s{_diUploadProtocol = a})
-- | The project ID for this request.
diProject :: Lens' DeploymentsInsert Text
diProject
= lens _diProject (\ s a -> s{_diProject = a})
-- | OAuth access token.
diAccessToken :: Lens' DeploymentsInsert (Maybe Text)
diAccessToken
= lens _diAccessToken
(\ s a -> s{_diAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
diUploadType :: Lens' DeploymentsInsert (Maybe Text)
diUploadType
= lens _diUploadType (\ s a -> s{_diUploadType = a})
-- | Multipart request metadata.
diPayload :: Lens' DeploymentsInsert Deployment
diPayload
= lens _diPayload (\ s a -> s{_diPayload = a})
-- | If set to true, creates a deployment and creates \"shell\" resources but
-- does not actually instantiate these resources. This allows you to
-- preview what your deployment looks like. After previewing a deployment,
-- you can deploy your resources by making a request with the \`update()\`
-- method or you can use the \`cancelPreview()\` method to cancel the
-- preview altogether. Note that the deployment will still exist after you
-- cancel the preview and you must separately delete this deployment if you
-- want to remove it.
diPreview :: Lens' DeploymentsInsert (Maybe Bool)
diPreview
= lens _diPreview (\ s a -> s{_diPreview = a})
-- | JSONP
diCallback :: Lens' DeploymentsInsert (Maybe Text)
diCallback
= lens _diCallback (\ s a -> s{_diCallback = a})
instance GoogleRequest DeploymentsInsert where
type Rs DeploymentsInsert = Operation
type Scopes DeploymentsInsert =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/ndev.cloudman"]
requestClient DeploymentsInsert'{..}
= go _diProject (Just _diCreatePolicy) _diXgafv
_diUploadProtocol
_diAccessToken
_diUploadType
_diPreview
_diCallback
(Just AltJSON)
_diPayload
deploymentManagerService
where go
= buildClient
(Proxy :: Proxy DeploymentsInsertResource)
mempty
|
brendanhay/gogol
|
gogol-deploymentmanager/gen/Network/Google/Resource/DeploymentManager/Deployments/Insert.hs
|
mpl-2.0
| 6,542
| 0
| 22
| 1,642
| 956
| 557
| 399
| 140
| 1
|
{-|
Module : Main
Description : hnfs-tester - Nfs (client library) test tool
Copyright : (c) 2014 Arne Redlich <arne.redlich@googlemail.com>
License : LGPL v2.1
Maintainer : Arne Redlich <arne.redlich@googlemail.com>
Stability : experimental
Portability : POSIX
hnfs-tester main.
-}
{-# LANGUAGE DeriveDataTypeable #-}
module Main where
import qualified Async
import qualified Sync
import Base
import qualified BasicTests as BT
import qualified AdvancedTests as AT
import qualified ConduitTests as CT
import qualified System.Nfs as Nfs
import Control.Monad.Trans.Reader
import Data.Proxy
import Data.Typeable (Typeable)
import Test.Tasty
import Test.Tasty.HUnit as HU
import Test.Tasty.Options
mk_test :: Nfs.ServerAddress ->
Nfs.ExportName ->
SyncNfs ->
(ReaderT TestContext IO (), String) ->
TestTree
mk_test addr xprt nfs (assertion, desc) =
HU.testCase desc (withContext $ \ctx ->
runReaderT assertion $ TestContext nfs ctx addr xprt)
sync_tests :: Nfs.ServerAddress -> Nfs.ExportName -> TestTree
sync_tests srv xprt = testGroup "Synchronous interface tests" $
fmap (mk_test srv xprt Sync.nfs) AT.tests
async_tests :: Nfs.ServerAddress -> Nfs.ExportName -> TestTree
async_tests srv xprt = testGroup "Asynchronous interface tests" $
fmap (mk_test srv xprt Async.nfs) AT.tests
conduit_tests :: Nfs.ServerAddress -> Nfs.ExportName -> TestTree
conduit_tests srv xprt = testGroup "Nfs conduit tests" $
fmap (mk_test srv xprt Sync.nfs) CT.tests
-- TODO: make this fail with a nicer error message if server / export are not
-- specified
newtype ServerAddressOpt = ServerAddressOpt Nfs.ServerAddress
deriving (Eq, Show, Ord, Typeable)
instance IsOption ServerAddressOpt where
parseValue s = Just $ ServerAddressOpt s
optionName = return "server"
optionHelp = return "NFS server to connect to"
newtype ExportNameOpt = ExportNameOpt Nfs.ExportName
deriving (Eq, Show, Ord, Typeable)
instance IsOption ExportNameOpt where
parseValue s = Just $ ExportNameOpt s
optionName = return "export"
optionHelp = return "NFS export to mount"
main :: IO ()
main = let ings = includingOptions [ Option (Proxy :: Proxy ServerAddressOpt)
, Option (Proxy :: Proxy ExportNameOpt)
] : defaultIngredients
in
defaultMainWithIngredients ings $
askOption $ \(ServerAddressOpt server) ->
askOption $ \(ExportNameOpt export) ->
testGroup "HNfs tests" [ BT.tests
, sync_tests server export
, async_tests server export
, conduit_tests server export ]
-- Local Variables: **
-- mode: haskell **
-- compile-command: "cd .. && cabal install -v" **
-- End: **
|
aredlich/hnfs
|
hnfs-tester/Main.hs
|
lgpl-2.1
| 2,937
| 0
| 14
| 764
| 614
| 332
| 282
| 55
| 1
|
module Garden where
type Gardener = String
data GardenNF =
Gardenia Gardener
| Daisy Gardener
| Rose Gardener
| Lilac Gardener
deriving (Show)
|
thewoolleyman/haskellbook
|
11/10/maor/garden.hs
|
unlicense
| 159
| 0
| 6
| 39
| 41
| 25
| 16
| 8
| 0
|
module Git.Command.UpdateIndex (run) where
import System.IO
import System.Exit
import System.Console.GetOpt
import Data.List
data Flag = Add | Remove
deriving (Eq, Ord, Show)
flags = [
Option [] ["add"] (NoArg Add) "Add files to the index",
Option [] ["remove"] (NoArg Remove) "Remove files from the index" ]
displayHelp errs = do
hPutStrLn stderr (concat errs ++ usageInfo header flags) >> exitWith (ExitFailure 1)
where header = "Usage: yag update-index [...]"
parse argv = case getOpt Permute flags argv of
(args, xs, []) -> do
return $ nub (concatMap (:[]) args)
-- Errors while parsing the arguments.
(_, _, errs) -> do displayHelp errs
run :: [String] -> IO ()
run args = do
args1 <- parse args
putStrLn $ show args1
if null args1
then exitWith (ExitFailure 1)
else exitWith ExitSuccess
|
wereHamster/yag
|
Git/Command/UpdateIndex.hs
|
unlicense
| 884
| 0
| 15
| 221
| 313
| 162
| 151
| 24
| 2
|
import qualified Data.Vector.Unboxed.Mutable as M
sample1 = do
v <- M.new 10
  M.write v 0 (4 :: Int)
x <- M.read v 0
print x
sample2 = do
v <- M.replicate 10 (3 :: Int)
x <- M.read v 0
print x
main = sample2
|
seckcoder/lang-learn
|
haskell/samples/src/Vector.hs
|
unlicense
| 225
| 0
| 9
| 64
| 116
| 57
| 59
| 11
| 1
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Spark.Core.JoinsSpec where
import Test.Hspec
import Spark.Core.Context
import Spark.Core.Dataset
import Spark.Core.Column
import qualified Spark.Core.ColumnFunctions as C
import Spark.Core.Row
import Spark.Core.Functions
import Spark.Core.SimpleAddSpec(run)
spec :: Spec
spec = do
describe "Path test" $ do
run "test_path1" $ do
let ds1 = dataset [1] :: Dataset Int
let x1 = C.sum (asCol ds1) @@ "x1"
let x2 = ((x1 + 1) @@ "x2") `logicalParents` [untyped ds1]
res <- exec1Def x2
res `shouldBe` 2
-- describe "Join test - join on ints" $ do
-- run "empty_ints1" $ do
-- let ds1 = dataset [(1,2)] :: Dataset (Int, Int)
-- let ds2 = dataset [(1,3)] :: Dataset (Int, Int)
-- let df1 = asDF ds1
-- let df2 = asDF ds2
-- let df = joinInner' (df1/-"_1") (df1/-"_2") (df2/-"_1") (df2/-"_2" @@ "_3")
-- res <- exec1Def' (collect' (asCol' df))
-- res `shouldBe` rowArray [rowArray [IntElement 1, IntElement 2, IntElement 3]]
|
tjhunter/karps
|
haskell/test-integration/Spark/Core/JoinsSpec.hs
|
apache-2.0
| 1,084
| 0
| 20
| 243
| 202
| 116
| 86
| 20
| 1
|
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE Rank2Types, ExistentialQuantification #-}
-- * Type representation, equality and the safe typecast:
-- * the above-the-board version of Data.Typeable
module Typ where
import Control.Monad
import Control.Monad.Error
-- * The language of type representations: first-order and typed
-- It is quite like the language of int/neg/add we have seen before,
-- but it is now typed.
-- It is first order: the language of simple types is first order
class TSYM trepr where
tint :: trepr Int
tarr :: trepr a -> trepr b -> trepr (a->b)
-- * The view interpreter
-- The first interpreter is to view the types
newtype ShowT a = ShowT String
instance TSYM ShowT where
tint = ShowT $ "Int"
tarr (ShowT a) (ShowT b) = ShowT $ "(" ++ a ++ " -> " ++ b ++ ")"
view_t :: ShowT a -> String
view_t (ShowT s) = s
-- * //
-- * Quantifying over the TSYM interpreter
-- This closes the type universe
newtype TQ t = TQ{unTQ :: forall trepr. TSYM trepr => trepr t}
-- TQ is itself an interpreter, the trivial one
instance TSYM TQ where
tint = TQ tint
tarr (TQ a) (TQ b) = TQ (tarr a b)
-- Sample type expressions
tt1 = (tint `tarr` tint) `tarr` tint
-- tt1 :: (TSYM trepr) => trepr ((Int -> Int) -> Int)
tt2 = tint `tarr` (tint `tarr` tint)
-- tt2 :: (TSYM trepr) => trepr (Int -> Int -> Int)
tt1_view = view_t (unTQ tt1)
-- "((Int -> Int) -> Int)"
tt2_view = view_t (unTQ tt2)
-- "(Int -> (Int -> Int))"
-- * //
-- * Show Typ-able expressions
-- * No Show type class constraint!
-- The signature is quite like gshow in a generic programming
-- library such as EMGM or LIGD
show_as :: TQ a -> a -> String
show_as tr a =
case unTQ tr of ShowAs _ f -> f a
-- The implementation of the interpreter ShowAs shows off
-- the technique of accumulating new TQ as we traverse the old
-- one. We shall see more examples later.
-- One is again reminded of attribute grammars.
data ShowAs a = ShowAs (TQ a) (a -> String)
instance TSYM ShowAs where
tint = ShowAs tint show -- as Int
tarr (ShowAs t1 _) (ShowAs t2 _) =
let t = tarr t1 t2 in
ShowAs t (\_ -> "<function of the type " ++
view_t (unTQ t) ++ ">")
tt0_show = show_as tint 5
-- "5"
tt1_show = show_as tt1 undefined
-- "<function of the type ((Int -> Int) -> Int)>"
-- We can't show functional values, but at least we should be
-- able to show their types
-- * //
-- * Type representation
-- * Compare with Data.Typeable.TypeRep
-- It is not a data structure!
data Typ = forall t. Typ (TQ t)
-- * //
-- * Alternative to quantification: copying
-- Before instantiating one interpreter, we fork it.
-- One copy can be instantiated, but the other remains polymorphic
-- Compare with Prolog's copy_term
-- This approach keeps the type universe extensible
-- Again the same pattern: traverse one TQ and build another
-- (another two actually)
data TCOPY trep1 trep2 a = TCOPY (trep1 a) (trep2 a)
instance (TSYM trep1, TSYM trep2)
=> TSYM (TCOPY trep1 trep2)
where
tint = TCOPY tint tint
tarr (TCOPY a1 a2) (TCOPY b1 b2) =
TCOPY (tarr a1 b1) (tarr a2 b2)
-- * //
-- * Equality and safe type cast
-- * c is NOT necessarily a functor or injective!
-- For example, repr is not always a functor
-- I wonder if we can generalize to an arbitrary type function
-- represented by its label lab:
-- newtype EQU a b = EQU (forall lab. Apply lab a -> Apply lab b)
-- That would let us _easily_ show, for example, that
-- EQU (a,b) (c,d) implies EQU a c, for all types a, b, c, d.
newtype EQU a b = EQU{equ_cast:: forall c. c a -> c b}
-- * Leibniz equality is reflexive, symmetric and transitive
-- Here is the constructive proof
refl :: EQU a a
refl = EQU id
-- * An Unusual `functor'
tran :: EQU a u -> EQU u b -> EQU a b
tran au ub = equ_cast ub au
-- Why does this work? We consider (EQU a u) as (EQU a) u,
-- and so instantiate c to be EQU a
newtype FS b a = FS{unFS:: EQU a b}
symm :: EQU a b -> EQU b a
symm equ = unFS . equ_cast equ . FS $ refl
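-- A concrete use of equ_cast (an illustrative addition): a proof EQU a Int
-- converts any container of a into a container of Int by instantiating c to
-- that container, here the list constructor.
castList :: EQU a Int -> [a] -> [Int]
castList equ = equ_cast equ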
-- Useful type-level lambdas, so to speak
newtype F1 t b a = F1{unF1:: EQU t (a->b)}
newtype F2 t a b = F2{unF2:: EQU t (a->b)}
eq_arr :: EQU a1 a2 -> EQU b1 b2 -> EQU (a1->b1) (a2->b2)
eq_arr a1a2 b1b2 =
unF2 . equ_cast b1b2 . F2 . unF1 . equ_cast a1a2 . F1 $ refl
-- How does this work? What is the type of refl above?
-- * //
-- * Decide if (trepr a) represents a type that is equal to some type b
-- Informally, we compare a value that _represents_ a type b
-- against the _type_ b
-- We do that by interpreting trepr a in a particular way
-- * A constructive `deconstructor'
data AsInt a = AsInt (Maybe (EQU a Int))
instance TSYM AsInt where
tint = AsInt $ Just refl
tarr _ _ = AsInt $ Nothing
-- This function proves useful later
as_int :: AsInt a -> c a -> Maybe (c Int)
as_int (AsInt (Just equ)) r = Just $ equ_cast equ r
as_int _ _ = Nothing
-- * Another constructive `deconstructor'
data AsArrow a =
forall b1 b2. AsArrow (TQ a) (Maybe (TQ b1, TQ b2, EQU a (b1->b2)))
instance TSYM AsArrow where
tint = AsArrow tint Nothing
tarr (AsArrow t1 _) (AsArrow t2 _) =
AsArrow (tarr t1 t2) $ Just (t1,t2,refl)
as_arrow :: AsArrow a -> AsArrow a
as_arrow = id
-- More cases could be added later on...
newtype SafeCast a = SafeCast (forall b. TQ b -> Maybe (EQU a b))
instance TSYM SafeCast where
tint = SafeCast $ \tb ->
case unTQ tb of AsInt eq -> fmap symm eq
tarr (SafeCast t1) (SafeCast t2) =
SafeCast $ \tb -> do
AsArrow _ (Just (b1,b2,equ_bb1b2)) <-
return $ as_arrow (unTQ tb)
equ_t1b1 <- t1 b1
equ_t2b2 <- t2 b2
return $ tran (eq_arr equ_t1b1 equ_t2b2) (symm equ_bb1b2)
-- * Cf. Data.Typeable.gcast
-- Data.Typeable.gcast :: (Data.Typeable.Typeable b, Data.Typeable.Typeable a) =>
-- c a -> Maybe (c b)
-- We use our own `Typeable', implemented without
-- invoking GHC internals
safe_gcast :: TQ a -> c a -> TQ b -> Maybe (c b)
safe_gcast (TQ ta) ca tb = cast ta
where cast (SafeCast f) =
maybe Nothing (\equ -> Just (equ_cast equ ca)) $ f tb
-- There is a tantalizing opportunity of making SafeCast extensible
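-- An illustrative cast (in the same spirit as tdn_eval1 below): casting an
-- Id-wrapped Int against the representation tint succeeds, whereas casting
-- it against tt1 would yield Nothing.
_castOk :: Maybe (Id Int)
_castOk = safe_gcast tint (Id (5 :: Int)) tint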
-- * //
-- * Our own version of Data.Dynamic
-- We replace Data.Typeable with TQ a
data Dynamic = forall t. Dynamic (TQ t) t
tdn1 = Dynamic tint 5
tdn2 = Dynamic tt1 ($ 1)
tdn3 = Dynamic (tint `tarr` (tint `tarr` tint)) (*)
tdn_show (Dynamic tr a) = show_as tr a
newtype Id a = Id a
tdn_eval1 (Dynamic tr d) x = do
Id f <- safe_gcast tr (Id d) tt1
return . show $ f x
tdn_eval2 (Dynamic tr d) x y = do
Id f <- safe_gcast tr (Id d) tt2
return . show $ f x y
tdn1_show = tdn_show tdn1
-- "5"
tdn2_show = tdn_show tdn2
-- "<function of the type ((Int -> Int) -> Int)>"
tdn3_show = tdn_show tdn3
-- "<function of the type (Int -> (Int -> Int))>"
tdn1_eval = tdn_eval1 tdn1 (+4)
-- Nothing
tdn2_eval = tdn_eval1 tdn2 (+4)
-- Just "5"
tdn2_eval' = tdn_eval2 tdn2 3 14
-- Nothing
tdn3_eval = tdn_eval2 tdn3 3 14
-- Just "42"
main = do
print tt1_view
print tt2_view
print tt0_show
print tt1_show
print tdn1_show
print tdn2_show
print tdn3_show
print tdn1_eval
print tdn2_eval
print tdn2_eval'
print tdn3_eval
|
egaburov/funstuff
|
Haskell/tytag/codes3/Typ.hs
|
apache-2.0
| 7,320
| 16
| 15
| 1,743
| 1,951
| 1,035
| 916
| 116
| 1
|
import Control.Monad
movies = [("name", Just "Atilla \"The Hun\""), ("occupation", Just "Khan")]
data MovieReview = MovieReview {
revTitle :: String
, revUser :: String
, revReview :: String
} deriving Show
-- Create movie review only if all 3 values (title, user & review) are present
-- simpleReview [("title", Just "Don"), ("review", Just "Don Review"), ("user", Just "me")]
simpleReview :: [(String, Maybe String)] -> Maybe MovieReview
simpleReview aList =
case lookup "title" aList of
Just (Just title@(_:_)) ->
case lookup "review" aList of
Just (Just review@(_:_)) ->
case lookup "user" aList of
Just (Just user@(_:_)) ->
Just (MovieReview title user review)
_ -> Nothing
_ -> Nothing -- no review
_ -> Nothing -- no title
maybeReview aList = do
title <- lookup1 "title" aList
review <- lookup1 "review" aList
user <- lookup1 "user" aList
return $ MovieReview title user review
lookup1 :: String -> [(String, Maybe String)]-> Maybe String
lookup1 key list = case lookup key list of
Just(Just value@(_:_)) -> Just value
_ -> Nothing
liftedReview aList = liftM3 MovieReview (lookup1 "title" aList)
(lookup1 "user" aList)
(lookup1 "review" aList)
-- Here MovieReview has only 3 arguments.
-- When a constructor takes more arguments than the liftMn helpers cover, liftM is not enough.
-- In that case we can use Control.Monad.ap, i.e. generalized lifting.
apReview aList = MovieReview `liftM` lookup1 "title" aList
`ap` lookup1 "user" aList
`ap` lookup1 "review" aList
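-- For comparison, the same review builder in Applicative style (a sketch
-- equivalent to apReview; on GHCs older than 7.10 this additionally needs
-- import Control.Applicative for <$> and <*>):
applicativeReview :: [(String, Maybe String)] -> Maybe MovieReview
applicativeReview aList = MovieReview <$> lookup1 "title" aList
                                      <*> lookup1 "user" aList
                                      <*> lookup1 "review" aList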
|
dongarerahul/lyah
|
chapter11-functorApplicativeMonad/MovieReview.hs
|
apache-2.0
| 1,748
| 3
| 16
| 536
| 458
| 237
| 221
| 34
| 4
|
{-# LANGUAGE MultiParamTypeClasses #-}
module Simulate where
import Projects
import Data.Random
import Data.Random.Distribution.Triangular
import Control.Monad
data Report = Report [ProjectCompletion] deriving (Show)
data ProjectCompletion = ProjectCompletion
{
project :: Project
, completionTimes :: [Double]
} deriving (Show)
sampleSize :: Int
sampleSize = 100000
simulate :: [Project] -> Report
simulate = undefined
estimate :: MonadRandom m => Project -> m [Double]
estimate p = replicateM sampleSize (sample $ pdf p)
pdf :: Project -> RVar Double
pdf p = floatingTriangular
(bestCaseEstimate p)
(mostLikelyEstimate p)
(worstCaseEstimate p)
normalPair :: RVar (Double,Double)
normalPair = do
u <- stdUniform
t <- stdUniform
let r = sqrt (-2 * log u)
theta = (2 * pi) * t
x = r * cos theta
y = r * sin theta
return (x,y)
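-- normalPair above is the Box-Muller transform, so both components are
-- independent standard normal samples. A sketch of drawing a batch of pairs
-- (assuming the same Data.Random machinery used by 'estimate'):
sampleNormalPairs :: MonadRandom m => Int -> m [(Double, Double)]
sampleNormalPairs n = replicateM n (sample normalPair)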
|
fffej/haskellprojects
|
project-simulator/src/Simulate.hs
|
bsd-2-clause
| 1,026
| 0
| 13
| 328
| 301
| 162
| 139
| 31
| 1
|
module Main where
import Data.Array.Repa (Array, DIM2, computeP)
import Data.Array.Repa.Repr.ForeignPtr (F)
import System.Directory (getTemporaryDirectory)
import System.Exit (exitFailure, exitSuccess)
import System.FilePath ((</>))
import Data.Array.Repa.IO.Sndfile (readSF, writeSF)
import Data.Array.Repa.IO.Sndfile.Examples (genSine, waveMonoPcm16)
main :: IO ()
main = do
tmpdir <- getTemporaryDirectory
let outFile = tmpdir </> "sin440.wav"
dur, frq, sr :: Num a => a
dur = 3
frq = 440
sr = 48000
fmt = waveMonoPcm16 (dur * sr)
arr <- computeP (genSine dur frq) :: IO (Array F DIM2 Double)
writeSF outFile fmt arr
(fmt', _arr') <- asTypeOf (fmt, arr) `fmap` readSF outFile
if fmt' == fmt then exitSuccess else exitFailure
|
8c6794b6/repa-sndfile
|
exec/tests.hs
|
bsd-3-clause
| 812
| 0
| 12
| 185
| 273
| 157
| 116
| 21
| 2
|
-- Compiler Toolkit: miscellaneous utility routines
--
-- Author : Manuel M. T. Chakravarty
-- Created: 8 February 95
--
-- Version $Revision: 1.12 $ from $Date: 2000/02/28 06:28:59 $
--
-- Copyright (c) [1995..2000], Manuel M. T. Chakravarty
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Library General Public
-- License as published by the Free Software Foundation; either
-- version 2 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Library General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This module provides miscellaneous utility routines used in different parts
-- of the Compiler Toolkit.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
--- TODO ----------------------------------------------------------------------
--
module Text.CTK.Utils (sort, sortBy, lookupBy, indentMultilineString, quantifySubject,
ordinal, Tag(..), mapMaybeM, mapMaybeM_, mapEitherM, mapEitherM_)
where
import Data.List (find)
-- list operations
-- ---------------
-- naive sort for a list whose elements are in the Ord class (EXPORTED)
--
sort :: Ord a => [a] -> [a]
sort [] = []
sort (m:l) = (sort . filter (< m)) l ++ [m] ++ (sort . filter (>= m)) l
-- naive sort for a list with explicit ordering relation (smaller than)
-- (EXPORTED)
--
sortBy :: (a -> a -> Bool) -> [a] -> [a]
sortBy _ [] = []
sortBy smaller (m:l) = (sortBy smaller . filter (`smaller` m)) l
++ [m]
++ (sortBy smaller . filter (not . (`smaller` m))) l
-- generic lookup
--
lookupBy :: (a -> a -> Bool) -> a -> [(a, b)] -> Maybe b
lookupBy eq x = fmap snd . find (eq x . fst)
-- string operations
-- -----------------
-- string manipulation
--
-- indent the given multiline text by the given number of spaces
--
indentMultilineString :: Int -> String -> String
indentMultilineString n = unlines . (map (spaces++)) . lines
where
spaces = take n (repeat ' ')
-- aux. routines for output
--
-- given a number and a string containing the quantified subject, yields two
-- strings; one contains the quantified subject and the other contains ``is''
-- or ``are'' depending on the quantification (EXPORTED)
--
quantifySubject :: Int -> String -> (String, String)
quantifySubject no subj = (noToStr no ++ " " ++ subj
++ (if plural then "s" else ""),
if plural then "are" else "is")
where
plural = (no /= 1)
noToStr 0 = "no"
noToStr 1 = "one"
noToStr 2 = "two"
noToStr 3 = "three"
noToStr 4 = "four"
noToStr 5 = "five"
noToStr 6 = "six"
noToStr 7 = "seven"
noToStr 8 = "eight"
noToStr 9 = "nine"
noToStr 10 = "ten"
noToStr 11 = "eleven"
noToStr 12 = "twelve"
noToStr no = show no
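-- examples (derived from the definition above):
--   quantifySubject 1 "file" == ("one file"   , "is" )
--   quantifySubject 3 "file" == ("three files", "are")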
-- stringify an ordinal number (must not be negative) (EXPORTED)
--
ordinal :: Int -> String
ordinal n = if n < 0
then
error "FATAL ERROR: Utilis: ordinal: Negative number!"
else
case (n `mod` 10) of
1 | n /= 11 -> show n ++ "st"
2 | n /= 12 -> show n ++ "nd"
3 | n /= 13 -> show n ++ "rd"
_ -> show n ++ "th"
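-- e.g. ordinal 1 == "1st", ordinal 11 == "11th", ordinal 22 == "22nd"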
-- tags
-- ----
-- tag values of a type define a mapping that collapses values onto integers,
-- so that they can be identified cheaply in comparisons etc.
--
class Tag a where
tag :: a -> Int
-- monad operations
-- ----------------
-- maps some monad operation into a `Maybe', yielding a monad
-- providing the mapped `Maybe' as its result (EXPORTED)
--
mapMaybeM :: Monad m
=> (a -> m b) -> Maybe a -> m (Maybe b)
mapMaybeM m Nothing = return Nothing
mapMaybeM m (Just a) = m a >>= \r -> return (Just r)
-- like above, but ignoring the result (EXPORTED)
--
mapMaybeM_ :: Monad m => (a -> m b) -> Maybe a -> m ()
mapMaybeM_ m x = mapMaybeM m x >> return ()
-- maps monad operations into an `Either', yielding a monad
-- providing the mapped `Either' as its result (EXPORTED)
--
mapEitherM :: Monad m
=> (a -> m c)
-> (b -> m d)
-> Either a b
-> m (Either c d)
mapEitherM m n (Left x) = m x >>= \r -> return (Left r)
mapEitherM m n (Right y) = n y >>= \r -> return (Right r)
-- like above, but ignoring the result (EXPORTED)
--
mapEitherM_ :: Monad m => (a -> m c) -> (b -> m d) -> Either a b -> m ()
mapEitherM_ m n x = mapEitherM m n x >> return ()
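-- Small usage sketches for the monadic maps above (illustrative only):
--
--   mapMaybeM   print          (Just 3)    -- prints 3,    yields Just ()
--   mapEitherM_ putStrLn print (Left "hi") -- prints "hi", yields ()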
|
mwotton/ctkl
|
src/Text/CTK/Utils.hs
|
bsd-3-clause
| 5,118
| 0
| 12
| 1,519
| 1,187
| 643
| 544
| 60
| 16
|
--------------------------------------------------------------------------------
module Copilot.Kind
( module Copilot.Kind.Prover
, module Copilot.Kind.Prove
, module Copilot.Kind.ProofScheme
--, module Copilot.Kind.Lib
) where
import Copilot.Kind.ProofScheme
import Copilot.Kind.Prove
import Copilot.Kind.Prover
-- import Copilot.Kind.Lib
--------------------------------------------------------------------------------
|
jonathan-laurent/copilot-kind
|
src/Copilot/Kind.hs
|
bsd-3-clause
| 439
| 0
| 5
| 45
| 51
| 36
| 15
| 7
| 0
|
module Config (
currentBranch,
getGitBranch
) where
import Control.Monad
import Data.List
import qualified Data.Text as T
import GHC.IO.Exception
import System.Directory
import System.IO.Unsafe
import System.Process
import SubMods
import Tools
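-- | The branch that every submodule is currently on. Computed once (via
-- 'unsafePerformIO') by asking git in each submodule directory; raises an
-- error if the submodules disagree.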
currentBranch::String
currentBranch = unsafePerformIO $ do
before <- getCurrentDirectory
rootAndBranches <-
forM (map modDir subMods) $ \modRoot -> do
branch <- inDir modRoot $ getGitBranch
return (modRoot, branch)
let branches = map snd rootAndBranches
case nub branches of
[branch] -> return branch
_ -> error $ "Your submodules are in different branches, mgit can not work properly in this configuration" -- ++ unlines (map show rootAndBranches)
getGitBranch::IO String
getGitBranch = do
(exitCode, path, stderr) <-
readProcessWithExitCode "git" ["symbolic-ref", "--short", "-q", "HEAD"] ""
case exitCode of
ExitFailure _ -> return $ "HEAD detached"
_ -> return $ T.unpack $ T.strip $ T.pack path
getGitTag::IO (Maybe String)
getGitTag = do
(exitCode, tag, stderr) <-
readProcessWithExitCode "git" ["describe", "--tags"] ""
case exitCode of
ExitFailure _ -> return Nothing
_ -> return $ Just $ T.unpack $ T.strip $ T.pack tag
|
blockapps/mgit
|
src/Config.hs
|
bsd-3-clause
| 1,344
| 0
| 15
| 336
| 372
| 194
| 178
| 37
| 2
|
{-# LANGUAGE GADTs, StandaloneDeriving #-}
module LTS where
import MTBDD ( MTBDD, values, (||), unitWithVars )
import Control.DeepSeq ( NFData(..) )
import qualified Data.Set as S ( union, empty, singleton )
import Data.Set ( Set )
import qualified Data.Map.Strict as M ( keysSet, foldr, insertWith )
import Data.Map.Strict ( Map )
import MapSetUtils ( bigUnion )
type LTSState = Int
-- A 'LTSTransitionsBDD' is a Map from states @s@ to (set-valued) MTBDDs over some
-- label component type, @l@.
type LTSTransitionsBDD s l = Map s (TransitionBDD s l)
type TransitionBDD s l = MTBDD l (Set s)
data LTS s l where
LTS :: (Ord s, Ord l) => LTSTransitionsBDD s l -> LTS s l
deriving instance (Eq s, Eq l) => Eq (LTS s l)
deriving instance (Show s, Show l) => Show (LTS s l)
instance (NFData s) => NFData (LTS s l) where
rnf (LTS m) = rnf m
-- | Given an LTS, extract the set of states. N.B. a state is only considered
-- to be part of an LTS if that state appears as a source or target of some
-- transition of the LTS.
statesLTS :: LTS s l -> Set s
statesLTS (LTS trans) = M.keysSet trans `S.union`
M.foldr (\bdd -> S.union (bigUnion $ MTBDD.values bdd)) S.empty trans
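-- | Add a single transition from @src@ to @tgt@, labelled by the given
-- (label, polarity) assignment. Existing transitions leaving @src@ are
-- preserved: the new MTBDD is combined with any previous one via (MTBDD.||).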
addTransition :: LTS s l -> s -> [(l, Bool)] -> s -> LTS s l
addTransition (LTS transMap) src lbls tgt = LTS transMap'
where
transMap' = M.insertWith (MTBDD.||) src bdd transMap
bdd = MTBDD.unitWithVars S.empty lbls (S.singleton tgt)
|
owst/Penrose
|
src/LTS.hs
|
bsd-3-clause
| 1,428
| 0
| 13
| 289
| 490
| 274
| 216
| 25
| 1
|
module Data.SimPOL.PersonalData where
import Semantics.POL.HumanReadable
import Text.Printf
data PersonalData = PersonalData
{ attribute :: String
, value :: String
} deriving (Eq, Ord)
instance Show PersonalData where
show a = printf "%s: %s" (attribute a) (value a)
instance PrettyPrintable PersonalData where
pretty = text . show
-- vim: ft=haskell:sts=2:sw=2:et:nu:ai
|
ZjMNZHgG5jMXw/privacy-option-simpol
|
Data/SimPOL/PersonalData.hs
|
bsd-3-clause
| 386
| 0
| 8
| 63
| 103
| 58
| 45
| 11
| 0
|
-- | This module provides functions for processing 'Text' values.
module CommonMark.Util.Text
(
-- * Tab and NUL replacement
replaceNullChars
, detab
-- * Whitespace
, stripAsciiSpaces
, stripAsciiSpacesAndNewlines
, collapseWhitespace
-- * ATX headers
, stripATXSuffix
) where
import Data.Text ( Text )
import qualified Data.Text as T
import Data.CharSet ( CharSet )
import qualified Data.CharSet as CS
import qualified Data.CharSet.Unicode.Category as CS ( punctuation, space )
import CommonMark.Util.Char ( isWhitespace, replacementChar )
-- | Replace null characters (U+0000) with the replacement character (U+FFFD).
replaceNullChars :: Text -> Text
replaceNullChars = T.map replaceNUL
where
replaceNUL c
| c == '\NUL' = replacementChar
| otherwise = c
-- | Converts tabs to spaces using a 4-space tab stop. Intended to operate on
-- a single line of input.
detab :: Text -> Text
detab = T.concat . pad . T.split (== '\t')
where
pad [] = []
pad [t] = [t]
pad (t : ts@(_ : _)) = T.justifyLeft n ' ' t : pad ts
where
tl = T.length t
n = tl - (tl `rem` tabw) + tabw
tabw = 4
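-- e.g. detab "a\tbc" == "a   bc"  (the 'a' is padded out to the next 4-column tab stop)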
-- | Remove leading and trailing ASCII spaces from a string.
stripAsciiSpaces :: Text -> Text
stripAsciiSpaces = T.dropAround (== ' ')
-- | Remove leading and trailing ASCII spaces and newlines from a string.
stripAsciiSpacesAndNewlines :: Text -> Text
stripAsciiSpacesAndNewlines = T.dropAround (\c -> c == ' ' || c == '\n')
-- | Collapse each whitespace span to a single ASCII space.
collapseWhitespace :: Text -> Text
collapseWhitespace = T.intercalate (T.singleton ' ') . codeSpanWords
-- | Breaks a 'Text' up into a list of words, delimited by 'Char's
-- representing whitespace (as defined by the CommonMark spec).
-- Similar but different from 'Data.Text.words'.
codeSpanWords :: Text -> [Text]
codeSpanWords = go
where
go t | T.null word = []
| otherwise = word : go rest
where (word, rest) = T.break isWhitespace $
T.dropWhile isWhitespace t
{-# INLINE codeSpanWords #-}
-- | @stripATXSuffix t@ strips the ATX-header suffix (if any) from @t@.
stripATXSuffix :: Text -> Text
stripATXSuffix t
| T.null t' = t
| (' ' /=) . T.last $ t' = t
| otherwise = T.init t'
where
t' = T.dropWhileEnd (== '#') .
T.dropWhileEnd (== ' ') $ t
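-- e.g. stripATXSuffix "foo ###" == "foo"
--      stripATXSuffix "foo#"    == "foo#"   -- no space before the '#'s, so nothing is stripped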
|
Jubobs/CommonMark-WIP
|
src/CommonMark/Util/Text.hs
|
bsd-3-clause
| 2,496
| 0
| 12
| 664
| 576
| 319
| 257
| 47
| 3
|
module Text.StrToHex
( strToHexUtf8
, strToHexUtf16LE
, strToHexUtf16BE
, strToHexUtf32LE
, strToHexUtf32BE
) where
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Lazy as LB
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LTE
strToHex :: (LT.Text -> LB.ByteString) -> Text -> LB.ByteString
strToHex encode = BB.toLazyByteString . LB.foldr (\w8 b -> BB.word8HexFixed w8 <> b) mempty . encode . LT.fromStrict
strToHexUtf8 :: Text -> LB.ByteString
strToHexUtf8 = strToHex LTE.encodeUtf8
strToHexUtf16LE :: Text -> LB.ByteString
strToHexUtf16LE = strToHex LTE.encodeUtf16LE
strToHexUtf16BE :: Text -> LB.ByteString
strToHexUtf16BE = strToHex LTE.encodeUtf16BE
strToHexUtf32LE :: Text -> LB.ByteString
strToHexUtf32LE = strToHex LTE.encodeUtf32LE
strToHexUtf32BE :: Text -> LB.ByteString
strToHexUtf32BE = strToHex LTE.encodeUtf32BE
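-- Illustrative outputs (lazy ByteStrings of ASCII hex digits):
--
--   strToHexUtf8    "A" == "41"
--   strToHexUtf16BE "A" == "0041"
--   strToHexUtf16LE "A" == "4100"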
|
siphilia/str2hex
|
src/Text/StrToHex.hs
|
bsd-3-clause
| 1,016
| 0
| 13
| 189
| 256
| 147
| 109
| 24
| 1
|
-- | Generic equality.
--
-- This module contains a generic equality function defined using
-- @generics-sop@.
--
module Generics.SOP.Eq (geq) where
import Data.Function
import Generics.SOP
-- | Generic equality.
--
-- This function reimplements the built-in generic equality that
-- you get by using @deriving Eq@.
--
-- Assuming you have a 'Generics.SOP.Generic' instance for a
-- datatype @T@, you can use 'geq' as follows:
--
-- > instance Eq T where
-- > (==) = geq
--
geq :: (Generic a, All2 Eq (Code a)) => a -> a -> Bool
geq = go `on` from
where
go :: forall xss. (All2 Eq xss, All SListI xss) => SOP I xss -> SOP I xss -> Bool
go (SOP (Z xs)) (SOP (Z ys)) = and . hcollapse $ hcliftA2 p eq xs ys
go (SOP (S xss)) (SOP (S yss)) = go (SOP xss) (SOP yss)
go _ _ = False
p :: Proxy Eq
p = Proxy
eq :: forall (a :: *). Eq a => I a -> I a -> K Bool a
eq (I a) (I b) = K (a == b)
|
well-typed/basic-sop
|
src/Generics/SOP/Eq.hs
|
bsd-3-clause
| 948
| 0
| 11
| 255
| 333
| 180
| 153
| -1
| -1
|
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Examples.Queries.CaseSplit
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- A couple of demonstrations for the caseSplit tactic.
-----------------------------------------------------------------------------
module Data.SBV.Examples.Queries.CaseSplit where
import Data.SBV
import Data.SBV.Control
-- | A simple floating-point problem, but we do the sat-analysis via a case-split.
-- Due to the nature of floating-point numbers, a case-split on the characteristics
-- of the number (such as NaN, negative-zero, etc.) is most suitable.
--
-- We have:
-- >>> csDemo1
-- Case fpIsNegativeZero: Starting
-- Case fpIsNegativeZero: Unsatisfiable
-- Case fpIsPositiveZero: Starting
-- Case fpIsPositiveZero: Unsatisfiable
-- Case fpIsNormal: Starting
-- Case fpIsNormal: Unsatisfiable
-- Case fpIsSubnormal: Starting
-- Case fpIsSubnormal: Unsatisfiable
-- Case fpIsPoint: Starting
-- Case fpIsPoint: Unsatisfiable
-- Case fpIsNaN: Starting
-- Case fpIsNaN: Satisfiable
-- ("fpIsNaN",NaN)
csDemo1 :: IO (String, Float)
csDemo1 = runSMT $ do
x <- sFloat "x"
constrain $ x ./= x -- yes, in the FP land, this does hold
query $ do mbR <- caseSplit True [ ("fpIsNegativeZero", fpIsNegativeZero x)
, ("fpIsPositiveZero", fpIsPositiveZero x)
, ("fpIsNormal", fpIsNormal x)
, ("fpIsSubnormal", fpIsSubnormal x)
, ("fpIsPoint", fpIsPoint x)
, ("fpIsNaN", fpIsNaN x)
]
case mbR of
Nothing -> error "Cannot find a FP number x such that x == x + 1" -- Won't happen!
Just (s, _) -> do xv <- getValue x
return (s, xv)
-- | Demonstrates the "coverage" case.
--
-- We have:
-- >>> csDemo2
-- Case negative: Starting
-- Case negative: Unsatisfiable
-- Case less than 8: Starting
-- Case less than 8: Unsatisfiable
-- Case Coverage: Starting
-- Case Coverage: Satisfiable
-- ("Coverage",10)
csDemo2 :: IO (String, Integer)
csDemo2 = runSMT $ do
x <- sInteger "x"
constrain $ x .== 10
query $ do mbR <- caseSplit True [ ("negative" , x .< 0)
, ("less than 8", x .< 8)
]
case mbR of
Nothing -> error "Cannot find a solution!" -- Won't happen!
Just (s, _) -> do xv <- getValue x
return (s, xv)
|
josefs/sbv
|
Data/SBV/Examples/Queries/CaseSplit.hs
|
bsd-3-clause
| 2,882
| 0
| 17
| 987
| 393
| 225
| 168
| 27
| 2
|
import WASH.CGI.CGI
import Control.Monad
main =
run page1
page1 =
do at <- table_io $ return [["Ratio"], ["Complex"], ["Numeric"], ["Ix"], ["Array"], ["List"], ["Maybe"], ["Char"], ["Monad"], ["IO"], ["Directory"], ["System"], ["Time"], ["Locale"], ["CPUTime"], ["Random"]]
standardQuery "Haskell Library Modules" $
do sg <- selectionGroup
let makeRow row = tr (mapM (makeCol row) [0..(as_cols at -1)])
makeCol row col = td (getText at row col)
makeRow' row = selectionDisplay sg at row dispRow
dispRow button texts = tr (do td button
sequence (Prelude.map td texts))
p $ table (mapM makeRow' [0..(as_rows at -1)])
submit sg page2 empty
page2 sg =
let lib = unAR (value sg) in
standardQuery "Selected Haskell Library Module" $
do text "You selected "
text (show lib)
page2a lib =
standardQuery "Directly Selected Haskell Library Module" $
do text "You selected "
text (show lib)
|
nh2/WashNGo
|
Examples/old/UseAT.hs
|
bsd-3-clause
| 971
| 9
| 14
| 227
| 400
| 208
| 192
| 24
| 1
|
module Main where
import JSON
import System.Process
import System.Exit
import System.Directory
import System.FilePath
import Data.Maybe (fromMaybe)
import System.IO
import Data.Monoid (mempty)
import System.IO.Temp (withSystemTempDirectory)
import System.Process (rawSystem)
import Text.Printf
import qualified Data.Aeson as Aeson
import Text.Pandoc.Definition
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.ByteString.Char8 as B
import qualified Text.Pandoc.UTF8 as UTF8
import Text.Pandoc.Shared (normalize)
import Text.Pandoc.Process (pipeProcess)
import Text.Pandoc.Options (WriterOptions(..))
import qualified Data.Yaml as Yaml
import Text.Pandoc (writeNative, writeHtmlString, readNative, def)
import Text.CSL.Pandoc (processCites')
import Data.List (isSuffixOf)
import System.Environment
import Control.Monad (when)
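-- Test driver: runs the tests/*.in.native citeproc cases and the
-- tests/biblio2yaml round-trip cases, comparing actual against expected
-- output, and exits non-zero if any test fails or errors. Passing
-- --regenerate rewrites the expected native output of failing citeproc cases.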
main :: IO ()
main = do
args <- getArgs
let regenerate = args == ["--regenerate"]
testnames <- fmap (map (dropExtension . takeBaseName) .
filter (".in.native" `isSuffixOf`)) $
getDirectoryContents "tests"
citeprocTests <- mapM (testCase regenerate) testnames
fs <- filter (\f -> takeExtension f `elem` [".bibtex",".biblatex"])
`fmap` getDirectoryContents "tests/biblio2yaml"
biblio2yamlTests <- mapM biblio2yamlTest fs
let allTests = citeprocTests ++ biblio2yamlTests
let numpasses = length $ filter (== Passed) allTests
let numskipped = length $ filter (== Skipped) allTests
let numfailures = length $ filter (== Failed) allTests
let numerrors = length $ filter (== Errored) allTests
putStrLn $ show numpasses ++ " passed; " ++ show numfailures ++
" failed; " ++ show numskipped ++ " skipped; " ++
show numerrors ++ " errored."
exitWith $ if numfailures == 0 && numerrors == 0
then ExitSuccess
else ExitFailure $ numfailures + numerrors
err :: String -> IO ()
err = hPutStrLn stderr
data TestResult =
Passed
| Skipped
| Failed
| Errored
deriving (Show, Eq)
testCase :: Bool -> String -> IO TestResult
testCase regenerate csl = do
hPutStr stderr $ "[" ++ csl ++ ".in.native] "
indataNative <- UTF8.readFile $ "tests/" ++ csl ++ ".in.native"
expectedNative <- UTF8.readFile $ "tests/" ++ csl ++ ".expected.native"
let jsonIn = Aeson.encode $ (read indataNative :: Pandoc)
let expectedDoc = normalize $ read expectedNative
testProgPath <- getExecutablePath
let pandocCiteprocPath = takeDirectory testProgPath </> ".." </>
"pandoc-citeproc" </> "pandoc-citeproc"
(ec, jsonOut, errout) <- pipeProcess
(Just [("LANG","en_US.UTF-8"),("HOME",".")])
pandocCiteprocPath
[] jsonIn
if ec == ExitSuccess
then do
let outDoc = normalize $ fromMaybe mempty $ Aeson.decode $ jsonOut
if outDoc == expectedDoc
then err "PASSED" >> return Passed
else do
err $ "FAILED"
showDiff (writeNative def expectedDoc) (writeNative def outDoc)
when regenerate $
UTF8.writeFile ("tests/" ++ csl ++ ".expected.native") $
writeNative def{ writerStandalone = True } outDoc
return Failed
else do
err "ERROR"
err $ "Error status " ++ show ec
err $ UTF8.toStringLazy errout
return Errored
showDiff :: String -> String -> IO ()
showDiff expected result =
withSystemTempDirectory "test-pandoc-citeproc-XXX" $ \fp -> do
let expectedf = fp </> "expected"
let actualf = fp </> "actual"
UTF8.writeFile expectedf expected
UTF8.writeFile actualf result
oldDir <- getCurrentDirectory
setCurrentDirectory fp
rawSystem "diff" ["-U1","expected","actual"]
setCurrentDirectory oldDir
biblio2yamlTest :: String -> IO TestResult
biblio2yamlTest fp = do
hPutStr stderr $ "[biblio2yaml/" ++ fp ++ "] "
let yamlf = "tests/biblio2yaml/" ++ fp
raw <- UTF8.readFile yamlf
let yamlStart = "---"
let (biblines, yamllines) = break (== yamlStart) $ lines raw
let bib = unlines biblines
let expected = unlines yamllines
testProgPath <- getExecutablePath
let pandocCiteprocPath = takeDirectory testProgPath </> ".." </>
"pandoc-citeproc" </> "pandoc-citeproc"
(ec, result', errout) <- pipeProcess
(Just [("LANG","en_US.UTF-8"),("HOME",".")])
pandocCiteprocPath
["--bib2yaml", "-f", drop 1 $ takeExtension fp]
(UTF8.fromStringLazy bib)
let result = UTF8.toStringLazy result'
if ec == ExitSuccess
then do
if expected == result
then err "PASSED" >> return Passed
else do
err $ "FAILED"
showDiff expected result
return Failed
else do
err "ERROR"
err $ "Error status " ++ show ec
err $ UTF8.toStringLazy errout
return Errored
|
jkr/pandoc-citeproc
|
tests/test-pandoc-citeproc.hs
|
bsd-3-clause
| 4,955
| 0
| 18
| 1,232
| 1,465
| 743
| 722
| 129
| 3
|
{-# LANGUAGE TupleSections #-}
module States.ResizingPlatform where
#include "Utils.cpp"
import Control.Applicative ((<$>))
import Data.Composition ((.:))
import Gamgine.Control ((?))
import qualified Gamgine.Math.Vect as V
import qualified Gamgine.Math.Box as B
import qualified Gamgine.State.State as ST
import qualified GameData.Level as LV
import qualified GameData.Data as GD
import qualified GameData.Entity as E
import qualified GameData.Platform as PF
import qualified Entity.Id as EI
import qualified Entity.Position as EP
import qualified States.GameRunning as GR
import qualified States.CreatingPlatform as CP
IMPORT_LENS_AS_LE
data ResizingPlatform = ResizingPlatform {
entityId :: Maybe Int,
minPt :: V.Vect,
maxPt :: V.Vect,
basePos :: V.Vect
}
-- | the state for resizing a platform during edit mode
mkResizingPlatformState :: ST.State GD.Data
mkResizingPlatformState =
mkState $ ResizingPlatform Nothing V.nullVec V.nullVec V.nullVec
where
mkState rp = ST.State {
ST.enter = \mp gd ->
case LV.findEntityAt mp $ LE.getL GD.currentLevelL gd of
Just e@E.Platform {} ->
let pos = EP.position e
bound = E.platformBound e
minPt = pos + B.minPt bound
maxPt = pos + B.maxPt bound
in Just (gd, mkState (rp {entityId = Just $ EI.entityId e,
minPt = minPt,
maxPt = maxPt,
basePos = mp}))
_ -> Nothing,
ST.leave = (, mkState (rp {entityId = Nothing, minPt = V.nullVec, maxPt = V.nullVec, basePos = V.nullVec})),
ST.update = (, mkState rp) . GR.update,
ST.render = ((, mkState rp) <$>) .: GR.render,
ST.keyEvent = (, mkState rp) .: flip const,
ST.mouseEvent = (, mkState rp) .: flip const,
ST.mouseMoved = \mp gd ->
case rp of
ResizingPlatform {entityId = Just id, minPt = minPt, maxPt = maxPt, basePos = bp} ->
let diffVec = mp - bp
in (E.eMap (\e -> id == EI.entityId e ? CP.updatePosAndBound minPt (maxPt + diffVec) e $ e) gd,
mkState rp)
_ -> (gd, mkState rp)
}
|
dan-t/layers
|
src/States/ResizingPlatform.hs
|
bsd-3-clause
| 2,436
| 0
| 23
| 854
| 678
| 395
| 283
| -1
| -1
|
{-# LANGUAGE CPP #-}
#if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.IntMap.Strict
-- Copyright : (c) Daan Leijen 2002
-- (c) Andriy Palamarchuk 2008
-- License : BSD-style
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of maps from integer keys to values
-- (dictionaries).
--
-- The API of this module is strict in both the keys and the values.
-- If you need value-lazy maps, use "Data.IntMap.Lazy" instead.
-- The 'IntMap' type itself is shared between the lazy and strict modules,
-- meaning that the same 'IntMap' value can be passed to functions in
-- both modules (although that is rarely needed).
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
-- > import Data.IntMap.Strict (IntMap)
-- > import qualified Data.IntMap.Strict as IntMap
--
-- The implementation is based on /big-endian patricia trees/. This data
-- structure performs especially well on binary operations like 'union'
-- and 'intersection'. However, my benchmarks show that it is also
-- (much) faster on insertions and deletions when compared to a generic
-- size-balanced map implementation (see "Data.Map").
--
-- * Chris Okasaki and Andy Gill, \"/Fast Mergeable Integer Maps/\",
-- Workshop on ML, September 1998, pages 77-86,
-- <http://citeseer.ist.psu.edu/okasaki98fast.html>
--
-- * D.R. Morrison, \"/PATRICIA -- Practical Algorithm To Retrieve
-- Information Coded In Alphanumeric/\", Journal of the ACM, 15(4),
-- October 1968, pages 514-534.
--
-- Operation comments contain the operation time complexity in
-- the Big-O notation <http://en.wikipedia.org/wiki/Big_O_notation>.
-- Many operations have a worst-case complexity of /O(min(n,W))/.
-- This means that the operation can become linear in the number of
-- elements with a maximum of /W/ -- the number of bits in an 'Int'
-- (32 or 64).
--
-- Be aware that the 'Functor', 'Traversable' and 'Data' instances
-- are the same as for the "Data.IntMap.Lazy" module, so if they are used
-- on strict maps, the resulting maps will be lazy.
-----------------------------------------------------------------------------
-- See the notes at the beginning of Data.IntMap.Base.
module Data.IntMap.Strict (
-- * Strictness properties
-- $strictness
-- * Map type
#if !defined(TESTING)
IntMap, Key -- instance Eq,Show
#else
IntMap(..), Key -- instance Eq,Show
#endif
-- * Operators
, (!), (\\)
-- * Query
, null
, size
, member
, notMember
, lookup
, findWithDefault
, lookupLT
, lookupGT
, lookupLE
, lookupGE
-- * Construction
, empty
, singleton
-- ** Insertion
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
-- ** Delete\/Update
, delete
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
-- * Combine
-- ** Union
, union
, unionWith
, unionWithKey
, unions
, unionsWith
-- ** Difference
, difference
, differenceWith
, differenceWithKey
-- ** Intersection
, intersection
, intersectionWith
, intersectionWithKey
-- ** Universal combining function
, mergeWithKey
-- * Traversal
-- ** Map
, map
, mapWithKey
, traverseWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeys
, mapKeysWith
, mapKeysMonotonic
-- * Folds
, foldr
, foldl
, foldrWithKey
, foldlWithKey
, foldMapWithKey
-- ** Strict folds
, foldr'
, foldl'
, foldrWithKey'
, foldlWithKey'
-- * Conversion
, elems
, keys
, assocs
, keysSet
, fromSet
-- ** Lists
, toList
, fromList
, fromListWith
, fromListWithKey
-- ** Ordered lists
, toAscList
, toDescList
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
-- * Filter
, filter
, filterWithKey
, partition
, partitionWithKey
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, split
, splitLookup
-- * Submap
, isSubmapOf, isSubmapOfBy
, isProperSubmapOf, isProperSubmapOfBy
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
, minView
, maxView
, minViewWithKey
, maxViewWithKey
-- * Debugging
, showTree
, showTreeWith
) where
import Prelude hiding (lookup,map,filter,foldr,foldl,null)
import Data.Bits
import Data.IntMap.Base hiding
( findWithDefault
, singleton
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
, unionsWith
, unionWith
, unionWithKey
, differenceWith
, differenceWithKey
, intersectionWith
, intersectionWithKey
, mergeWithKey
, updateMinWithKey
, updateMaxWithKey
, updateMax
, updateMin
, map
, mapWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeysWith
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, fromSet
, fromList
, fromListWith
, fromListWithKey
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
)
import Data.BitUtil
import qualified Data.IntSet.Base as IntSet
import Data.StrictPair
-- $strictness
--
-- This module satisfies the following strictness properties:
--
-- 1. Key and value arguments are evaluated to WHNF;
--
-- 2. Keys and values are evaluated to WHNF before they are stored in
-- the map.
--
-- Here are some examples that illustrate the first property:
--
-- > insertWith (\ new old -> old) k undefined m == undefined
-- > delete undefined m == undefined
--
-- Here are some examples that illustrate the second property:
--
-- > map (\ v -> undefined) m == undefined -- m is not empty
-- > mapKeys (\ k -> undefined) m == undefined -- m is not empty
{--------------------------------------------------------------------
Query
--------------------------------------------------------------------}
-- | /O(min(n,W))/. The expression @('findWithDefault' def k map)@
-- returns the value at key @k@ or returns @def@ when the key is not an
-- element of the map.
--
-- > findWithDefault 'x' 1 (fromList [(5,'a'), (3,'b')]) == 'x'
-- > findWithDefault 'x' 5 (fromList [(5,'a'), (3,'b')]) == 'a'
-- See Note [Local 'go' functions and capturing] in Data.IntMap.Base.
findWithDefault :: a -> Key -> IntMap a -> a
findWithDefault def k = def `seq` k `seq` go
where
go (Bin p m l r) | nomatch k p m = def
| zero k m = go l
| otherwise = go r
go (Tip kx x) | k == kx = x
| otherwise = def
go Nil = def
{--------------------------------------------------------------------
Construction
--------------------------------------------------------------------}
-- | /O(1)/. A map of one element.
--
-- > singleton 1 'a' == fromList [(1, 'a')]
-- > size (singleton 1 'a') == 1
singleton :: Key -> a -> IntMap a
singleton k x
= x `seq` Tip k x
{-# INLINE singleton #-}
{--------------------------------------------------------------------
Insert
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Insert a new key\/value pair in the map.
-- If the key is already present in the map, the associated value is
-- replaced with the supplied value, i.e. 'insert' is equivalent to
-- @'insertWith' 'const'@.
--
-- > insert 5 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'x')]
-- > insert 7 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'a'), (7, 'x')]
-- > insert 5 'x' empty == singleton 5 'x'
insert :: Key -> a -> IntMap a -> IntMap a
insert k x t = k `seq` x `seq`
case t of
Bin p m l r
| nomatch k p m -> join k (Tip k x) p t
| zero k m -> Bin p m (insert k x l) r
| otherwise -> Bin p m l (insert k x r)
Tip ky _
| k==ky -> Tip k x
| otherwise -> join k (Tip k x) ky t
Nil -> Tip k x
-- right-biased insertion, used by 'union'
-- | /O(min(n,W))/. Insert with a combining function.
-- @'insertWith' f key value mp@
-- will insert the pair (key, value) into @mp@ if key does
-- not exist in the map. If the key does exist, the function will
-- insert @f new_value old_value@.
--
-- > insertWith (++) 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "xxxa")]
-- > insertWith (++) 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWith (++) 5 "xxx" empty == singleton 5 "xxx"
insertWith :: (a -> a -> a) -> Key -> a -> IntMap a -> IntMap a
insertWith f k x t
= insertWithKey (\_ x' y' -> f x' y') k x t
-- | /O(min(n,W))/. Insert with a combining function.
-- @'insertWithKey' f key value mp@
-- will insert the pair (key, value) into @mp@ if key does
-- not exist in the map. If the key does exist, the function will
-- insert @f key new_value old_value@.
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:xxx|a")]
-- > insertWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWithKey f 5 "xxx" empty == singleton 5 "xxx"
--
-- If the key exists in the map, this function is lazy in @x@ but strict
-- in the result of @f@.
insertWithKey :: (Key -> a -> a -> a) -> Key -> a -> IntMap a -> IntMap a
insertWithKey f k x t = k `seq` x `seq`
case t of
Bin p m l r
| nomatch k p m -> join k (Tip k x) p t
| zero k m -> Bin p m (insertWithKey f k x l) r
| otherwise -> Bin p m l (insertWithKey f k x r)
Tip ky y
| k==ky -> Tip k $! f k x y
| otherwise -> join k (Tip k x) ky t
Nil -> Tip k x
-- | /O(min(n,W))/. The expression (@'insertLookupWithKey' f k x map@)
-- is a pair where the first element is equal to (@'lookup' k map@)
-- and the second element equal to (@'insertWithKey' f k x map@).
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertLookupWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "5:xxx|a")])
-- > insertLookupWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "xxx")])
-- > insertLookupWithKey f 5 "xxx" empty == (Nothing, singleton 5 "xxx")
--
-- This is how to define @insertLookup@ using @insertLookupWithKey@:
--
-- > let insertLookup kx x t = insertLookupWithKey (\_ a _ -> a) kx x t
-- > insertLookup 5 "x" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "x")])
-- > insertLookup 7 "x" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "x")])
insertLookupWithKey :: (Key -> a -> a -> a) -> Key -> a -> IntMap a -> (Maybe a, IntMap a)
insertLookupWithKey f0 k0 x0 t0 = k0 `seq` x0 `seq` toPair $ go f0 k0 x0 t0
where
go f k x t =
case t of
Bin p m l r
| nomatch k p m -> Nothing :*: join k (Tip k x) p t
| zero k m -> let (found :*: l') = go f k x l in (found :*: Bin p m l' r)
| otherwise -> let (found :*: r') = go f k x r in (found :*: Bin p m l r')
Tip ky y
| k==ky -> (Just y :*: (Tip k $! f k x y))
| otherwise -> (Nothing :*: join k (Tip k x) ky t)
Nil -> Nothing :*: Tip k x
{--------------------------------------------------------------------
Deletion
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Adjust a value at a specific key. When the key is not
-- a member of the map, the original map is returned.
--
-- > adjust ("new " ++) 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > adjust ("new " ++) 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjust ("new " ++) 7 empty == empty
adjust :: (a -> a) -> Key -> IntMap a -> IntMap a
adjust f k m
= adjustWithKey (\_ x -> f x) k m
-- | /O(min(n,W))/. Adjust a value at a specific key. When the key is not
-- a member of the map, the original map is returned.
--
-- > let f key x = (show key) ++ ":new " ++ x
-- > adjustWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > adjustWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjustWithKey f 7 empty == empty
adjustWithKey :: (Key -> a -> a) -> Key -> IntMap a -> IntMap a
adjustWithKey f
= updateWithKey (\k' x -> Just (f k' x))
-- | /O(min(n,W))/. The expression (@'update' f k map@) updates the value @x@
-- at @k@ (if it is in the map). If (@f x@) is 'Nothing', the element is
-- deleted. If it is (@'Just' y@), the key @k@ is bound to the new value @y@.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > update f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > update f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > update f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
update :: (a -> Maybe a) -> Key -> IntMap a -> IntMap a
update f
= updateWithKey (\_ x -> f x)
-- | /O(min(n,W))/. The expression (@'update' f k map@) updates the value @x@
-- at @k@ (if it is in the map). If (@f k x@) is 'Nothing', the element is
-- deleted. If it is (@'Just' y@), the key @k@ is bound to the new value @y@.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > updateWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > updateWithKey f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateWithKey :: (Key -> a -> Maybe a) -> Key -> IntMap a -> IntMap a
updateWithKey f k t = k `seq`
case t of
Bin p m l r
| nomatch k p m -> t
| zero k m -> bin p m (updateWithKey f k l) r
| otherwise -> bin p m l (updateWithKey f k r)
Tip ky y
| k==ky -> case f k y of
Just y' -> y' `seq` Tip ky y'
Nothing -> Nil
| otherwise -> t
Nil -> Nil
-- | /O(min(n,W))/. Lookup and update.
-- The function returns the original value, if it is updated.
-- This is different behavior than 'Data.Map.updateLookupWithKey'.
-- Returns the original key value if the map entry is deleted.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateLookupWithKey f 5 (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "5:new a")])
-- > updateLookupWithKey f 7 (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a")])
-- > updateLookupWithKey f 3 (fromList [(5,"a"), (3,"b")]) == (Just "b", singleton 5 "a")
updateLookupWithKey :: (Key -> a -> Maybe a) -> Key -> IntMap a -> (Maybe a,IntMap a)
updateLookupWithKey f0 k0 t0 = k0 `seq` toPair $ go f0 k0 t0
where
go f k t =
case t of
Bin p m l r
| nomatch k p m -> (Nothing :*: t)
| zero k m -> let (found :*: l') = go f k l in (found :*: bin p m l' r)
| otherwise -> let (found :*: r') = go f k r in (found :*: bin p m l r')
Tip ky y
| k==ky -> case f k y of
Just y' -> y' `seq` (Just y :*: Tip ky y')
Nothing -> (Just y :*: Nil)
| otherwise -> (Nothing :*: t)
Nil -> (Nothing :*: Nil)
-- | /O(log n)/. The expression (@'alter' f k map@) alters the value @x@ at @k@, or absence thereof.
-- 'alter' can be used to insert, delete, or update a value in an 'IntMap'.
-- In short : @'lookup' k ('alter' f k m) = f ('lookup' k m)@.
alter :: (Maybe a -> Maybe a) -> Key -> IntMap a -> IntMap a
alter f k t = k `seq`
case t of
Bin p m l r
| nomatch k p m -> case f Nothing of
Nothing -> t
Just x -> x `seq` join k (Tip k x) p t
| zero k m -> bin p m (alter f k l) r
| otherwise -> bin p m l (alter f k r)
Tip ky y
| k==ky -> case f (Just y) of
Just x -> x `seq` Tip ky x
Nothing -> Nil
| otherwise -> case f Nothing of
Just x -> x `seq` join k (Tip k x) ky t
Nothing -> t
Nil -> case f Nothing of
Just x -> x `seq` Tip k x
Nothing -> Nil
{--------------------------------------------------------------------
Union
--------------------------------------------------------------------}
-- | The union of a list of maps, with a combining operation.
--
-- > unionsWith (++) [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
-- > == fromList [(3, "bB3"), (5, "aAA3"), (7, "C")]
unionsWith :: (a->a->a) -> [IntMap a] -> IntMap a
unionsWith f ts
= foldlStrict (unionWith f) empty ts
-- | /O(n+m)/. The union with a combining function.
--
-- > unionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "aA"), (7, "C")]
unionWith :: (a -> a -> a) -> IntMap a -> IntMap a -> IntMap a
unionWith f m1 m2
= unionWithKey (\_ x y -> f x y) m1 m2
-- | /O(n+m)/. The union with a combining function.
--
-- > let f key left_value right_value = (show key) ++ ":" ++ left_value ++ "|" ++ right_value
-- > unionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "5:a|A"), (7, "C")]
unionWithKey :: (Key -> a -> a -> a) -> IntMap a -> IntMap a -> IntMap a
unionWithKey f m1 m2
= mergeWithKey' Bin (\(Tip k1 x1) (Tip _k2 x2) -> Tip k1 $! f k1 x1 x2) id id m1 m2
{--------------------------------------------------------------------
Difference
--------------------------------------------------------------------}
-- | /O(n+m)/. Difference with a combining function.
--
-- > let f al ar = if al == "b" then Just (al ++ ":" ++ ar) else Nothing
-- > differenceWith f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (7, "C")])
-- > == singleton 3 "b:B"
differenceWith :: (a -> b -> Maybe a) -> IntMap a -> IntMap b -> IntMap a
differenceWith f m1 m2
= differenceWithKey (\_ x y -> f x y) m1 m2
-- | /O(n+m)/. Difference with a combining function. When two equal keys are
-- encountered, the combining function is applied to the key and both values.
-- If it returns 'Nothing', the element is discarded (proper set difference).
-- If it returns (@'Just' y@), the element is updated with a new value @y@.
--
-- > let f k al ar = if al == "b" then Just ((show k) ++ ":" ++ al ++ "|" ++ ar) else Nothing
-- > differenceWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (10, "C")])
-- > == singleton 3 "3:b|B"
differenceWithKey :: (Key -> a -> b -> Maybe a) -> IntMap a -> IntMap b -> IntMap a
differenceWithKey f m1 m2
= mergeWithKey f id (const Nil) m1 m2
{--------------------------------------------------------------------
Intersection
--------------------------------------------------------------------}
-- | /O(n+m)/. The intersection with a combining function.
--
-- > intersectionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "aA"
intersectionWith :: (a -> b -> c) -> IntMap a -> IntMap b -> IntMap c
intersectionWith f m1 m2
= intersectionWithKey (\_ x y -> f x y) m1 m2
-- | /O(n+m)/. The intersection with a combining function.
--
-- > let f k al ar = (show k) ++ ":" ++ al ++ "|" ++ ar
-- > intersectionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "5:a|A"
intersectionWithKey :: (Key -> a -> b -> c) -> IntMap a -> IntMap b -> IntMap c
intersectionWithKey f m1 m2
= mergeWithKey' bin (\(Tip k1 x1) (Tip _k2 x2) -> Tip k1 $! f k1 x1 x2) (const Nil) (const Nil) m1 m2
{--------------------------------------------------------------------
MergeWithKey
--------------------------------------------------------------------}
-- | /O(n+m)/. A high-performance universal combining function. Using
-- 'mergeWithKey', all combining functions can be defined without any loss of
-- efficiency (with the exception of 'union', 'difference' and 'intersection',
-- where sharing of some nodes is lost with 'mergeWithKey').
--
-- Please make sure you know what is going on when using 'mergeWithKey',
-- otherwise you can be surprised by unexpected code growth or even
-- corruption of the data structure.
--
-- When 'mergeWithKey' is given three arguments, it is inlined to the call
-- site. You should therefore use 'mergeWithKey' only to define your custom
-- combining functions. For example, you could define 'unionWithKey',
-- 'differenceWithKey' and 'intersectionWithKey' as
--
-- > myUnionWithKey f m1 m2 = mergeWithKey (\k x1 x2 -> Just (f k x1 x2)) id id m1 m2
-- > myDifferenceWithKey f m1 m2 = mergeWithKey f id (const empty) m1 m2
-- > myIntersectionWithKey f m1 m2 = mergeWithKey (\k x1 x2 -> Just (f k x1 x2)) (const empty) (const empty) m1 m2
--
-- When calling @'mergeWithKey' combine only1 only2@, a function combining two
-- 'IntMap's is created, such that
--
-- * if a key is present in both maps, it is passed with both corresponding
-- values to the @combine@ function. Depending on the result, the key is either
-- present in the result with specified value, or is left out;
--
-- * a nonempty subtree present only in the first map is passed to @only1@ and
-- the output is added to the result;
--
-- * a nonempty subtree present only in the second map is passed to @only2@ and
-- the output is added to the result.
--
-- The @only1@ and @only2@ methods /must return a map with a subset (possibly empty) of the keys of the given map/.
-- The values can be modified arbitrarily. Most common variants of @only1@ and
-- @only2@ are 'id' and @'const' 'empty'@, but for example @'map' f@ or
-- @'filterWithKey' f@ could be used for any @f@.
mergeWithKey :: (Key -> a -> b -> Maybe c) -> (IntMap a -> IntMap c) -> (IntMap b -> IntMap c)
-> IntMap a -> IntMap b -> IntMap c
mergeWithKey f g1 g2 = mergeWithKey' bin combine g1 g2
where -- We use the lambda form to avoid non-exhaustive pattern matches warning.
combine = \(Tip k1 x1) (Tip _k2 x2) -> case f k1 x1 x2 of Nothing -> Nil
Just x -> x `seq` Tip k1 x
{-# INLINE combine #-}
{-# INLINE mergeWithKey #-}
{--------------------------------------------------------------------
Min\/Max
--------------------------------------------------------------------}
-- | /O(log n)/. Update the value at the minimal key.
--
-- > updateMinWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"3:b"), (5,"a")]
-- > updateMinWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateMinWithKey :: (Key -> a -> Maybe a) -> IntMap a -> IntMap a
updateMinWithKey f t =
case t of Bin p m l r | m < 0 -> bin p m l (go f r)
_ -> go f t
where
go f' (Bin p m l r) = bin p m (go f' l) r
go f' (Tip k y) = case f' k y of
Just y' -> y' `seq` Tip k y'
Nothing -> Nil
go _ Nil = error "updateMinWithKey Nil"
-- | /O(log n)/. Update the value at the maximal key.
--
-- > updateMaxWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"b"), (5,"5:a")]
-- > updateMaxWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
updateMaxWithKey :: (Key -> a -> Maybe a) -> IntMap a -> IntMap a
updateMaxWithKey f t =
case t of Bin p m l r | m < 0 -> bin p m (go f l) r
_ -> go f t
where
go f' (Bin p m l r) = bin p m l (go f' r)
go f' (Tip k y) = case f' k y of
Just y' -> y' `seq` Tip k y'
Nothing -> Nil
go _ Nil = error "updateMaxWithKey Nil"
-- | /O(log n)/. Update the value at the maximal key.
--
-- > updateMax (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "Xa")]
-- > updateMax (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
updateMax :: (a -> Maybe a) -> IntMap a -> IntMap a
updateMax f = updateMaxWithKey (const f)
-- | /O(log n)/. Update the value at the minimal key.
--
-- > updateMin (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "Xb"), (5, "a")]
-- > updateMin (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
updateMin :: (a -> Maybe a) -> IntMap a -> IntMap a
updateMin f = updateMinWithKey (const f)
{--------------------------------------------------------------------
Mapping
--------------------------------------------------------------------}
-- | /O(n)/. Map a function over all values in the map.
--
-- > map (++ "x") (fromList [(5,"a"), (3,"b")]) == fromList [(3, "bx"), (5, "ax")]
map :: (a -> b) -> IntMap a -> IntMap b
map f t
= case t of
Bin p m l r -> Bin p m (map f l) (map f r)
Tip k x -> Tip k $! f x
Nil -> Nil
-- | /O(n)/. Map a function over all values in the map.
--
-- > let f key x = (show key) ++ ":" ++ x
-- > mapWithKey f (fromList [(5,"a"), (3,"b")]) == fromList [(3, "3:b"), (5, "5:a")]
mapWithKey :: (Key -> a -> b) -> IntMap a -> IntMap b
mapWithKey f t
= case t of
Bin p m l r -> Bin p m (mapWithKey f l) (mapWithKey f r)
Tip k x -> Tip k $! f k x
Nil -> Nil
-- | /O(n)/. The function @'mapAccum'@ threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a b = (a ++ b, b ++ "X")
-- > mapAccum f "Everything: " (fromList [(5,"a"), (3,"b")]) == ("Everything: ba", fromList [(3, "bX"), (5, "aX")])
mapAccum :: (a -> b -> (a,c)) -> a -> IntMap b -> (a,IntMap c)
mapAccum f = mapAccumWithKey (\a' _ x -> f a' x)
-- | /O(n)/. The function @'mapAccumWithKey'@ threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a k b = (a ++ " " ++ (show k) ++ "-" ++ b, b ++ "X")
-- > mapAccumWithKey f "Everything:" (fromList [(5,"a"), (3,"b")]) == ("Everything: 3-b 5-a", fromList [(3, "bX"), (5, "aX")])
mapAccumWithKey :: (a -> Key -> b -> (a,c)) -> a -> IntMap b -> (a,IntMap c)
mapAccumWithKey f a t
= mapAccumL f a t
-- | /O(n)/. The function @'mapAccumL'@ threads an accumulating
-- argument through the map in ascending order of keys. Strict in
-- the accumulating argument and in both elements of the
-- result of the function.
mapAccumL :: (a -> Key -> b -> (a,c)) -> a -> IntMap b -> (a,IntMap c)
mapAccumL f0 a0 t0 = toPair $ go f0 a0 t0
where
go f a t
= case t of
Bin p m l r -> let (a1 :*: l') = go f a l
(a2 :*: r') = go f a1 r
in (a2 :*: Bin p m l' r')
Tip k x -> let (a',x') = f a k x in x' `seq` (a' :*: Tip k x')
Nil -> (a :*: Nil)
-- | /O(n)/. The function @'mapAccumRWithKey'@ threads an accumulating
-- argument through the map in descending order of keys.
mapAccumRWithKey :: (a -> Key -> b -> (a,c)) -> a -> IntMap b -> (a,IntMap c)
mapAccumRWithKey f0 a0 t0 = toPair $ go f0 a0 t0
where
go f a t
= case t of
Bin p m l r -> let (a1 :*: r') = go f a r
(a2 :*: l') = go f a1 l
in (a2 :*: Bin p m l' r')
Tip k x -> let (a',x') = f a k x in x' `seq` (a' :*: Tip k x')
Nil -> (a :*: Nil)
-- | /O(n*log n)/.
-- @'mapKeysWith' c f s@ is the map obtained by applying @f@ to each key of @s@.
--
-- The size of the result may be smaller if @f@ maps two or more distinct
-- keys to the same new key. In this case the associated values will be
-- combined using @c@.
--
-- > mapKeysWith (++) (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 1 "cdab"
-- > mapKeysWith (++) (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 3 "cdab"
mapKeysWith :: (a -> a -> a) -> (Key->Key) -> IntMap a -> IntMap a
mapKeysWith c f = fromListWith c . foldrWithKey (\k x xs -> (f k, x) : xs) []
{--------------------------------------------------------------------
Filter
--------------------------------------------------------------------}
-- | /O(n)/. Map values and collect the 'Just' results.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > mapMaybe f (fromList [(5,"a"), (3,"b")]) == singleton 5 "new a"
mapMaybe :: (a -> Maybe b) -> IntMap a -> IntMap b
mapMaybe f = mapMaybeWithKey (\_ x -> f x)
-- | /O(n)/. Map keys\/values and collect the 'Just' results.
--
-- > let f k _ = if k < 5 then Just ("key : " ++ (show k)) else Nothing
-- > mapMaybeWithKey f (fromList [(5,"a"), (3,"b")]) == singleton 3 "key : 3"
mapMaybeWithKey :: (Key -> a -> Maybe b) -> IntMap a -> IntMap b
mapMaybeWithKey f (Bin p m l r)
= bin p m (mapMaybeWithKey f l) (mapMaybeWithKey f r)
mapMaybeWithKey f (Tip k x) = case f k x of
Just y -> y `seq` Tip k y
Nothing -> Nil
mapMaybeWithKey _ Nil = Nil
-- | /O(n)/. Map values and separate the 'Left' and 'Right' results.
--
-- > let f a = if a < "c" then Left a else Right a
-- > mapEither f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (fromList [(3,"b"), (5,"a")], fromList [(1,"x"), (7,"z")])
-- >
-- > mapEither (\ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (empty, fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
mapEither :: (a -> Either b c) -> IntMap a -> (IntMap b, IntMap c)
mapEither f m
= mapEitherWithKey (\_ x -> f x) m
-- | /O(n)/. Map keys\/values and separate the 'Left' and 'Right' results.
--
-- > let f k a = if k < 5 then Left (k * 2) else Right (a ++ a)
-- > mapEitherWithKey f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (fromList [(1,2), (3,6)], fromList [(5,"aa"), (7,"zz")])
-- >
-- > mapEitherWithKey (\_ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (empty, fromList [(1,"x"), (3,"b"), (5,"a"), (7,"z")])
mapEitherWithKey :: (Key -> a -> Either b c) -> IntMap a -> (IntMap b, IntMap c)
mapEitherWithKey f0 t0 = toPair $ go f0 t0
where
go f (Bin p m l r)
= bin p m l1 r1 :*: bin p m l2 r2
where
(l1 :*: l2) = go f l
(r1 :*: r2) = go f r
go f (Tip k x) = case f k x of
Left y -> y `seq` (Tip k y :*: Nil)
Right z -> z `seq` (Nil :*: Tip k z)
go _ Nil = (Nil :*: Nil)
{--------------------------------------------------------------------
Conversions
--------------------------------------------------------------------}
-- | /O(n)/. Build a map from a set of keys and a function which for each key
-- computes its value.
--
-- > fromSet (\k -> replicate k 'a') (Data.IntSet.fromList [3, 5]) == fromList [(5,"aaaaa"), (3,"aaa")]
-- > fromSet undefined Data.IntSet.empty == empty
fromSet :: (Key -> a) -> IntSet.IntSet -> IntMap a
fromSet _ IntSet.Nil = Nil
fromSet f (IntSet.Bin p m l r) = Bin p m (fromSet f l) (fromSet f r)
fromSet f (IntSet.Tip kx bm) = buildTree f kx bm (IntSet.suffixBitMask + 1)
where -- This is slightly complicated, as we have to convert the dense
-- representation of an IntSet into the tree representation of an IntMap.
--
-- We are given a nonzero bit mask 'bmask' of 'bits' bits with prefix 'prefix'.
-- We split bmask into halves corresponding to left and right subtree.
-- If they are both nonempty, we create a Bin node, otherwise exactly
-- one of them is nonempty and we construct the IntMap from that half.
buildTree g prefix bmask bits = prefix `seq` bmask `seq` case bits of
0 -> Tip prefix $! g prefix
_ -> case intFromNat ((natFromInt bits) `shiftRL` 1) of
bits2 | bmask .&. ((1 `shiftLL` bits2) - 1) == 0 ->
buildTree g (prefix + bits2) (bmask `shiftRL` bits2) bits2
| (bmask `shiftRL` bits2) .&. ((1 `shiftLL` bits2) - 1) == 0 ->
buildTree g prefix bmask bits2
| otherwise ->
Bin prefix bits2 (buildTree g prefix bmask bits2) (buildTree g (prefix + bits2) (bmask `shiftRL` bits2) bits2)
{--------------------------------------------------------------------
Lists
--------------------------------------------------------------------}
-- | /O(n*min(n,W))/. Create a map from a list of key\/value pairs.
--
-- > fromList [] == empty
-- > fromList [(5,"a"), (3,"b"), (5, "c")] == fromList [(5,"c"), (3,"b")]
-- > fromList [(5,"c"), (3,"b"), (5, "a")] == fromList [(5,"a"), (3,"b")]
fromList :: [(Key,a)] -> IntMap a
fromList xs
= foldlStrict ins empty xs
where
ins t (k,x) = insert k x t
-- | /O(n*min(n,W))/. Create a map from a list of key\/value pairs with a combining function. See also 'fromAscListWith'.
--
-- > fromListWith (++) [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"a")] == fromList [(3, "ab"), (5, "aba")]
-- > fromListWith (++) [] == empty
fromListWith :: (a -> a -> a) -> [(Key,a)] -> IntMap a
fromListWith f xs
= fromListWithKey (\_ x y -> f x y) xs
-- | /O(n*min(n,W))/. Build a map from a list of key\/value pairs with a combining function. See also 'fromAscListWithKey'.
--
-- > fromListWith (++) [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"a")] == fromList [(3, "ab"), (5, "aba")]
-- > fromListWith (++) [] == empty
fromListWithKey :: (Key -> a -> a -> a) -> [(Key,a)] -> IntMap a
fromListWithKey f xs
= foldlStrict ins empty xs
where
ins t (k,x) = insertWithKey f k x t
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order.
--
-- > fromAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
-- > fromAscList [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "b")]
fromAscList :: [(Key,a)] -> IntMap a
fromAscList xs
= fromAscListWithKey (\_ x _ -> x) xs
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order, with a combining function on equal keys.
-- /The precondition (input list is ascending) is not checked./
--
-- > fromAscListWith (++) [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "ba")]
fromAscListWith :: (a -> a -> a) -> [(Key,a)] -> IntMap a
fromAscListWith f xs
= fromAscListWithKey (\_ x y -> f x y) xs
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order, with a combining function on equal keys.
-- /The precondition (input list is ascending) is not checked./
--
-- > fromAscListWith (++) [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "ba")]
fromAscListWithKey :: (Key -> a -> a -> a) -> [(Key,a)] -> IntMap a
fromAscListWithKey _ [] = Nil
fromAscListWithKey f (x0 : xs0) = fromDistinctAscList (combineEq x0 xs0)
where
-- [combineEq f xs] combines equal elements with function [f] in an ordered list [xs]
combineEq z [] = [z]
combineEq z@(kz,zz) (x@(kx,xx):xs)
| kx==kz = let yy = f kx xx zz in yy `seq` combineEq (kx,yy) xs
| otherwise = z:combineEq x xs
-- | /O(n)/. Build a map from a list of key\/value pairs where
-- the keys are in ascending order and all distinct.
-- /The precondition (input list is strictly ascending) is not checked./
--
-- > fromDistinctAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
fromDistinctAscList :: [(Key,a)] -> IntMap a
fromDistinctAscList [] = Nil
fromDistinctAscList (z0 : zs0) = work z0 zs0 Nada
where
work (kx,vx) [] stk = vx `seq` finish kx (Tip kx vx) stk
work (kx,vx) (z@(kz,_):zs) stk = vx `seq` reduce z zs (branchMask kx kz) kx (Tip kx vx) stk
reduce :: (Key,a) -> [(Key,a)] -> Mask -> Prefix -> IntMap a -> Stack a -> IntMap a
reduce z zs _ px tx Nada = work z zs (Push px tx Nada)
reduce z zs m px tx stk@(Push py ty stk') =
let mxy = branchMask px py
pxy = mask px mxy
in if shorter m mxy
then reduce z zs m pxy (Bin pxy mxy ty tx) stk'
else work z zs (Push px tx stk)
finish _ t Nada = t
finish px tx (Push py ty stk) = finish p (join py ty px tx) stk
where m = branchMask px py
p = mask px m
data Stack a = Push {-# UNPACK #-} !Prefix !(IntMap a) !(Stack a) | Nada
|
ekmett/containers
|
Data/IntMap/Strict.hs
|
bsd-3-clause
| 37,187
| 0
| 22
| 9,465
| 7,138
| 3,816
| 3,322
| 420
| 7
|
-- | Core sound module.
module Data.Sound (
-- * Basic types
Time, Sample
, Sound
-- * Basic functions
, duration , rate
, channels , nSamples
, sample
-- * Wave generators
-- ** Basic wave generators
, zeroSound , zeroSoundWith
, sine , sineWith , sineRaw
, sawtooth , sawtoothWith , sawtoothRaw
, square , squareWith , squareRaw
, triangle , triangleWith , triangleRaw
-- ** Variable Frequency Basic wave generators
, sineV , sineVR
-- ** Functional wave generators
, fromFunction
-- ** Other wave generators
, noise , noiseR
, pnoise , pnoiseR
, karplus , karplusR
-- * Sound operators
-- ** Basic operators
, (<.>) , (<+>) , (<|>)
-- ** Other operators
, parWithPan , addAt
-- * Modifiers
, addSilenceBeg , addSilenceEnd
, velocity , mapSound
, pan , scale
, divide , multiply
, left , right
-- * Effects
, echo
-- * Utils
, loop, trim, backwards
, affineFunction
, linearFunction
-- * Experimental
, fourierSieve
) where
import Data.Monoid
import Data.Sound.Internal
import Data.Sound.Core.Chunked
import Data.Sound.Analysis
import qualified Data.Vector.Unboxed as A
import qualified Data.Complex as C
-- Maybe
import Data.Maybe (catMaybes)
-- Random
import Random.MWC.Pure
-- Sequences
import qualified Data.Sequence as Seq
-- | Add a silence at the beginning of a sound.
addSilenceBeg :: Time -- ^ Duration of the silence.
-> Sound -> Sound
addSilenceBeg d s = multiply n (zeroSoundWith r d) <.> s
where
r = rate s
n = channels s
-- | Add a silence at the end of a sound.
addSilenceEnd :: Time -- ^ Duration of the silence.
-> Sound -> Sound
addSilenceEnd d s = s <.> multiply n (zeroSoundWith r d)
where
r = rate s
n = channels s
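-- For example, @addSilenceBeg 0.5 s@ prepends half a second of silence to @s@,
-- and @addSilenceEnd 0.5 s@ appends it; both preserve the sample rate and the
-- number of channels of @s@.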
-- | /Addition of sounds/. If one sound is longer, the remainder is left unchanged.
-- There are some restrictions on the use of this function.
--
-- * Both arguments must share the same /sample rate/.
--
-- * Both arguments must share the same /number of channels/.
(<+>) :: Sound -> Sound -> Sound
s1@(S r l nc c) <+> s2@(S r' l' nc' c')
| r /= r' = soundError [s1,s2] "<+>" $ "Can't add sounds with different sample rates. "
++ "Please, consider to change the sample rate of one of them."
| nc /= nc' = soundError [s1,s2] "<+>" $ "Can't add two sounds with different number of channels. "
++ "Please, consider to change the number of channels in one of them."
| otherwise = S r (max l l') nc $
if l == l' then zipChunkedSame (zipSamples (+)) c c'
else zipChunked (zipSamples (+)) c c'
-- | /Parallelization of sounds/. Often referred to as the /par/ operator.
-- Applying this operator to two sounds makes them play at the same time,
-- but on different channels. The sound on the left goes to the left-most
-- channels, and the one on the right to the right-most channels.
-- There are some restrictions on the use of this function.
--
-- * Both arguments must share the same /sample rate/.
--
(<|>) :: Sound -> Sound -> Sound
s1@(S r l nc c) <|> s2@(S r' l' nc' c')
| r /= r' = soundError [s1,s2] "<|>" $ "Can't par sounds with different sample rates. "
                ++ "Please consider changing the sample rate of one of them."
| otherwise = let c'' = if l < l' then zipChunkedSame appendSamples (c <> zeroChunks (l'-l) nc) c'
else zipChunkedSame appendSamples c (c' <> zeroChunks (l-l') nc')
in S r (max l l') (nc+nc') c''
{- About the associativity of the sequencing operator.
If we are using balanced chunk appending, the sequencing operator (<.>) should be
left associative (infixl). Suppose we have three sound chunks of size n. When we
append two chunks, the right chunk gets balanced (unless it is already balanced)
in order to get a balanced chunk after the appending. This makes balancing take
at most n steps, where n is the length of the right argument.
If we compare the number of balancing steps with left and right association,
we observe that, if the inputs are of similar size, it is better to associate
to the left.
n n n
(--------------- <.> ---------------) <.> ---------------
=> n balancing steps
2n n
------------------------------ <.> ---------------
=> n balancing steps
3n
---------------------------------------------
Total balancing steps: 2n
n n n
--------------- <.> (--------------- <.> ---------------)
=> n balancing steps
n 2n
--------------- <.> ------------------------------
=> 2n balancing steps
3n
---------------------------------------------
Total balancing steps: 3n
Priority 5 is just a provisional number (very arbitrary).
-}
infixl 5 <.>
-- | /Sequencing of sounds/. The sequencing operator, as the name suggests, plays one
-- sound after the other.
-- There are some restrictions on the use of this function.
--
-- * Both arguments must share the same /sample rate/.
--
-- * Both arguments must share the same /number of channels/.
(<.>) :: Sound -> Sound -> Sound
s1@(S r l nc c) <.> s2@(S r' l' nc' c')
| r /= r' = soundError [s1,s2] "<.>" $ "Can't sequence sounds with different sample rates. "
                ++ "Please consider changing the sample rate of one of them."
| nc /= nc' = soundError [s1,s2] "<.>" $ "Can't sequence two sounds with different number of channels. "
                ++ "Please consider changing the number of channels in one of them."
| otherwise = S r (l+l') nc $ c <> c'
{-# RULES
"sound/multiplyFunction"
forall n r d p f. multiply n (fromFunction r d p f) = fromFunction r d p (multiplySample n . f)
#-}
-- | Multiply a sound over several channels. The sound is simply repeated on each channel
-- with the same amplitude (unlike 'divide'). The number of channels is multiplied by the
-- given factor.
--
-- > multiply n (fromFunction r d p f) = fromFunction r d p (multiplySample n . f)
--
multiply :: Int -- ^ Number of channels factor.
-> Sound -> Sound
{-# INLINE[1] multiply #-}
multiply n s = f 1
where
f k = if k == n then s else s <|> f (k+1)
-- | Similar to 'multiply', but also dividing the amplitude of the sound by the factor.
divide :: Int -- ^ Number of channels factor.
-> Sound -> Sound
{-# INLINE divide #-}
divide n s = scale (recip $ fromIntegral n) $ multiply n s
-- | This function works like '<+>', but it allows you to choose at which time to add the sound.
-- This way, @addAt t s1 s2@ will add @s1@ to @s2@ starting at second @t@.
addAt :: Time -> Sound -> Sound -> Sound
addAt t s1 s2 = addSilenceBeg t s1 <+> s2
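-- A small usage sketch (the parameter values are illustrative): overlay a short
-- high-pitched beep two seconds into a longer low tone.
--
-- > addAt 2 (sine 0.2 0.5 880 0) (sine 5 0.5 220 0)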
{-# RULES
"sound/velocity" forall f g s. velocity f (velocity g s) = velocity (\t -> f t * g t) s
#-}
-- | Time-dependent amplitude modifier.
--
-- > velocity f (velocity g s) = velocity (\t -> f t * g t) s
--
velocity :: (Time -> Double) -- ^ @0 <= v t <= 1@.
-> Sound
-> Sound
{-# INLINE[1] velocity #-}
velocity v s = mapSoundAt (\i -> mapSample $ \x -> v (f i) * x) s
where
r = rate s
f = sampleTime r
-- | Scale a sound by a given factor.
--
-- > scale = velocity . const
scale :: Double -- ^ Scaling factor. @0 <= k <= 1@
-> Sound -- ^ Original sound.
-> Sound -- ^ Scaled sound.
{-# INLINE scale #-}
scale = velocity . const
-- | Similar to the /par operator/ ('<|>') but using a time-dependent panning function.
--
-- > parWithPan (const (-1)) s1 s2 = s1 <|> s2
-- > parWithPan (const 0 ) s1 s2 = scale (1/2) (s1 <+> s2) <|> scale (1/2) (s1 <+> s2)
-- > parWithPan (const 1 ) s1 s2 = s2 <|> s1
--
parWithPan :: (Time -> Double) -- ^ @-1 <= p t <= 1@.
-> Sound
-> Sound
-> Sound
{-# INLINE parWithPan #-}
parWithPan p s1@(S r1 n1 c1 ss1) s2@(S r2 n2 c2 ss2)
| r1 /= r2 = soundError [s1,s2] "parWithPan" $ "Can't par sounds with different sample rates. "
                ++ "Please consider changing the sample rate of one of them."
| c1 /= c2 = soundError [s1,s2] "parWithPan" $ "Can't par sounds with different number of channels. "
                ++ "Please consider changing the number of channels in one of them."
| otherwise = S r1 (max n1 n2) (c1*2) $ if n1 == n2 then zipChunkedAtSame f ss1 ss2
else zipChunkedAt f ss1 ss2
where
f i sx sy = let t = sampleTime r1 i
q1 = (1 - p t) / 2
q2 = (1 + p t) / 2
l = zipSamples (\x y -> q1*x + q2*y) sx sy
r = zipSamples (\x y -> q1*y + q2*x) sx sy
in appendSamples l r
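-- A small worked example of the panning arithmetic above (purely illustrative):
-- with @p t = 0@ we get @q1 = q2 = 1/2@, so both output channels carry the
-- average of the two inputs, matching the @parWithPan (const 0)@ law stated in
-- the documentation above.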
-- | Pan a sound from left (-1) to right (1) with a time-dependent function.
--
-- > pan (const (-1)) = left
-- > pan (const 0 ) = divide 2
-- > pan (const 1 ) = right
--
pan :: (Time -> Double) -- ^ @-1 <= p t <= 1@.
-> Sound
-> Sound
pan p s = parWithPan p s $ zeroSoundWith (rate s) $ duration s
-- | Move a sound completely to the left.
left :: Sound -> Sound
left s = s <|> mapSound (mapSample $ const 0) s
-- | Move a sound completely to the right.
right :: Sound -> Sound
right s = mapSound (mapSample $ const 0) s <|> s
{-# RULES
"sound/loop" forall n m s. loop n (loop m s) = loop (n*m) s
"sound/mapLoop" forall f n s. mapSound f (loop n s) = loop n (mapSound f s)
#-}
-- | Repeat a sound cyclically a given number of times.
-- It obeys the following rules:
--
-- > loop n (loop m s) = loop (n*m) s
-- > mapSound f (loop n s) = loop n (mapSound f s)
--
loop :: Int -> Sound -> Sound
loop n = foldr1 (<.>) . replicate n
{-# RULES
"sound/mapTrim" forall t0 t1 f s. trim t0 t1 (mapSound f s) = mapSound f (trim t0 t1 s)
#-}
-- | Extract a continuous segment of the sound.
--
-- > trim t0 t1 (mapSound f s) = mapSound f (trim t0 t1 s)
--
trim :: Time -- ^ Start time
-> Time -- ^ End time
-> Sound
-> Sound
trim t0 t1 s = trimIndex n0 n1 s
where
r = rate s
n0 = timeSample r t0
n1 = timeSample r t1
trimIndex :: Int -- ^ Start index
-> Int -- ^ End index
-> Sound
-> Sound
trimIndex n0 n1 s@(S r n c ss)
| n0 >= n = S r 0 c mempty
| n1 >= n = trimIndex n0 (n-1) s
| otherwise = S r (n1-n0) c $ trimChunked n0 n1 ss
-- | Reverse a sound. Note that the entire sound has to be
-- kept in memory to perform the reversal, since the
-- /first/ sample of the reversed sound is the /last/ sample
-- of the original, and getting the last sample requires
-- evaluating all the previous samples.
backwards :: Sound -> Sound
backwards s = s { schunks = reverseChunked $ schunks s }
-- ECHOING
-- | Echo effect.
--
-- > echo 0 dec del s = s
--
echo :: Int -- ^ Repetitions. How many times the sound is repeated.
-> Double -- ^ Decay (@0 < decay < 1@). How fast the amplitude of the repetitions decays.
-> Time -- ^ Delay @0 < delay@. Time between repetitions.
-> Sound -- ^ Original sound.
-> Sound -- ^ Echoed sound.
echo 0 _ _ s = s
echo n dec del s = s { schunks = causaltr f e $ schunks s }
where
e = Seq.empty
m = timeSample (rate s) del
f past x =
( let past' = if Seq.length past >= n*m
then seqInit past
else past
in x Seq.<| past'
, let xs = [ if k <= Seq.length past
then Just $ mapSample (*q) $ Seq.index past (k-1)
else Nothing
| i <- [1 .. n]
, let k = i*m
, let q = dec ^ i
]
in foldr1 (zipSamples (+)) $ x : catMaybes xs
)
seqInit :: Seq.Seq a -> Seq.Seq a
seqInit xs = case Seq.viewr xs of
ys Seq.:> _ -> ys
_ -> Seq.empty
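-- A small usage sketch of 'echo' (the parameter values are illustrative): mix
-- three delayed copies into a one-second tone, each copy 60% as loud as the
-- previous one and delayed by a further 0.25 seconds.
--
-- > echo 3 0.6 0.25 (sine 1 0.8 440 0)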
{-
-- INTEGRATION (possibly useful in the future)
simpson :: Time -> Time -> (Time -> Double) -> Double
simpson a b f = (b-a) / 6 * (f a + 4 * f ((a+b)/2) + f b)
intervalWidth :: Time
intervalWidth = 0.1
integrate :: Time -> Time -> (Time -> Double) -> Double
integrate a b f = sum [ simpson i (i + intervalWidth) f | i <- [a , a + intervalWidth .. b - intervalWidth] ]
-}
-- Simpson integration error
--
-- 1/90 * (intervalWidth/2)^5 * abs (f''''(c))
--
---------------
-- COMMON WAVES
{- About the common wave definitions
Functions describing these common waves have been created using
the usual definitions, but then algebraically transformed to use a
smaller number of operations.
-}
-- | Double of 'pi'.
pi2 :: Time
pi2 = 2*pi
timeFloor :: Time -> Time
timeFloor = fromIntegral . (floor :: Time -> Int) -- Don't use truncate!
decimals :: Time -> Time
decimals = snd . (properFraction :: Time -> (Int,Time))
-- | Like 'zeroSound', but allowing you to choose a custom sample rate.
zeroSoundWith :: Int -> Time -> Sound
{-# INLINE zeroSoundWith #-}
zeroSoundWith r d = S r n 1 $ zeroChunks n 1
where
n = timeSample r d
-- | Creates a mono, constantly zero (silent) sound.
--
-- <<http://i.imgur.com/BP5PFIY.png>>
zeroSound :: Time -> Sound
{-# INLINE zeroSound #-}
zeroSound = zeroSoundWith 44100
sineRaw :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE sineRaw #-}
sineRaw r d a f p = fromFunction r d (Just $ 1/f) $
let pi2f = pi2*f
in \t ->
let s :: Time
s = pi2f*t + p
in monoSample $ a * sin s
-- | Like 'sine', but allowing you to choose a custom sample rate.
sineWith :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE sineWith #-}
sineWith r d a f = sineRaw r d' a f
where
q = recip $ 2*f
(n,rm) = properFraction (d/q) :: (Int,Double)
d' = if rm < 0.001 then d else fromIntegral (n+1) * q
-- | Create a sine wave with the given duration, amplitude, frequency and phase (mono).
--
-- <<http://i.imgur.com/46ry4Oq.png>>
sine :: Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE sine #-}
sine = sineWith 44100
-- | Like 'sineV', but allowing you to choose the sample rate.
sineVR :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> (Time -> Time) -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE sineVR #-}
sineVR r d a f p = fromFunction r d Nothing $
\t -> let s :: Time
s = pi2*f t*t + p
in monoSample $ a * sin s
-- | A variation of 'sine' whose frequency changes over time.
-- If you are going to use a constant frequency, consider using
-- 'sine' instead for better performance.
sineV :: Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> (Time -> Time) -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE sineV #-}
sineV = sineVR 44100
-- | Like 'sawtooth', but allowing you to choose the sample rate.
sawtoothRaw :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE sawtoothRaw #-}
sawtoothRaw r d a f p = fromFunction r d (Just $ 1/f) $ \t ->
let s :: Time
s = f*t + p
in monoSample $ a * (2 * decimals s - 1)
sawtoothWith :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE sawtoothWith #-}
sawtoothWith r d a f = sawtoothRaw r d' a f
where
q = recip f
(n,rm) = properFraction (d/q) :: (Int,Double)
d' = if rm < 0.001 then d else fromIntegral (n+1) * q
-- | Create a sawtooth wave with the given duration, amplitude, frequency and phase (mono).
--
-- <<http://i.imgur.com/uJVIpmv.png>>
sawtooth :: Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE sawtooth #-}
sawtooth = sawtoothWith 44100
-- | Like 'square', but allowing you to choose the sample rate.
squareWith :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE squareWith #-}
squareWith r d a f = squareRaw r d' a f
where
q = recip f
(n,rm) = properFraction (d/q) :: (Int,Double)
d' = if rm < 0.001 then d else fromIntegral (n+1) * q
squareRaw :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE squareRaw #-}
squareRaw r d a f p = fromFunction r d (Just $ 1/f) $ \t ->
let s :: Time
s = f*t + p
h :: Time -> Double
h x = signum $ 0.5 - x
in monoSample $ a * h (decimals s)
-- | Create a square wave with the given duration, amplitude, frequency and phase (mono).
--
-- <<http://i.imgur.com/GQUCVwT.png>>
square :: Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE square #-}
square = squareWith 44100
triangleRaw :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE triangleRaw #-}
triangleRaw r d a f p = fromFunction r d (Just $ 1/f) $ \t ->
let s :: Time
s = f*t + p
in monoSample $ a * (1 - 4 * abs (timeFloor (s + 0.25) - s + 0.25))
-- | As in 'triangle', but allowing you to choose the sample rate.
triangleWith :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE triangleWith #-}
triangleWith r d a f = triangleRaw r d' a f
where
q = recip f
(n,rm) = properFraction (d/q) :: (Int,Double)
d' = if rm < 0.001 then d else fromIntegral (n+1) * q
-- | Create a triangle wave with the given duration, amplitude, frequency and phase (mono).
--
-- <<http://i.imgur.com/0RZ8gUh.png>>
triangle :: Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Time -- ^ Phase
-> Sound
{-# INLINE triangle #-}
triangle = triangleWith 44100
-------------------
-- OTHER SYNTHS
-- | Specialized random generator.
randomRs :: (Double,Double) -> Seed -> [Double]
randomRs (x,y) = go
where
go g = let (r,g') = range_random (x,y) g
in r : go g'
-- | Like 'pnoise', but allowing you to choose the sample rate.
pnoiseR :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Word32 -- ^ Random seed
-> Sound
{-# INLINE pnoiseR #-}
pnoiseR r d a f sd = S r tn 1 cs
where
n = timeSample r $ recip f
xs = take n $ fmap monoSample $ randomRs (-a,a) $ seed [sd]
tn = timeSample r d
cs = chunkedFromList tn $ cycle xs
-- | A randomly generated periodic sound (mono) with the given frequency: a single
-- random period is generated and repeated for the whole duration. Different seeds
-- generate different sounds.
pnoise :: Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Word32 -- ^ Random seed
-> Sound
{-# INLINE pnoise #-}
pnoise = pnoiseR 44100
-- | Like 'karplus', but allowing you to choose a custom sample rate.
karplusR :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Double -- ^ Decay (0~1)
-> Word32 -- ^ Random seed
-> Sound
{-# INLINE karplusR #-}
karplusR r d a f dc = velocity (dc**) . pnoiseR r d a f
-- | String-like sound based on randomly generated signals (see 'pnoise').
karplus :: Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Time -- ^ Frequency (Hz)
-> Double -- ^ Decay (0~1)
-> Word32 -- ^ Random seed
-> Sound
{-# INLINE karplus #-}
karplus = karplusR 44100
-- | A randomly generated sound (mono) without a frequency. Different seeds generate
-- different sounds. For long sounds, a similar effect can be obtained using 'pnoise'
-- with much better performance. While 'noise' creates new random values for the entire
-- length of the sound, 'pnoise' only creates a small portion that is repeated until
-- the specified duration is reached. If the frequency given to 'pnoise' is low enough
-- (any frequency below the human hearing range should work) it should produce a sound
-- effect very similar to the one 'noise' does.
noise :: Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Word32 -- ^ Random seed
-> Sound
{-# INLINE noise #-}
noise = noiseR 44100
-- | Like 'noise', but allowing you to choose a custom sample rate.
noiseR :: Int -- ^ Sample rate
-> Time -- ^ Duration (0~)
-> Double -- ^ Amplitude (0~1)
-> Word32 -- ^ Random seed
-> Sound
{-# INLINE noiseR #-}
noiseR r d a sd = S r n 1 $ chunkedFromList n xs
where
n = timeSample r d
xs = fmap monoSample $ randomRs (-a,a) $ seed [sd]
-- Misc
-- | Build an affine function given two points of its graph. The first
-- coordinates of the two points must be different (otherwise the slope is undefined).
affineFunction :: (Double,Double) -> (Double,Double) -> Double -> Double
affineFunction (a,b) (c,d) x = m*x + n
where
m = (d-b)/(c-a)
n = b - m*a
-- | Build a linear function given a point of its graph that is not on the Y axis.
--
-- > linearFunction = affineFunction (0,0)
--
linearFunction :: (Double,Double) -> Double -> Double
linearFunction = affineFunction (0,0)
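-- A worked example (the points are illustrative): the affine function through
-- (0,1) and (2,5) has slope m = (5-1)/(2-0) = 2 and intercept n = 1 - 2*0 = 1,
-- so it maps 3 to 7.
--
-- > affineFunction (0,1) (2,5) 3 == 7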
--------------------------------
-- Room for experiments
fourierSieve :: Double -> Sound -> Sound
fourierSieve e s = s { schunks = f (schunks s) }
where
f = devectorize . fmap sampleFromVector . unsplitVector -- Reconstruct chunked data
. fmap (vectorMakeReal . g . vectorMakeComplex) -- Manipulate vector
. splitVector . fmap sampleVector . vectorize -- Deconstruct chunked data
g = fourierInverse . h . fourierTransform
h = A.map $ \c -> if C.magnitude c > e then c else 0
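-- A small usage sketch (the threshold and seed are illustrative): zero out every
-- frequency component of a noisy tone whose magnitude does not exceed the
-- threshold.
--
-- > fourierSieve 0.05 (sine 1 0.5 440 0 <+> noise 1 0.1 42)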
|
Daniel-Diaz/wavy
|
Data/Sound.hs
|
bsd-3-clause
| 22,954
| 0
| 18
| 6,763
| 4,594
| 2,545
| 2,049
| 394
| 3
|
module Mistral.Simple (
-- * High-level interface
compileFile
, compileModule
, linkProgram
-- * Low-level interface (subject to change outside of PVP)
, compile'
, version
) where
import Mistral.CodeGen.DeadCode ( elimDeadCode )
import Mistral.CodeGen.LambdaLift ( lambdaLift )
import Mistral.CodeGen.Link ( link )
import Mistral.CodeGen.ResolveTags ( resolveTags )
import Mistral.CodeGen.Specialize ( specialize )
import Mistral.Driver ( Driver, io, phase, failErrs, addErr, traceMsg )
import Mistral.ModuleSystem ( moduleSystem, saveIface )
import Mistral.Parser ( parseModule )
import Mistral.TypeCheck ( checkModule )
import Mistral.TypeCheck.AST ( Module, Program, saveModule )
import Mistral.TypeCheck.Interface ( genIface )
import Mistral.Utils.PP ( text, ($$), pp )
import Data.Version (Version)
import qualified Paths_mistral as P
import Control.Monad ( when, unless )
import qualified Data.Text.Lazy as L
import qualified Data.Text.Lazy.IO as L
-- | Compile the contents of a file
compileFile :: FilePath -> Driver Module
compileFile path = phase "compile" $
do bytes <- io (L.readFile path)
compile' True (Just path) bytes
-- | Compile a text buffer, optionally with a path attached, but do
-- not write the output to disk.
compileModule :: Maybe FilePath -> L.Text -> Driver Module
compileModule mbPath bytes =
phase "compile" (compile' False mbPath bytes)
-- | Parse, rename, typecheck and lambda-lift a module. Then, write out its
-- interface and compiled object if the flag is set
compile' :: Bool -> Maybe FilePath -> L.Text -> Driver Module
compile' writeFiles mbPath bytes = failErrs $
do (m,ifaces) <- moduleSystem =<< parseModule mbPath bytes
cm <- lambdaLift =<< checkModule ifaces m
when writeFiles $
do ifaceSaved <- saveIface (genIface cm)
unless ifaceSaved (addErr (text "failed to write interface"))
modSaved <- saveModule cm
unless modSaved (addErr (text "failed to write compiled module"))
return cm
-- | Link, specialize and resolve tags, in preparation for packaging for
-- runtime, or generating code.
linkProgram :: Module -> Driver Program
linkProgram m = phase "linkProgram" $
do prog <- resolveTags =<< elimDeadCode =<< specialize =<< link m
traceMsg (text "Linked program:" $$ pp prog)
return prog
version :: Version
version = P.version
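-- A minimal usage sketch (only functions defined above are used): compile a
-- source file and link the result into a program, all inside the 'Driver' monad.
--
-- > buildProgram :: FilePath -> Driver Program
-- > buildProgram path = compileFile path >>= linkProgram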
|
GaloisInc/mistral
|
src/Mistral/Simple.hs
|
bsd-3-clause
| 2,429
| 0
| 15
| 489
| 594
| 323
| 271
| 47
| 1
|
module Main (main) where
import qualified VspDecodeTest (tests)
import qualified VspDecodeRealdata (tests)
import qualified VspEncodeTest (tests)
import Test.Framework (defaultMain, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit
main = defaultMain tests
tests = VspDecodeTest.tests ++
VspDecodeRealdata.tests ++
VspEncodeTest.tests
|
smly/haskell-xsystem
|
test/Main.hs
|
bsd-3-clause
| 380
| 0
| 7
| 53
| 93
| 57
| 36
| 11
| 1
|
module Blog.Widgets.StreamOfConsciousness.Controller where
import qualified Blog.Widgets.StreamOfConsciousness.Thought as T
import Blog.BackEnd.HttpPoller
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Concurrent.Chan
import qualified System.Log.Logger as L
data Worker = Worker { soc_controller :: SoCController
, poller :: HttpPoller }
data Snapshot = Snapshot { items :: [T.Thought]
, max_size :: Int
, version :: Int
, rendered :: String }
deriving ( Show, Read, Eq )
data SoCRequest = GetData { snapshot_handback :: MVar Snapshot }
| GetHtmlFragment { content :: MVar String }
| Update { retry :: MVar Bool
, snapshot :: Snapshot }
data SoCController = SoCController { tid :: ThreadId
, request_channel :: Chan SoCRequest }
start_soc :: Int -> IO SoCController
start_soc mx_sz = do { rc <- newChan
; let snap = Snapshot [] mx_sz 0 ""
; _tid <- forkIO $ loop rc snap
; return $ SoCController _tid rc }
stop_soc :: SoCController -> IO ()
stop_soc = killThread . tid
stop_worker :: Worker -> IO ()
stop_worker = stop_poller . poller
change_worker_polling_frequency :: Worker -> Int -> IO ()
change_worker_polling_frequency w n = change_polling_frequency (poller w) n
get_content :: SoCController -> IO String
get_content c = do { x <- newEmptyMVar
; writeChan (request_channel c) $ GetHtmlFragment x
; takeMVar x }
get_data :: SoCController -> IO Snapshot
get_data c = do { x <- newEmptyMVar
; writeChan (request_channel c) $ GetData x
; takeMVar x }
update :: SoCController -> Snapshot -> IO Bool
update c snap = do { ok <- newEmptyMVar
; writeChan (request_channel c) $ Update ok snap
; takeMVar ok }
collision_delay :: Int
collision_delay = 1000
log_handle :: String
log_handle = "StreamOfConsciousnessController"
commit :: SoCController -> [T.Thought] -> IO ()
commit socc new_items =
do { snap <- get_data socc
; L.infoM log_handle $ "Commit called for " ++ (show $ length new_items) ++ " items."
; let items' = take (max_size snap) $ T.merge new_items $ items snap
; let rendered' = T.thoughts_to_xhtml items'
; let snap' = snap { items = items'
, rendered = rendered' }
; ok <- update socc snap'
; if ok then
return ()
else
do { threadDelay collision_delay
; L.infoM log_handle $ "Collision detected; waiting."
; commit socc new_items }
}
loop :: Chan SoCRequest -> Snapshot -> IO ()
loop ch snap =
do { req <- readChan ch
; snap' <- case req of
GetHtmlFragment c ->
do { putMVar c $ rendered snap
; return snap }
GetData h ->
do { putMVar h snap
; return snap }
Update ok snap'' ->
if (version snap) == (version snap'') then
do { putMVar ok True
; let snap' = snap'' { version = (version snap) + 1 }
; return snap' }
else
do { putMVar ok False
; return snap }
; loop ch snap' }
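-- A small usage sketch (the 'T.Thought' values are assumed to come from some
-- poller; their construction is not shown here): start a controller holding at
-- most 20 items, commit some thoughts and read back the rendered fragment.
--
-- > demo :: [T.Thought] -> IO String
-- > demo ts = do { c <- start_soc 20
-- >              ; commit c ts
-- >              ; get_content c }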
|
prb/perpubplat
|
src/Blog/Widgets/StreamOfConsciousness/Controller.hs
|
bsd-3-clause
| 3,607
| 0
| 21
| 1,362
| 989
| 514
| 475
| 79
| 4
|
{-# LANGUAGE PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.CSL.Proc.Disamb
-- Copyright : (c) Andrea Rossato
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Andrea Rossato <andrea.rossato@unitn.it>
-- Stability : unstable
-- Portability : unportable
--
-- This module provides functions for processing the evaluated
-- 'Output' for citation disambiguation.
--
-- Describe the disambiguation process.
--
-----------------------------------------------------------------------------
module Text.CSL.Proc.Disamb where
import Control.Arrow ( (&&&), (>>>), second )
import Data.Char ( chr )
import Data.List ( elemIndex, find, findIndex, sortBy, mapAccumL
, nub, groupBy, isPrefixOf )
import Data.Maybe
import Data.Ord ( comparing )
import Text.CSL.Eval
import Text.CSL.Output.Plain
import Text.CSL.Reference
import Text.CSL.Style
-- | Given the 'Style', the list of references and the citation
-- groups, disambiguate citations according to the style options.
disambCitations :: Style -> [Reference] -> Citations -> [CitationGroup]
-> ([(String, String)], [CitationGroup])
disambCitations s bibs cs groups
= (,) yearSuffs citOutput
where
-- utils
when_ b f = if b then f else []
filter_ f = concatMap (map fst) . map (filter f) . map (uncurry zip)
-- the list of the position and the reference of each citation
-- for each citation group.
refs = processCites bibs cs
-- name data of name duplicates
nameDupls = getDuplNameData groups
-- citation data of ambiguous cites
duplics = getDuplCiteData hasNamesOpt $ addGName groups
-- check the options set in the style
--isByCite = getOptionVal "givenname-disambiguation-rule" (citOptions . citation $ s) == "by-cite"
disOpts = getCitDisambOptions s
hasNamesOpt = "disambiguate-add-names" `elem` disOpts
hasGNameOpt = "disambiguate-add-givenname" `elem` disOpts
hasYSuffOpt = "disambiguate-add-year-suffix" `elem` disOpts
withNames = flip map duplics $ same . proc rmNameHash . proc rmGivenNames .
map (if hasNamesOpt then disambData else return . disambYS)
needNames = filter_ (not . snd) $ zip (map disambAddNames duplics) withNames
needYSuff = filter_ snd $ zip (map disambAddLast duplics) withNames
newNames :: [CiteData]
newNames = when_ hasNamesOpt $ needNames ++ needYSuff
newGName :: [NameData]
newGName = when_ hasGNameOpt $ concatMap disambAddGivenNames nameDupls
-- the list of citations that need re-evaluation with the
-- \"disambiguate\" condition set to 'True'
reEval = let chk = if hasYSuffOpt then filter ((==) [] . citYear) else id
in chk needYSuff
reEvaluated = if or (query hasIfDis s) && reEval /= []
then map (uncurry $ reEvaluate s reEval) $ zip refs groups
else groups
addGName = proc (updateOutput [] newGName)
addNames = proc (updateOutput newNames newGName)
withYearS = if hasYSuffOpt
then map (mapCitationGroup $ setYearSuffCollision hasNamesOpt needYSuff) $ reEvaluated
else rmYearSuff $ reEvaluated
yearSuffs = when_ hasYSuffOpt . generateYearSuffix bibs . query getYearSuffixes $ withYearS
processed = if hasYSuffOpt
then proc (updateYearSuffixes yearSuffs) .
addNames $ withYearS
else addNames $ withYearS
citOutput = if disOpts /= [] then processed else groups
mapDisambData :: (Output -> Output) -> CiteData -> CiteData
mapDisambData f (CD k c ys d r y) = CD k c ys (proc f d) r y
mapCitationGroup :: ([Output] -> [Output]) -> CitationGroup -> CitationGroup
mapCitationGroup f (CG cs fm d os) = CG cs fm d (zip (map fst os) . f $ map snd os)
disambAddNames :: [CiteData] -> [CiteData]
disambAddNames needName = addLNames
where
disSolved = zip needName (disambiguate $ map disambData needName)
addLNames = map (\(c,n) -> c { disambed = if null n then collision c else head n }) disSolved
disambAddLast :: [CiteData] -> [CiteData]
disambAddLast = map last_
where
last_ c = c { disambed = if disambData c /= [] then last $ disambData c else collision c }
disambAddGivenNames :: [NameData] -> [NameData]
disambAddGivenNames needName = addGName
where
disSolved = zip needName (disambiguate $ map nameDisambData needName)
addGName = map (\(c,n) -> c { nameDataSolved = if null n then nameCollision c else head n }) disSolved
-- | Given the list of 'CiteData' with the disambiguated field set,
-- update the evaluated citations by setting the contributor list
-- accordingly.
updateOutput :: [CiteData] -> [NameData] -> Output -> Output
updateOutput c n o
| OContrib k r x _ d <- o = case elemIndex (CD k (clean x) [] [] [] []) c of
Just i -> OContrib k r (disambed $ c !! i) [] d
_ -> o
| OName _ _ [] _ <- o = o
| OName k x _ f <- o = case elemIndex (ND k (clean x) [] []) n of
Just i -> OName k (nameDataSolved $ n !! i) [] f
_ -> o
| otherwise = o
where
clean = proc rmGivenNames
-- | Evaluate again a citation group with the 'EvalState' 'disamb'
-- field set to 'True' (for matching the @\"disambiguate\"@
-- condition).
reEvaluate :: Style -> [CiteData] -> [(Cite, Reference)] -> CitationGroup -> CitationGroup
reEvaluate (Style {citation = ct, csMacros = ms , styleLocale = lo}) l cr (CG a f d os)
= CG a f d . flip concatMap (zip cr os) $
\((c,r),out) -> if refId r `elem` map key l
then return . second (flip Output emptyFormatting) $
(,) c $ evalLayout (citLayout ct) (EvalCite c) True lo ms (citOptions ct) r
else [out]
-- | Check if the 'Style' has any conditional for disambiguation. In
-- this case the conditional will be tried after all other
-- disambiguation strategies have failed. To be used with the generic
-- 'query' function.
hasIfDis :: IfThen -> [Bool]
hasIfDis o
| IfThen (Condition {disambiguation = d}) _ _ <- o = [d /= []]
| otherwise = [False ]
-- | Get the list of disambiguation options set in the 'Style' for
-- citations.
getCitDisambOptions :: Style -> [String]
getCitDisambOptions
= map fst . filter ((==) "true" . snd) .
filter (isPrefixOf "disambiguate" . fst) . citOptions . citation
-- | Group citation data (with possible alternative names) of
-- citations which have a duplicate (same 'collision' and same
-- 'citYear'). If the first 'Bool' is 'False', then we need to
-- retrieve data for year suffix disambiguation.
getDuplCiteData :: Bool -> [CitationGroup] -> [[CiteData]]
getDuplCiteData b g
= groupBy (\x y -> collide x == collide y) . sortBy (comparing collide) $ duplicates
where
whatToGet = if b then collision else disambYS
collide = proc rmNameHash . proc rmGivenNames . whatToGet
citeData = nub $ concatMap (mapGroupOutput $ getCiteData) g
duplicates = filter (collide &&& citYear >>> flip elem (getDuplNamesYear b g)) citeData
-- | For an evaluated citation get its 'CiteData'. The disambiguated
-- citation and the year fields are empty.
getCiteData :: Output -> [CiteData]
getCiteData
= contribs &&& years >>>
zipData
where
contribs x = if query contribsQ x /= []
then query contribsQ x
else [CD [] [] [] [] [] []]
yearsQ = query getYears
years o = if yearsQ o /= [] then yearsQ o else [([],[])]
zipData = uncurry . zipWith $ \c y -> if key c /= []
then c {citYear = snd y}
else c {key = fst y
,citYear = snd y}
contribsQ o
| OContrib k _ c d dd <- o = [CD k c d dd [] []]
| otherwise = []
-- | The contributors' disambiguation data, the list of names and
-- given-names, and the citation year ('OYear').
type NamesYear = ([Output],String)
-- | Get the contributors list ('OContrib') and the year occurring in
-- more than one citation.
getDuplNamesYear :: Bool -> [CitationGroup] -> [NamesYear]
getDuplNamesYear b
= nub . catMaybes . snd . mapAccumL dupl [] . getData
where
getData = nub . concatMap (mapGroupOutput $ getNamesYear b)
dupl a c = if snd c `elem` a
then (a,Just $ snd c) else (snd c:a,Nothing)
-- | Get the list of citation keys coupled with their 'NamesYear' in
-- the evaluated 'Output'. If the 'Bool' is 'False' then the function
-- retrieves the names used in citations not occurring for the first
-- time.
getNamesYear :: Bool -> Output -> [(String,NamesYear)]
getNamesYear b
= proc rmNameHash >>>
proc rmGivenNames >>>
contribs &&& years >>>
zipData
where
contribs x = if query contribsQ x /= []
then query contribsQ x
else [([],[])]
yearsQ = query getYears
years o = if yearsQ o /= [] then yearsQ o else [([],[])]
zipData = uncurry . zipWith $ \(k,c) y -> if k /= []
then (,) k (c, snd y)
else (,) (fst y) (c, snd y)
contribsQ o
| OContrib k _ c d _ <- o = [(k,if b then c else d)]
| otherwise = []
getYears :: Output -> [(String,String)]
getYears o
| OYear x k _ <- o = [(k,x)]
| otherwise = []
getDuplNameData :: [CitationGroup] -> [[NameData]]
getDuplNameData g
= groupBy (\a b -> collide a == collide b) . sortBy (comparing collide) $ duplicates
where
collide = nameCollision
nameData = nub $ concatMap (mapGroupOutput getName) g
duplicates = filter (flip elem (getDuplNames g) . collide) nameData
getDuplNames :: [CitationGroup] -> [[Output]]
getDuplNames xs
= nub . catMaybes . snd . mapAccumL dupl [] . getData $ xs
where
getData = concatMap (mapGroupOutput getName)
dupl a c = if nameCollision c `elem` map nameCollision a
then (a,Just $ nameCollision c)
else (c:a,Nothing)
getName :: Output -> [NameData]
getName = query getName'
where
getName' o
| OName i n ns _ <- o = [ND i n (n:ns) []]
| otherwise = []
generateYearSuffix :: [Reference] -> [(String, [Output])] -> [(String,String)]
generateYearSuffix refs
= concatMap (flip zip suffs) .
-- sort clashing cites using their position in the sorted bibliography
getFst . map sort' . map (filter ((/=) 0 . snd)) . map (map getP) .
-- group clashing cites
getFst . map nub . groupBy (\a b -> snd a == snd b) . sort' . filter ((/=) [] . snd)
where
sort' :: (Ord a, Ord b) => [(a,b)] -> [(a,b)]
sort' = sortBy (comparing snd)
getFst = map $ map fst
getP k = case findIndex ((==) k . refId) refs of
Just x -> (k, x + 1)
_ -> (k, 0)
suffs = l ++ [x ++ y | x <- l, y <- l ]
l = map (return . chr) [97..122]
setYearSuffCollision :: Bool -> [CiteData] -> [Output] -> [Output]
setYearSuffCollision b cs = proc (setYS cs) . (map $ \x -> if hasYearSuf x then x else addYearSuffix x)
where
setYS c o
| OYearSuf _ k _ f <- o = OYearSuf [] k (getCollision k c) f
| otherwise = o
collide = if b then disambed else disambYS
getCollision k c = case find ((==) k . key) c of
Just x -> if collide x == []
then [OStr (citYear x) emptyFormatting]
else collide x
_ -> []
updateYearSuffixes :: [(String, String)] -> Output -> Output
updateYearSuffixes yss o
| OYearSuf _ k c f <- o = case lookup k yss of
Just x -> OYearSuf x k c f
_ -> ONull
| otherwise = o
getYearSuffixes :: Output -> [(String,[Output])]
getYearSuffixes o
| OYearSuf _ k c _ <- o = [(k,c)]
| otherwise = []
rmYearSuff :: [CitationGroup] -> [CitationGroup]
rmYearSuff = proc rmYS
where
rmYS o
| OYearSuf _ _ _ _ <- o = ONull
| otherwise = o
-- List Utilities
-- | Try to disambiguate a list of lists by returning the first non
-- colliding element, if any, of each list:
--
-- > disambiguate [[1,2],[1,3],[2]] = [[2],[3],[2]]
disambiguate :: (Eq a) => [[a]] -> [[a]]
disambiguate [] = []
disambiguate l
= if hasMult l && not (allTheSame l) && hasDuplicates heads
then disambiguate (rest l)
else heads
where
heads = map head' l
rest = map (\(b,x) -> if b then tail_ x else head' x) . zip (same heads)
hasMult [] = False
hasMult (x:xs) = length x > 1 || hasMult xs
tail_ [x] = [x]
tail_ x = if null x then x else tail x
-- | For each element a list of 'Bool': 'True' if the element has a
-- duplicate in the list:
--
-- > same [1,2,1] = [True,False,True]
same :: Eq a => [a] -> [Bool]
same [] = []
same l
= map (flip elem dupl) l
where
dupl = catMaybes . snd . macc [] $ l
macc = mapAccumL $ \a x -> if x `elem` a then (a,Just x) else (x:a,Nothing)
hasDuplicates :: Eq a => [a] -> Bool
hasDuplicates = or . same
allTheSame :: Eq a => [a] -> Bool
allTheSame = and . same
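-- Quick sanity examples for the helpers above (purely illustrative):
--
-- > hasDuplicates [1,2,1] == True
-- > allTheSame [1,1,1] == True
-- > allTheSame [1,2,1] == False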
|
singingwolfboy/citeproc-hs
|
src/Text/CSL/Proc/Disamb.hs
|
bsd-3-clause
| 13,812
| 0
| 17
| 4,190
| 4,268
| 2,264
| 2,004
| 220
| 8
|
module Test.Cont where
import Test.Hspec
import Test.Core
runContTests :: IO ()
runContTests = hspec $ do
callCCTest
shiftResetTest
callCCTest :: Spec
callCCTest = do
describe "call/cc" $ do
it "works with simple input" $ do
testScheme "(call/cc (lambda (k) (k 1)))" `shouldReturn` ["1"]
testScheme "(+ (call/cc (lambda (k) (k 5))) 4)" `shouldReturn` ["9"]
    it "works with saving continuation" $ do
testScheme (concat
[ "(define cc ())"
, "cc"
, "(+ (call/cc (lambda (k) (set! cc k) 5)) 4)"
, "(cc 1)"
, "(+ (cc 1) 100)"
]) `shouldReturn` ["cc", "()", "9", "5", "5"]
shiftResetTest :: Spec
shiftResetTest = do
describe "shift/reset" $ do
it "works with simple input" $ do
testScheme "(reset (+ 1 (shift (lambda (k) (k 2))) 3))" `shouldReturn` ["6"]
testScheme "(reset (+ 1 (shift (lambda (k) (+ 10 (k 2)))) 3))" `shouldReturn` ["16"]
testScheme "(* 2 (reset (+ 1 (shift (lambda (k) (+ 10 (k 2)))) 3)))" `shouldReturn` ["32"]
it "works with saving continuation" $ do
testScheme (concat
[ "(define cc ())"
, "(* 2 (reset (+ 1 (shift (lambda (k) (set! cc k) (+ 10 (k 2)))) 3)))"
, "(cc 1)"
, "(cc 10)"
, "(+ (cc 1) 100)"
]) `shouldReturn` ["cc", "32", "5", "14", "105"]
|
amutake/psg-scheme
|
test/Test/Cont.hs
|
bsd-3-clause
| 1,495
| 0
| 17
| 529
| 292
| 156
| 136
| 36
| 1
|
instance X Y where
f = g
instance W U where
f = g
|
itchyny/vim-haskell-indent
|
test/instance/multiple_instance.out.hs
|
mit
| 55
| 0
| 5
| 19
| 27
| 14
| 13
| 4
| 0
|
module Main where
import HROOT
import Control.Applicative
main = do
tcanvas <- newTCanvas "test" "test" 640 480
t0 <- newTDatime 2002 1 1 0 0 0
x0 <- convert t0 0
t1 <- newTDatime 2002 09 23 00 00 00
x1 <- (-) <$> convert t1 0 <*> return x0
t2 <- newTDatime 2003 03 07 00 00 00
x2 <- (-) <$> convert t2 0 <*> return x0
putStrLn $ show x0
putStrLn $ show x1
putStrLn $ show x2
h1 <- newTH1F "test" "test" 100 (fromIntegral x1) (fromIntegral x2)
xaxis <- tH1GetXaxis (upcastTH1 h1)
setTimeOffset xaxis (fromIntegral x0) "local"
setTimeDisplay xaxis 1
setTimeFormat xaxis "%Y/%m/%d"
draw h1 ""
saveAs tcanvas "datetime.pdf" ""
delete h1
delete tcanvas
return ()
|
wavewave/HROOT
|
HROOT-generate/template/HROOT-math/example/datetime.hs
|
gpl-3.0
| 731
| 0
| 10
| 192
| 301
| 134
| 167
| 24
| 1
|
{-# LANGUAGE LambdaCase,TemplateHaskell, MultiWayIf #-}
-- | A complete re-implementation of the official herbstclient program
module Main where
import HLWM.IPC
import System.Console.GetOpt
import Data.List
import System.Environment
import System.Exit
import System.IO
data HCOptions = HCOpt {
newline :: Bool,
print0 :: Bool,
lastArg :: Bool,
idle :: Bool,
wait :: Bool,
count :: Int,
quiet :: Bool,
version :: Bool,
help :: Bool
}
defOpts :: HCOptions
defOpts = HCOpt {
newline = True,
print0 = False,
lastArg = False,
idle = False,
wait = False,
count = 1,
quiet = False,
version = False,
help = False
}
options :: [OptDescr (HCOptions -> HCOptions)]
options =
[ Option ['n'] ["no-newline"] (NoArg $ \o -> o { newline = False })
"Do not print a newline if output does not end with a newline."
, Option ['0'] ["print0"] (NoArg $ \o -> o { print0 = True })
"Use the null character as delimiter between the output of hooks."
, Option ['l'] ["last-arg"] (NoArg $ \o -> o { lastArg = True })
"Print only the last argument of a hook."
, Option ['i'] ["idle"] (NoArg $ \o -> o { idle = True })
"Wait for hooks instead of executing commands."
, Option ['w'] ["wait"] (NoArg $ \o -> o { wait = True })
"Same as --idle but exit after first --count hooks."
, Option ['c'] ["count"] (ReqArg (\a o -> o { count = read a }) "COUNT")
"Let --wait exit after COUNT hooks were received and printed. The default of COUNT is 1."
, Option ['q'] ["quiet"] (NoArg $ \o -> o { quiet = True })
"Do not print error messages if herbstclient cannot connect to the running herbstluftwm instance."
, Option ['v'] ["version"] (NoArg $ \o -> o { version = True })
"Print the herbstclient version. To get the herbstluftwm version, use 'herbstclient version'."
, Option ['h'] ["help"] (NoArg $ \o -> o { help = True }) "Print this help."
]
usage :: String -> String
usage name = "Usage: " ++ name ++ " [OPTION...] COMMAND [ARGS ...]"
hcOpts :: [String] -> IO (HCOptions, [String])
hcOpts argv = do
case getOpt Permute options argv of
(o,n,[] ) -> return (foldl (flip id) defOpts o, n)
(_,_,errs) -> ioError (userError (concat errs))
putStrMaybeLn :: String -> IO ()
putStrMaybeLn str
| "\n" `isSuffixOf` str = putStr str
| otherwise = putStrLn str
helpString :: String -> String
helpString name = unlines $
  [ "Usage: " ++ name ++ " [OPTION...] COMMAND [ARGS ...]"
, " " ++ name ++ " [OPTIONS] [--wait|--idle] [FILTER ...]"
, "Send a COMMAND with optional arguments ARGS to a running herbstluftwm instance."
, ""
, usageInfo "Options:" options
, "See the man page (herbstclient(1)) for more details."
]
data Wait = Infinite
| Wait Int
newtype NullPolicy = Null Bool
newtype NLPolicy = NL Bool
newtype Quiet = Quiet Bool
newtype LastArg = LastArg Bool
withQConnection :: Quiet -> a -> (HerbstConnection -> IO a) -> IO a
withQConnection q x f = withConnection f >>= \case
Nothing -> case q of
Quiet True -> return x
Quiet False -> hPutStrLn stderr "Could not connect to server" >> return x
Just y -> return y
waitForHooks :: Wait -> NullPolicy -> Quiet -> LastArg -> IO ()
waitForHooks w nl q la = withQConnection q () (doWait w)
where doWait (Wait 0) _ = return () -- TODO handle negative values
doWait w' con = do
h <- nextHook con
case la of
LastArg True | not (null h) -> putStr (last h)
_ -> putStr $ unwords h
case nl of
Null True -> putStr "\0"
Null False -> putStr "\n"
case w' of
Infinite -> doWait Infinite con
Wait x -> doWait (Wait (x-1)) con
send :: [String] -> NLPolicy -> Quiet -> IO ExitCode
send args nl q = withQConnection q (ExitFailure 1)$ \con -> do
(stat, ret) <- sendCommand con args
case nl of
NL False -> putStr ret
NL True -> if null ret || last ret == '\n'
then putStr ret else putStrLn ret
return $ if stat == 0 then ExitSuccess else ExitFailure stat
main :: IO ()
main = do
name <- getProgName
(opts, args) <- getArgs >>= hcOpts
if | help opts -> putStr $ helpString name
| version opts -> putStrLn "A friendly haskell implementation of herbstclient"
| idle opts -> waitForHooks Infinite (Null (print0 opts))
(Quiet (quiet opts)) (LastArg (lastArg opts))
| wait opts -> waitForHooks (Wait (count opts)) (Null (print0 opts))
(Quiet (quiet opts)) (LastArg (lastArg opts))
| otherwise -> send args (NL (newline opts)) (Quiet (quiet opts))
>>= exitWith
|
rootzlevel/hlwm-haskell
|
examples/HerbstClient.hs
|
bsd-2-clause
| 4,711
| 0
| 17
| 1,257
| 1,564
| 823
| 741
| 113
| 5
|
-- | Rendering arbitrary data, and filling in holes in the data with variables.
module Test.SmartCheck.Render
( renderWithVars
, smartPrtLn
) where
import Test.SmartCheck.Types
import Test.SmartCheck.Args hiding (format)
import Test.SmartCheck.DataToTree
import Data.Maybe
import Data.Tree
import Data.List
import Data.Char
import Control.Monad
--------------------------------------------------------------------------------
smartPrefix :: String
smartPrefix = "*** "
smartPrtLn :: String -> IO ()
smartPrtLn = putStrLn . (smartPrefix ++)
--------------------------------------------------------------------------------
-- only print if variable list is non-empty.
renderWithVars :: SubTypes a => Format -> a -> Replace Idx -> IO ()
renderWithVars format d idxs = do
prtVars "values" valsLen valVars
prtVars "constructors" constrsLen constrVars
constrArgs
putStrLn ""
putStrLn $ replaceWithVars format d idxs' (Replace valVars constrVars)
putStrLn ""
where
idxs' = let cs = unConstrs idxs \\ unVals idxs in
idxs { unConstrs = cs }
constrArgs =
unless (constrsLen == 0) $ putStrLn " there exist arguments x̅ s.t."
prtVars kind len vs =
when (len > 0)
( putStrLn $ "forall " ++ kind ++ " "
++ unwords (take len vs) ++ ":")
vars str = map (\(x,i) -> x ++ show i) (zip (repeat str) [0::Integer ..])
valVars = vars "x"
constrVars = vars "C"
valsLen = length (unVals idxs')
constrsLen = length (unConstrs idxs')
--------------------------------------------------------------------------------
type VarRepl = Either String String
-- | At each index into d from idxs, replace the subvalue there with a fresh variable.
replaceWithVars :: SubTypes a
=> Format -> a -> Replace Idx -> Replace String -> String
replaceWithVars format d idxs vars =
case format of
PrintTree -> drawTree strTree
-- We have to be careful here. We can't just show d and then find the
-- matching substrings to replace, since the same substring may show up in
-- multiple places. Rather, we have to recursively descend down the tree of
-- substrings, finding matches, til we hit our variable.
PrintString -> stitchTree strTree
where
strTree :: Tree String
strTree = remSubVars (foldl' f t zipRepl)
where
-- Now we'll remove everything after the initial Rights, which are below
-- variables.
remSubVars (Node (Left s ) sf) = Node s (map remSubVars sf)
remSubVars (Node (Right s) _ ) = Node s []
f :: Tree VarRepl -> (String, Idx) -> Tree VarRepl
f tree (var, idx) = Node (rootLabel tree) $
case getIdxForest sf idx of
Nothing -> errorMsg "replaceWithVars1"
Just (Node (Right _) _) -> sf -- Don't replace anything
Just (Node (Left _) _) -> forestReplaceChildren sf idx (Right var)
where
sf = subForest tree
-- A tree representation of the data turned into a tree of Strings showing the
-- data. showForest is one of our generic methods.
t :: Tree VarRepl
t = let forest = showForest d in
if null forest then errorMsg "replaceWithVars2"
else fmap Left (head forest) -- Should be a singleton
-- Note: we put value idxs before constrs, since they take precedence.
zipRepl :: [(String, Idx)]
zipRepl = zip (unVals vars) (unVals idxs)
++ zip (unConstrs vars) (unConstrs idxs)
--------------------------------------------------------------------------------
-- | Make a string out a Tree of Strings. Put parentheses around complex
-- subterms, where "complex" means we have two or more items (i.e., there's a
-- space).
stitchTree :: Tree String -> String
stitchTree = stitch
where
stitch (Node str forest) = str ++ " " ++ unwords (map stitchTree' forest)
stitchTree' (Node str []) = if isJust $ find isSpace str
then '(' : str ++ ")"
else str
stitchTree' node = '(' : stitch node ++ ")"
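-- A small example of the parenthesisation rule above (the node labels are
-- illustrative):
--
-- > stitchTree (Node "Add" [Node "1" [], Node "Mul 2 3" []])
-- >   == "Add 1 (Mul 2 3)"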
--------------------------------------------------------------------------------
|
leepike/SmartCheck
|
src/Test/SmartCheck/Render.hs
|
bsd-3-clause
| 4,070
| 0
| 15
| 932
| 982
| 506
| 476
| 68
| 6
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="it-IT">
<title>Revisit | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/revisit/resources/help_it_IT/helpset_it_IT.hs
|
apache-2.0
| 969
| 89
| 29
| 159
| 392
| 212
| 180
| -1
| -1
|
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Sandbox
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- UI for the sandboxing functionality.
-----------------------------------------------------------------------------
module Distribution.Client.Sandbox (
sandboxInit,
sandboxDelete,
sandboxAddSource,
sandboxAddSourceSnapshot,
sandboxDeleteSource,
sandboxListSources,
sandboxHcPkg,
dumpPackageEnvironment,
withSandboxBinDirOnSearchPath,
getSandboxConfigFilePath,
loadConfigOrSandboxConfig,
findSavedDistPref,
initPackageDBIfNeeded,
maybeWithSandboxDirOnSearchPath,
WereDepsReinstalled(..),
reinstallAddSourceDeps,
maybeReinstallAddSourceDeps,
SandboxPackageInfo(..),
maybeWithSandboxPackageInfo,
tryGetIndexFilePath,
sandboxBuildDir,
getInstalledPackagesInSandbox,
updateSandboxConfigFileFlag,
updateInstallDirs,
-- FIXME: move somewhere else
configPackageDB', configCompilerAux'
) where
import Distribution.Client.Setup
( SandboxFlags(..), ConfigFlags(..), ConfigExFlags(..), InstallFlags(..)
, GlobalFlags(..), defaultConfigExFlags, defaultInstallFlags
, defaultSandboxLocation, globalRepos )
import Distribution.Client.Sandbox.Timestamp ( listModifiedDeps
, maybeAddCompilerTimestampRecord
, withAddTimestamps
, withRemoveTimestamps )
import Distribution.Client.Config
( SavedConfig(..), defaultUserInstall, loadConfig )
import Distribution.Client.Dependency ( foldProgress )
import Distribution.Client.IndexUtils ( BuildTreeRefType(..) )
import Distribution.Client.Install ( InstallArgs,
makeInstallContext,
makeInstallPlan,
processInstallPlan )
import Distribution.Utils.NubList ( fromNubList )
import Distribution.Client.Sandbox.PackageEnvironment
( PackageEnvironment(..), PackageEnvironmentType(..)
, createPackageEnvironmentFile, classifyPackageEnvironment
, tryLoadSandboxPackageEnvironmentFile, loadUserConfig
, commentPackageEnvironment, showPackageEnvironmentWithComments
, sandboxPackageEnvironmentFile, userPackageEnvironmentFile )
import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..)
, UseSandbox(..) )
import Distribution.Client.SetupWrapper
( SetupScriptOptions(..), defaultSetupScriptOptions )
import Distribution.Client.Types ( PackageLocation(..)
, SourcePackage(..) )
import Distribution.Client.Utils ( inDir, tryCanonicalizePath
, tryFindAddSourcePackageDesc )
import Distribution.PackageDescription.Configuration
( flattenPackageDescription )
import Distribution.PackageDescription.Parse ( readPackageDescription )
import Distribution.Simple.Compiler ( Compiler(..), PackageDB(..)
, PackageDBStack )
import Distribution.Simple.Configure ( configCompilerAuxEx
, interpretPackageDbFlags
, getPackageDBContents
, findDistPref )
import Distribution.Simple.PreProcess ( knownSuffixHandlers )
import Distribution.Simple.Program ( ProgramConfiguration )
import Distribution.Simple.Setup ( Flag(..), HaddockFlags(..)
, fromFlagOrDefault )
import Distribution.Simple.SrcDist ( prepareTree )
import Distribution.Simple.Utils ( die, debug, notice, info, warn
, debugNoWrap, defaultPackageDesc
, intercalate, topHandlerWith
, createDirectoryIfMissingVerbose )
import Distribution.Package ( Package(..) )
import Distribution.System ( Platform )
import Distribution.Text ( display )
import Distribution.Verbosity ( Verbosity, lessVerbose )
import Distribution.Compat.Environment ( lookupEnv, setEnv )
import Distribution.Client.Compat.FilePerms ( setFileHidden )
import qualified Distribution.Client.Sandbox.Index as Index
import Distribution.Simple.PackageIndex ( InstalledPackageIndex )
import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex
import qualified Distribution.Simple.Register as Register
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Exception ( assert, bracket_ )
import Control.Monad ( forM, liftM2, unless, when )
import Data.Bits ( shiftL, shiftR, xor )
import Data.Char ( ord )
import Data.Foldable ( forM_ )
import Data.IORef ( newIORef, writeIORef, readIORef )
import Data.List ( delete, foldl' )
import Data.Maybe ( fromJust )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid ( mempty, mappend )
#endif
import Data.Word ( Word32 )
import Numeric ( showHex )
import System.Directory ( createDirectory
, doesDirectoryExist
, doesFileExist
, getCurrentDirectory
, removeDirectoryRecursive
, removeFile
, renameDirectory )
import System.FilePath ( (</>), equalFilePath
, getSearchPath
, searchPathSeparator
, takeDirectory )
--
-- * Constants
--
-- | The name of the sandbox subdirectory where we keep snapshots of add-source
-- dependencies.
snapshotDirectoryName :: FilePath
snapshotDirectoryName = "snapshots"
-- | Non-standard build dir that is used for building add-source deps instead of
-- "dist". Fixes surprising behaviour in some cases (see issue #1281).
sandboxBuildDir :: FilePath -> FilePath
sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex sandboxDirHash ""
where
sandboxDirHash = jenkins sandboxDir
-- See http://en.wikipedia.org/wiki/Jenkins_hash_function
jenkins :: String -> Word32
jenkins str = loop_finish $ foldl' loop 0 str
where
loop :: Word32 -> Char -> Word32
loop hash key_i' = hash'''
where
key_i = toEnum . ord $ key_i'
hash' = hash + key_i
hash'' = hash' + (shiftL hash' 10)
hash''' = hash'' `xor` (shiftR hash'' 6)
loop_finish :: Word32 -> Word32
loop_finish hash = hash'''
where
hash' = hash + (shiftL hash 3)
hash'' = hash' `xor` (shiftR hash' 11)
hash''' = hash'' + (shiftL hash'' 15)
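-- A small usage note (the path is illustrative): the build directory is a pure
-- function of the sandbox path, so e.g.
--
-- > sandboxBuildDir "/home/user/proj/.cabal-sandbox"
--
-- always yields the same @dist/dist-sandbox-<hex>@ name for that path, while
-- different sandbox paths get distinct build directories (up to hash collisions).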
--
-- * Basic sandbox functions.
--
-- | If @--sandbox-config-file@ wasn't given on the command-line, set it to the
-- value of the @CABAL_SANDBOX_CONFIG@ environment variable, or else to
-- 'NoFlag'.
updateSandboxConfigFileFlag :: GlobalFlags -> IO GlobalFlags
updateSandboxConfigFileFlag globalFlags =
case globalSandboxConfigFile globalFlags of
Flag _ -> return globalFlags
NoFlag -> do
fp <- lookupEnv "CABAL_SANDBOX_CONFIG"
forM_ fp $ \fp' -> do -- Check for existence if environment variable set
exists <- doesFileExist fp'
unless exists $ die $ "Cabal sandbox file in $CABAL_SANDBOX_CONFIG does not exist: " ++ fp'
let f' = maybe NoFlag Flag fp
return globalFlags { globalSandboxConfigFile = f' }
-- | Return the path to the sandbox config file - either the default or the one
-- specified with @--sandbox-config-file@.
getSandboxConfigFilePath :: GlobalFlags -> IO FilePath
getSandboxConfigFilePath globalFlags = do
let sandboxConfigFileFlag = globalSandboxConfigFile globalFlags
case sandboxConfigFileFlag of
NoFlag -> do pkgEnvDir <- getCurrentDirectory
return (pkgEnvDir </> sandboxPackageEnvironmentFile)
Flag path -> return path
-- | Load the @cabal.sandbox.config@ file (and possibly the optional
-- @cabal.config@). In addition to a @PackageEnvironment@, also return a
-- canonical path to the sandbox. Exit with error if the sandbox directory or
-- the package environment file do not exist.
tryLoadSandboxConfig :: Verbosity -> GlobalFlags
-> IO (FilePath, PackageEnvironment)
tryLoadSandboxConfig verbosity globalFlags = do
path <- getSandboxConfigFilePath globalFlags
tryLoadSandboxPackageEnvironmentFile verbosity path
(globalConfigFile globalFlags)
-- | Return the name of the package index file for this package environment.
tryGetIndexFilePath :: SavedConfig -> IO FilePath
tryGetIndexFilePath config = tryGetIndexFilePath' (savedGlobalFlags config)
-- | The same as 'tryGetIndexFilePath', but takes 'GlobalFlags' instead of
-- 'SavedConfig'.
tryGetIndexFilePath' :: GlobalFlags -> IO FilePath
tryGetIndexFilePath' globalFlags = do
let paths = fromNubList $ globalLocalRepos globalFlags
case paths of
[] -> die $ "Distribution.Client.Sandbox.tryGetIndexFilePath: " ++
"no local repos found. " ++ checkConfiguration
_ -> return $ (last paths) </> Index.defaultIndexFileName
where
checkConfiguration = "Please check your configuration ('"
++ userPackageEnvironmentFile ++ "')."
-- | Try to extract a 'PackageDB' from 'ConfigFlags'. Gives a better error
-- message than just pattern-matching.
getSandboxPackageDB :: ConfigFlags -> IO PackageDB
getSandboxPackageDB configFlags = do
case configPackageDBs configFlags of
[Just sandboxDB@(SpecificPackageDB _)] -> return sandboxDB
-- TODO: should we allow multiple package DBs (e.g. with 'inherit')?
[] ->
die $ "Sandbox package DB is not specified. " ++ sandboxConfigCorrupt
[_] ->
die $ "Unexpected contents of the 'package-db' field. "
++ sandboxConfigCorrupt
_ ->
die $ "Too many package DBs provided. " ++ sandboxConfigCorrupt
where
sandboxConfigCorrupt = "Your 'cabal.sandbox.config' is probably corrupt."
-- | Which packages are installed in the sandbox package DB?
getInstalledPackagesInSandbox :: Verbosity -> ConfigFlags
-> Compiler -> ProgramConfiguration
-> IO InstalledPackageIndex
getInstalledPackagesInSandbox verbosity configFlags comp conf = do
sandboxDB <- getSandboxPackageDB configFlags
getPackageDBContents verbosity comp sandboxDB conf
-- | Temporarily add $SANDBOX_DIR/bin to $PATH.
withSandboxBinDirOnSearchPath :: FilePath -> IO a -> IO a
withSandboxBinDirOnSearchPath sandboxDir = bracket_ addBinDir rmBinDir
where
-- TODO: Instead of modifying the global process state, it'd be better to
-- set the environment individually for each subprocess invocation. This
-- will have to wait until the Shell monad is implemented; without it the
-- required changes are too intrusive.
addBinDir :: IO ()
addBinDir = do
mbOldPath <- lookupEnv "PATH"
let newPath = maybe sandboxBin ((++) sandboxBin . (:) searchPathSeparator)
mbOldPath
setEnv "PATH" newPath
rmBinDir :: IO ()
rmBinDir = do
oldPath <- getSearchPath
let newPath = intercalate [searchPathSeparator]
(delete sandboxBin oldPath)
setEnv "PATH" newPath
sandboxBin = sandboxDir </> "bin"
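-- A minimal usage sketch (the sandbox path is illustrative): run an action with
-- the sandbox's bin directory temporarily added to $PATH.
--
-- > withSandboxBinDirOnSearchPath "/home/user/proj/.cabal-sandbox" $
-- >   putStrLn "PATH now contains the sandbox bin directory"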
-- | Initialise a package DB for this compiler if it doesn't exist.
initPackageDBIfNeeded :: Verbosity -> ConfigFlags
-> Compiler -> ProgramConfiguration
-> IO ()
initPackageDBIfNeeded verbosity configFlags comp conf = do
SpecificPackageDB dbPath <- getSandboxPackageDB configFlags
packageDBExists <- doesDirectoryExist dbPath
unless packageDBExists $
Register.initPackageDB verbosity comp conf dbPath
when packageDBExists $
debug verbosity $ "The package database already exists: " ++ dbPath
-- | Entry point for the 'cabal sandbox dump-pkgenv' command.
dumpPackageEnvironment :: Verbosity -> SandboxFlags -> GlobalFlags -> IO ()
dumpPackageEnvironment verbosity _sandboxFlags globalFlags = do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
commentPkgEnv <- commentPackageEnvironment sandboxDir
putStrLn . showPackageEnvironmentWithComments (Just commentPkgEnv) $ pkgEnv
-- | Entry point for the 'cabal sandbox init' command.
sandboxInit :: Verbosity -> SandboxFlags -> GlobalFlags -> IO ()
sandboxInit verbosity sandboxFlags globalFlags = do
-- Warn if there's a 'cabal-dev' sandbox.
isCabalDevSandbox <- liftM2 (&&) (doesDirectoryExist "cabal-dev")
(doesFileExist $ "cabal-dev" </> "cabal.config")
when isCabalDevSandbox $
warn verbosity $
"You are apparently using a legacy (cabal-dev) sandbox. "
++ "Legacy sandboxes may interact badly with native Cabal sandboxes. "
++ "You may want to delete the 'cabal-dev' directory to prevent issues."
-- Create the sandbox directory.
let sandboxDir' = fromFlagOrDefault defaultSandboxLocation
(sandboxLocation sandboxFlags)
createDirectoryIfMissingVerbose verbosity True sandboxDir'
sandboxDir <- tryCanonicalizePath sandboxDir'
setFileHidden sandboxDir
-- Determine which compiler to use (using the value from ~/.cabal/config).
userConfig <- loadConfig verbosity (globalConfigFile globalFlags)
(comp, platform, conf) <- configCompilerAuxEx (savedConfigureFlags userConfig)
-- Create the package environment file.
pkgEnvFile <- getSandboxConfigFilePath globalFlags
createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile comp platform
(_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
let config = pkgEnvSavedConfig pkgEnv
configFlags = savedConfigureFlags config
-- Create the index file if it doesn't exist.
indexFile <- tryGetIndexFilePath config
indexFileExists <- doesFileExist indexFile
if indexFileExists
then notice verbosity $ "Using an existing sandbox located at " ++ sandboxDir
else notice verbosity $ "Creating a new sandbox at " ++ sandboxDir
Index.createEmpty verbosity indexFile
-- Create the package DB for the default compiler.
initPackageDBIfNeeded verbosity configFlags comp conf
maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile
(compilerId comp) platform
-- | Entry point for the 'cabal sandbox delete' command.
sandboxDelete :: Verbosity -> SandboxFlags -> GlobalFlags -> IO ()
sandboxDelete verbosity _sandboxFlags globalFlags = do
(useSandbox, _) <- loadConfigOrSandboxConfig
verbosity
globalFlags { globalRequireSandbox = Flag False }
case useSandbox of
NoSandbox -> warn verbosity "Not in a sandbox."
UseSandbox sandboxDir -> do
curDir <- getCurrentDirectory
pkgEnvFile <- getSandboxConfigFilePath globalFlags
-- Remove the @cabal.sandbox.config@ file, unless it's in a non-standard
-- location.
let isNonDefaultConfigLocation = not $ equalFilePath pkgEnvFile $
curDir </> sandboxPackageEnvironmentFile
if isNonDefaultConfigLocation
then warn verbosity $ "Sandbox config file is in non-default location: '"
++ pkgEnvFile ++ "'.\n Please delete manually."
else removeFile pkgEnvFile
-- Remove the sandbox directory, unless we're using a shared sandbox.
let isNonDefaultSandboxLocation = not $ equalFilePath sandboxDir $
curDir </> defaultSandboxLocation
when isNonDefaultSandboxLocation $
die $ "Non-default sandbox location used: '" ++ sandboxDir
++ "'.\nAssuming a shared sandbox. Please delete '"
++ sandboxDir ++ "' manually."
notice verbosity $ "Deleting the sandbox located at " ++ sandboxDir
removeDirectoryRecursive sandboxDir
-- Common implementation of 'sandboxAddSource' and 'sandboxAddSourceSnapshot'.
doAddSource :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment
-> BuildTreeRefType
-> IO ()
doAddSource verbosity buildTreeRefs sandboxDir pkgEnv refType = do
let savedConfig = pkgEnvSavedConfig pkgEnv
indexFile <- tryGetIndexFilePath savedConfig
-- If we're running 'sandbox add-source' for the first time for this compiler,
-- we need to create an initial timestamp record.
(comp, platform, _) <- configCompilerAuxEx . savedConfigureFlags $ savedConfig
maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile
(compilerId comp) platform
withAddTimestamps sandboxDir $ do
-- FIXME: path canonicalisation is done in addBuildTreeRefs, but we do it
-- twice because of the timestamps file.
buildTreeRefs' <- mapM tryCanonicalizePath buildTreeRefs
Index.addBuildTreeRefs verbosity indexFile buildTreeRefs' refType
return buildTreeRefs'
-- | Entry point for the 'cabal sandbox add-source' command.
sandboxAddSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags
-> IO ()
sandboxAddSource verbosity buildTreeRefs sandboxFlags globalFlags = do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
if fromFlagOrDefault False (sandboxSnapshot sandboxFlags)
then sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv
else doAddSource verbosity buildTreeRefs sandboxDir pkgEnv LinkRef
-- | Entry point for the 'cabal sandbox add-source --snapshot' command.
sandboxAddSourceSnapshot :: Verbosity -> [FilePath] -> FilePath
-> PackageEnvironment
-> IO ()
sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv = do
let snapshotDir = sandboxDir </> snapshotDirectoryName
-- Use 'D.S.SrcDist.prepareTree' to copy each package's files to our private
-- location.
createDirectoryIfMissingVerbose verbosity True snapshotDir
-- Collect the package descriptions first, so that if some path does not refer
-- to a cabal package, we fail immediately.
pkgs <- forM buildTreeRefs $ \buildTreeRef ->
inDir (Just buildTreeRef) $
return . flattenPackageDescription
=<< readPackageDescription verbosity
=<< defaultPackageDesc verbosity
-- Copy the package sources to "snapshots/$PKGNAME-$VERSION-tmp". If
-- 'prepareTree' throws an error at any point, the old snapshots will still be
-- in consistent state.
tmpDirs <- forM (zip buildTreeRefs pkgs) $ \(buildTreeRef, pkg) ->
inDir (Just buildTreeRef) $ do
let targetDir = snapshotDir </> (display . packageId $ pkg)
targetTmpDir = targetDir ++ "-tmp"
dirExists <- doesDirectoryExist targetTmpDir
when dirExists $
        removeDirectoryRecursive targetTmpDir
createDirectory targetTmpDir
prepareTree verbosity pkg Nothing targetTmpDir knownSuffixHandlers
return (targetTmpDir, targetDir)
-- Now rename the "snapshots/$PKGNAME-$VERSION-tmp" dirs to
-- "snapshots/$PKGNAME-$VERSION".
snapshots <- forM tmpDirs $ \(targetTmpDir, targetDir) -> do
dirExists <- doesDirectoryExist targetDir
when dirExists $
removeDirectoryRecursive targetDir
renameDirectory targetTmpDir targetDir
return targetDir
-- Once the packages are copied, just 'add-source' them as usual.
doAddSource verbosity snapshots sandboxDir pkgEnv SnapshotRef
-- | Entry point for the 'cabal sandbox delete-source' command.
sandboxDeleteSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags
-> IO ()
sandboxDeleteSource verbosity buildTreeRefs _sandboxFlags globalFlags = do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv)
withRemoveTimestamps sandboxDir $ do
Index.removeBuildTreeRefs verbosity indexFile buildTreeRefs
notice verbosity $ "Note: 'sandbox delete-source' only unregisters the " ++
"source dependency, but does not remove the package " ++
"from the sandbox package DB.\n\n" ++
"Use 'sandbox hc-pkg -- unregister' to do that."
-- | Entry point for the 'cabal sandbox list-sources' command.
sandboxListSources :: Verbosity -> SandboxFlags -> GlobalFlags
-> IO ()
sandboxListSources verbosity _sandboxFlags globalFlags = do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
indexFile <- tryGetIndexFilePath (pkgEnvSavedConfig pkgEnv)
refs <- Index.listBuildTreeRefs verbosity
Index.ListIgnored Index.LinksAndSnapshots indexFile
when (null refs) $
notice verbosity $ "Index file '" ++ indexFile
++ "' has no references to local build trees."
when (not . null $ refs) $ do
notice verbosity $ "Source dependencies registered "
++ "in the current sandbox ('" ++ sandboxDir ++ "'):\n\n"
mapM_ putStrLn refs
notice verbosity $ "\nTo unregister source dependencies, "
++ "use the 'sandbox delete-source' command."
-- | Entry point for the 'cabal sandbox hc-pkg' command. Invokes the @hc-pkg@
-- tool with provided arguments, restricted to the sandbox.
sandboxHcPkg :: Verbosity -> SandboxFlags -> GlobalFlags -> [String] -> IO ()
sandboxHcPkg verbosity _sandboxFlags globalFlags extraArgs = do
(_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
let configFlags = savedConfigureFlags . pkgEnvSavedConfig $ pkgEnv
dbStack = configPackageDB' configFlags
(comp, _platform, conf) <- configCompilerAux' configFlags
Register.invokeHcPkg verbosity comp conf dbStack extraArgs
updateInstallDirs :: Flag Bool
-> (UseSandbox, SavedConfig) -> (UseSandbox, SavedConfig)
updateInstallDirs userInstallFlag (useSandbox, savedConfig) =
case useSandbox of
NoSandbox ->
let savedConfig' = savedConfig {
savedConfigureFlags = configureFlags {
configInstallDirs = installDirs
}
}
in (useSandbox, savedConfig')
_ -> (useSandbox, savedConfig)
where
configureFlags = savedConfigureFlags savedConfig
userInstallDirs = savedUserInstallDirs savedConfig
globalInstallDirs = savedGlobalInstallDirs savedConfig
installDirs | userInstall = userInstallDirs
| otherwise = globalInstallDirs
userInstall = fromFlagOrDefault defaultUserInstall
(configUserInstall configureFlags `mappend` userInstallFlag)
-- | Check which type of package environment we're in and return a
-- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates
-- whether we're working in a sandbox.
loadConfigOrSandboxConfig :: Verbosity
-> GlobalFlags -- ^ For @--config-file@ and
-- @--sandbox-config-file@.
-> IO (UseSandbox, SavedConfig)
loadConfigOrSandboxConfig verbosity globalFlags = do
let configFileFlag = globalConfigFile globalFlags
sandboxConfigFileFlag = globalSandboxConfigFile globalFlags
ignoreSandboxFlag = globalIgnoreSandbox globalFlags
pkgEnvDir <- getPkgEnvDir sandboxConfigFileFlag
pkgEnvType <- classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag
ignoreSandboxFlag
case pkgEnvType of
-- A @cabal.sandbox.config@ file (and possibly @cabal.config@) is present.
SandboxPackageEnvironment -> do
(sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags
-- ^ Prints an error message and exits on error.
let config = pkgEnvSavedConfig pkgEnv
return (UseSandbox sandboxDir, config)
-- Only @cabal.config@ is present.
UserPackageEnvironment -> do
config <- loadConfig verbosity configFileFlag
userConfig <- loadUserConfig verbosity pkgEnvDir
let config' = config `mappend` userConfig
dieIfSandboxRequired config'
return (NoSandbox, config')
-- Neither @cabal.sandbox.config@ nor @cabal.config@ are present.
AmbientPackageEnvironment -> do
config <- loadConfig verbosity configFileFlag
dieIfSandboxRequired config
return (NoSandbox, config)
where
-- Return the path to the package environment directory - either the
-- current directory or the one that @--sandbox-config-file@ resides in.
getPkgEnvDir :: (Flag FilePath) -> IO FilePath
getPkgEnvDir sandboxConfigFileFlag = do
case sandboxConfigFileFlag of
NoFlag -> getCurrentDirectory
Flag path -> tryCanonicalizePath . takeDirectory $ path
-- Die if @--require-sandbox@ was specified and we're not inside a sandbox.
dieIfSandboxRequired :: SavedConfig -> IO ()
dieIfSandboxRequired config = checkFlag flag
where
flag = (globalRequireSandbox . savedGlobalFlags $ config)
`mappend` (globalRequireSandbox globalFlags)
checkFlag (Flag True) =
die $ "'require-sandbox' is set to True, but no sandbox is present. "
++ "Use '--no-require-sandbox' if you want to override "
++ "'require-sandbox' temporarily."
checkFlag (Flag False) = return ()
checkFlag (NoFlag) = return ()
-- | Return the saved \"dist/\" prefix, or the default prefix.
findSavedDistPref :: SavedConfig -> Flag FilePath -> IO FilePath
findSavedDistPref config flagDistPref = do
let defDistPref = useDistPref defaultSetupScriptOptions
flagDistPref' = configDistPref (savedConfigureFlags config)
`mappend` flagDistPref
findDistPref defDistPref flagDistPref'
-- | If we're in a sandbox, call @withSandboxBinDirOnSearchPath@, otherwise do
-- nothing.
maybeWithSandboxDirOnSearchPath :: UseSandbox -> IO a -> IO a
maybeWithSandboxDirOnSearchPath NoSandbox act = act
maybeWithSandboxDirOnSearchPath (UseSandbox sandboxDir) act =
withSandboxBinDirOnSearchPath sandboxDir $ act
-- | Had reinstallAddSourceDeps actually reinstalled any dependencies?
data WereDepsReinstalled = ReinstalledSomeDeps | NoDepsReinstalled
-- | Reinstall those add-source dependencies that have been modified since
-- we've last installed them. Assumes that we're working inside a sandbox.
reinstallAddSourceDeps :: Verbosity
-> ConfigFlags -> ConfigExFlags
-> InstallFlags -> GlobalFlags
-> FilePath
-> IO WereDepsReinstalled
reinstallAddSourceDeps verbosity configFlags' configExFlags
installFlags globalFlags sandboxDir = topHandler' $ do
let sandboxDistPref = sandboxBuildDir sandboxDir
configFlags = configFlags'
{ configDistPref = Flag sandboxDistPref }
haddockFlags = mempty
{ haddockDistPref = Flag sandboxDistPref }
(comp, platform, conf) <- configCompilerAux' configFlags
retVal <- newIORef NoDepsReinstalled
withSandboxPackageInfo verbosity configFlags globalFlags
comp platform conf sandboxDir $ \sandboxPkgInfo ->
unless (null $ modifiedAddSourceDependencies sandboxPkgInfo) $ do
let args :: InstallArgs
args = ((configPackageDB' configFlags)
,(globalRepos globalFlags)
,comp, platform, conf
,UseSandbox sandboxDir, Just sandboxPkgInfo
,globalFlags, configFlags, configExFlags, installFlags
,haddockFlags)
-- This can actually be replaced by a call to 'install', but we use a
-- lower-level API because of layer separation reasons. Additionally, we
-- might want to use some lower-level features this in the future.
withSandboxBinDirOnSearchPath sandboxDir $ do
installContext <- makeInstallContext verbosity args Nothing
installPlan <- foldProgress logMsg die' return =<<
makeInstallPlan verbosity args installContext
processInstallPlan verbosity args installContext installPlan
writeIORef retVal ReinstalledSomeDeps
readIORef retVal
where
die' message = die (message ++ installFailedInSandbox)
-- TODO: use a better error message, remove duplication.
installFailedInSandbox =
"Note: when using a sandbox, all packages are required to have consistent dependencies. Try reinstalling/unregistering the offending packages or recreating the sandbox."
logMsg message rest = debugNoWrap verbosity message >> rest
topHandler' = topHandlerWith $ \_ -> do
warn verbosity "Couldn't reinstall some add-source dependencies."
-- Here we can't know whether any deps have been reinstalled, so we have
-- to be conservative.
return ReinstalledSomeDeps
-- | Produce a 'SandboxPackageInfo' and feed it to the given action. Note that
-- we don't update the timestamp file here - this is done in
-- 'postInstallActions'.
withSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags
-> Compiler -> Platform -> ProgramConfiguration
-> FilePath
-> (SandboxPackageInfo -> IO ())
-> IO ()
withSandboxPackageInfo verbosity configFlags globalFlags
comp platform conf sandboxDir cont = do
-- List all add-source deps.
indexFile <- tryGetIndexFilePath' globalFlags
buildTreeRefs <- Index.listBuildTreeRefs verbosity
Index.DontListIgnored Index.OnlyLinks indexFile
let allAddSourceDepsSet = S.fromList buildTreeRefs
-- List all packages installed in the sandbox.
installedPkgIndex <- getInstalledPackagesInSandbox verbosity
configFlags comp conf
let err = "Error reading sandbox package information."
-- Get the package descriptions for all add-source deps.
depsCabalFiles <- mapM (flip tryFindAddSourcePackageDesc err) buildTreeRefs
depsPkgDescs <- mapM (readPackageDescription verbosity) depsCabalFiles
let depsMap = M.fromList (zip buildTreeRefs depsPkgDescs)
isInstalled pkgid = not . null
. InstalledPackageIndex.lookupSourcePackageId installedPkgIndex $ pkgid
installedDepsMap = M.filter (isInstalled . packageId) depsMap
-- Get the package ids of modified (and installed) add-source deps.
modifiedAddSourceDeps <- listModifiedDeps verbosity sandboxDir
(compilerId comp) platform installedDepsMap
-- 'fromJust' here is safe because 'modifiedAddSourceDeps' are guaranteed to
-- be a subset of the keys of 'depsMap'.
let modifiedDeps = [ (modDepPath, fromJust $ M.lookup modDepPath depsMap)
| modDepPath <- modifiedAddSourceDeps ]
modifiedDepsMap = M.fromList modifiedDeps
assert (all (`S.member` allAddSourceDepsSet) modifiedAddSourceDeps) (return ())
if (null modifiedDeps)
then info verbosity $ "Found no modified add-source deps."
else notice verbosity $ "Some add-source dependencies have been modified. "
++ "They will be reinstalled..."
-- Get the package ids of the remaining add-source deps (some are possibly not
-- installed).
let otherDeps = M.assocs (depsMap `M.difference` modifiedDepsMap)
-- Finally, assemble a 'SandboxPackageInfo'.
cont $ SandboxPackageInfo (map toSourcePackage modifiedDeps)
(map toSourcePackage otherDeps) installedPkgIndex allAddSourceDepsSet
where
toSourcePackage (path, pkgDesc) = SourcePackage
(packageId pkgDesc) pkgDesc (LocalUnpackedPackage path) Nothing
-- | Same as 'withSandboxPackageInfo' if we're inside a sandbox and a no-op
-- otherwise.
maybeWithSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags
-> Compiler -> Platform -> ProgramConfiguration
-> UseSandbox
-> (Maybe SandboxPackageInfo -> IO ())
-> IO ()
maybeWithSandboxPackageInfo verbosity configFlags globalFlags
comp platform conf useSandbox cont =
case useSandbox of
NoSandbox -> cont Nothing
UseSandbox sandboxDir -> withSandboxPackageInfo verbosity
configFlags globalFlags
comp platform conf sandboxDir
(\spi -> cont (Just spi))
-- | Check if a sandbox is present and call @reinstallAddSourceDeps@ in that
-- case.
maybeReinstallAddSourceDeps :: Verbosity
-> Flag (Maybe Int) -- ^ The '-j' flag
-> ConfigFlags -- ^ Saved configure flags
-- (from dist/setup-config)
-> GlobalFlags
-> (UseSandbox, SavedConfig)
-> IO WereDepsReinstalled
maybeReinstallAddSourceDeps verbosity numJobsFlag configFlags'
globalFlags' (useSandbox, config) = do
case useSandbox of
NoSandbox -> return NoDepsReinstalled
UseSandbox sandboxDir -> do
-- Reinstall the modified add-source deps.
let configFlags = savedConfigureFlags config
`mappendSomeSavedFlags`
configFlags'
configExFlags = defaultConfigExFlags
`mappend` savedConfigureExFlags config
installFlags' = defaultInstallFlags
`mappend` savedInstallFlags config
installFlags = installFlags' {
installNumJobs = installNumJobs installFlags'
`mappend` numJobsFlag
}
globalFlags = savedGlobalFlags config
-- This makes it possible to override things like 'remote-repo-cache'
-- from the command line. These options are hidden, and are only
-- useful for debugging, so this should be fine.
`mappend` globalFlags'
reinstallAddSourceDeps
verbosity configFlags configExFlags
installFlags globalFlags sandboxDir
where
-- NOTE: we can't simply do @sandboxConfigFlags `mappend` savedFlags@
-- because we don't want to auto-enable things like 'library-profiling' for
-- all add-source dependencies even if the user has passed
-- '--enable-library-profiling' to 'cabal configure'. These options are
-- supposed to be set in 'cabal.config'.
mappendSomeSavedFlags :: ConfigFlags -> ConfigFlags -> ConfigFlags
mappendSomeSavedFlags sandboxConfigFlags savedFlags =
sandboxConfigFlags {
configHcFlavor = configHcFlavor sandboxConfigFlags
`mappend` configHcFlavor savedFlags,
configHcPath = configHcPath sandboxConfigFlags
`mappend` configHcPath savedFlags,
configHcPkg = configHcPkg sandboxConfigFlags
`mappend` configHcPkg savedFlags,
configProgramPaths = configProgramPaths sandboxConfigFlags
`mappend` configProgramPaths savedFlags,
configProgramArgs = configProgramArgs sandboxConfigFlags
`mappend` configProgramArgs savedFlags,
-- NOTE: Unconditionally choosing the value from
-- 'dist/setup-config'. Sandbox package DB location may have been
-- changed by 'configure -w'.
configPackageDBs = configPackageDBs savedFlags
-- FIXME: Is this compatible with the 'inherit' feature?
}
--
-- Utils (transitionary)
--
-- FIXME: configPackageDB' and configCompilerAux' don't really belong in this
-- module
--
configPackageDB' :: ConfigFlags -> PackageDBStack
configPackageDB' cfg =
interpretPackageDbFlags userInstall (configPackageDBs cfg)
where
userInstall = fromFlagOrDefault True (configUserInstall cfg)
configCompilerAux' :: ConfigFlags
-> IO (Compiler, Platform, ProgramConfiguration)
configCompilerAux' configFlags =
configCompilerAuxEx configFlags
--FIXME: make configCompilerAux use a sensible verbosity
{ configVerbosity = fmap lessVerbose (configVerbosity configFlags) }
|
eric-stanley/cabal
|
cabal-install/Distribution/Client/Sandbox.hs
|
bsd-3-clause
| 37,568
| 0
| 19
| 10,219
| 5,855
| 3,064
| 2,791
| 550
| 6
|
import StackTest
main :: IO ()
main = do
stack ["build", "--stack-yaml", "stack1.yaml"]
stack ["build", "--stack-yaml", "stack2.yaml"]
|
juhp/stack
|
test/integration/tests/4215-missing-unregister/Main.hs
|
bsd-3-clause
| 140
| 1
| 8
| 22
| 52
| 26
| 26
| 5
| 1
|
{-# LANGUAGE TemplateHaskell, NoMonomorphismRestriction #-}
module Sub where
x = [| 2 |]
|
sdiehl/ghc
|
testsuite/tests/driver/recomp009/Sub2.hs
|
bsd-3-clause
| 89
| 0
| 4
| 13
| 13
| 10
| 3
| 3
| 1
|
-- (c) The University of Glasgow 2006
-- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
--
-- The @Class@ datatype
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module Class (
Class,
ClassOpItem, DefMeth (..),
ClassATItem(..),
ClassMinimalDef,
defMethSpecOfDefMeth,
FunDep, pprFundeps, pprFunDep,
mkClass, classTyVars, classArity,
classKey, className, classATs, classATItems, classTyCon, classMethods,
classOpItems, classBigSig, classExtraBigSig, classTvsFds, classSCTheta,
classAllSelIds, classSCSelId, classMinimalDef, classHasFds
) where
#include "HsVersions.h"
import {-# SOURCE #-} TyCon ( TyCon, tyConName, tyConUnique )
import {-# SOURCE #-} TypeRep ( Type, PredType )
import Var
import Name
import BasicTypes
import Unique
import Util
import SrcLoc
import Outputable
import FastString
import BooleanFormula (BooleanFormula)
import Data.Typeable (Typeable)
import qualified Data.Data as Data
{-
************************************************************************
* *
\subsection[Class-basic]{@Class@: basic definition}
* *
************************************************************************
A @Class@ corresponds to a Greek kappa in the static semantics:
-}
data Class
= Class {
classTyCon :: TyCon, -- The data type constructor for
-- dictionaries of this class
-- See Note [ATyCon for classes] in TypeRep
className :: Name, -- Just the cached name of the TyCon
classKey :: Unique, -- Cached unique of TyCon
classTyVars :: [TyVar], -- The class kind and type variables;
-- identical to those of the TyCon
classFunDeps :: [FunDep TyVar], -- The functional dependencies
-- Superclasses: eg: (F a ~ b, F b ~ G a, Eq a, Show b)
-- We need value-level selectors for both the dictionary
-- superclasses and the equality superclasses
classSCTheta :: [PredType], -- Immediate superclasses,
classSCSels :: [Id], -- Selector functions to extract the
-- superclasses from a
-- dictionary of this class
-- Associated types
classATStuff :: [ClassATItem], -- Associated type families
-- Class operations (methods, not superclasses)
classOpStuff :: [ClassOpItem], -- Ordered by tag
-- Minimal complete definition
classMinimalDef :: ClassMinimalDef
}
deriving Typeable
-- | e.g.
--
-- > class C a b c | a b -> c, a c -> b where...
--
-- Here fun-deps are [([a,b],[c]), ([a,c],[b])]
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnRarrow'',
-- For details on above see note [Api annotations] in ApiAnnotation
type FunDep a = ([a],[a])
type ClassOpItem = (Id, DefMeth)
-- Selector function; contains unfolding
-- Default-method info
data DefMeth = NoDefMeth -- No default method
| DefMeth Name -- A polymorphic default method
| GenDefMeth Name -- A generic default method
deriving Eq
data ClassATItem
= ATI TyCon -- See Note [Associated type tyvar names]
(Maybe (Type, SrcSpan))
-- Default associated type (if any) from this template
-- Note [Associated type defaults]
type ClassMinimalDef = BooleanFormula Name -- Required methods
-- | Convert a `DefMeth` into a `DefMethSpec`, which discards the name field
-- of the `DefMeth` constructor.
defMethSpecOfDefMeth :: DefMeth -> DefMethSpec
defMethSpecOfDefMeth meth
= case meth of
NoDefMeth -> NoDM
DefMeth _ -> VanillaDM
GenDefMeth _ -> GenericDM
{-
Note [Associated type defaults]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The following is an example of associated type defaults:
class C a where
data D a r
type F x a b :: *
type F p q r = (p,q)->r -- Default
Note that
* The TyCons for the associated types *share type variables* with the
class, so that we can tell which argument positions should be
instantiated in an instance decl. (The first for 'D', the second
for 'F'.)
* We can have default definitions only for *type* families,
not data families
* In the default decl, the "patterns" should all be type variables,
but (in the source language) they don't need to be the same as in
the 'type' decl signature or the class. It's more like a
free-standing 'type instance' declaration.
* HOWEVER, in the internal ClassATItem we rename the RHS to match the
tyConTyVars of the family TyCon. So in the example above we'd get
a ClassATItem of
ATI F ((x,a) -> b)
So the tyConTyVars of the family TyCon bind the free vars of
the default Type rhs
The @mkClass@ function fills in the indirect superclasses.
The SrcSpan is for the entire original declaration.
-}
mkClass :: [TyVar]
-> [([TyVar], [TyVar])]
-> [PredType] -> [Id]
-> [ClassATItem]
-> [ClassOpItem]
-> ClassMinimalDef
-> TyCon
-> Class
mkClass tyvars fds super_classes superdict_sels at_stuff
op_stuff mindef tycon
= Class { classKey = tyConUnique tycon,
className = tyConName tycon,
classTyVars = tyvars,
classFunDeps = fds,
classSCTheta = super_classes,
classSCSels = superdict_sels,
classATStuff = at_stuff,
classOpStuff = op_stuff,
classMinimalDef = mindef,
classTyCon = tycon }
{-
Note [Associated type tyvar names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The TyCon of an associated type should use the same variable names as its
parent class. Thus
class C a b where
type F b x a :: *
We make F use the same Name for 'a' as C does, and similarly for 'b'.
The reason for this is that when checking instances it's easier to match
them up and ensure they agree. E.g.
instance C Int [d] where
type F [d] x Int = ....
we should make sure that the first and third args match the instance
header.
Having the same variables for class and tycon is also used in checkValidRoles
(in TcTyClsDecls) when checking a class's roles.
************************************************************************
* *
\subsection[Class-selectors]{@Class@: simple selectors}
* *
************************************************************************
The rest of these functions are just simple selectors.
-}
classArity :: Class -> Arity
classArity clas = length (classTyVars clas)
-- Could memoise this
classAllSelIds :: Class -> [Id]
-- Both superclass-dictionary and method selectors
classAllSelIds c@(Class {classSCSels = sc_sels})
= sc_sels ++ classMethods c
classSCSelId :: Class -> Int -> Id
-- Get the n'th superclass selector Id
-- where n is 0-indexed, and counts
-- *all* superclasses including equalities
classSCSelId (Class { classSCSels = sc_sels }) n
= ASSERT( n >= 0 && n < length sc_sels )
sc_sels !! n
classMethods :: Class -> [Id]
classMethods (Class {classOpStuff = op_stuff})
= [op_sel | (op_sel, _) <- op_stuff]
classOpItems :: Class -> [ClassOpItem]
classOpItems = classOpStuff
classATs :: Class -> [TyCon]
classATs (Class { classATStuff = at_stuff })
= [tc | ATI tc _ <- at_stuff]
classATItems :: Class -> [ClassATItem]
classATItems = classATStuff
classTvsFds :: Class -> ([TyVar], [FunDep TyVar])
classTvsFds c
= (classTyVars c, classFunDeps c)
classHasFds :: Class -> Bool
classHasFds (Class { classFunDeps = fds }) = not (null fds)
classBigSig :: Class -> ([TyVar], [PredType], [Id], [ClassOpItem])
classBigSig (Class {classTyVars = tyvars, classSCTheta = sc_theta,
classSCSels = sc_sels, classOpStuff = op_stuff})
= (tyvars, sc_theta, sc_sels, op_stuff)
classExtraBigSig :: Class -> ([TyVar], [FunDep TyVar], [PredType], [Id], [ClassATItem], [ClassOpItem])
classExtraBigSig (Class {classTyVars = tyvars, classFunDeps = fundeps,
classSCTheta = sc_theta, classSCSels = sc_sels,
classATStuff = ats, classOpStuff = op_stuff})
= (tyvars, fundeps, sc_theta, sc_sels, ats, op_stuff)
{-
************************************************************************
* *
\subsection[Class-instances]{Instance declarations for @Class@}
* *
************************************************************************
We compare @Classes@ by their keys (which include @Uniques@).
-}
instance Eq Class where
c1 == c2 = classKey c1 == classKey c2
c1 /= c2 = classKey c1 /= classKey c2
instance Ord Class where
c1 <= c2 = classKey c1 <= classKey c2
c1 < c2 = classKey c1 < classKey c2
c1 >= c2 = classKey c1 >= classKey c2
c1 > c2 = classKey c1 > classKey c2
compare c1 c2 = classKey c1 `compare` classKey c2
instance Uniquable Class where
getUnique c = classKey c
instance NamedThing Class where
getName clas = className clas
instance Outputable Class where
ppr c = ppr (getName c)
instance Outputable DefMeth where
ppr (DefMeth n) = ptext (sLit "Default method") <+> ppr n
ppr (GenDefMeth n) = ptext (sLit "Generic default method") <+> ppr n
ppr NoDefMeth = empty -- No default method
pprFundeps :: Outputable a => [FunDep a] -> SDoc
pprFundeps [] = empty
pprFundeps fds = hsep (ptext (sLit "|") : punctuate comma (map pprFunDep fds))
pprFunDep :: Outputable a => FunDep a -> SDoc
pprFunDep (us, vs) = hsep [interppSP us, ptext (sLit "->"), interppSP vs]
instance Data.Data Class where
-- don't traverse?
toConstr _ = abstractConstr "Class"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "Class"
|
ml9951/ghc
|
compiler/types/Class.hs
|
bsd-3-clause
| 10,310
| 0
| 12
| 2,916
| 1,628
| 933
| 695
| 135
| 3
|
module A1 where
data T c a = C1 a | C2 c
addedC2 = error "added C2 c to T"
over :: (T c b) -> b
over (C1 x) = x
over (C2 a) = addedC2
|
kmate/HaRe
|
old/testing/addCon/A1_TokOut.hs
|
bsd-3-clause
| 141
| 0
| 7
| 45
| 73
| 40
| 33
| 6
| 1
|
import BasicPrelude
import Lens.Micro ((<&>))
import TabWrapper
import Output
-- |
-- getArgs :: IO ([Text])
-- getArgs <&> toOutputFormat :: IO OutputFormat
-- getArgs <&> toOutputFormat >>= printout :: IO ()
main :: IO ()
main = getArgs <&> toOutputFormat >>= printout
|
mlitchard/primenator
|
src-exe/Main.hs
|
isc
| 274
| 1
| 6
| 46
| 52
| 30
| 22
| 6
| 1
|
module Purecoin.Core.Hash
( Hash, hash0, hashBS, merkleHash
, Hash160, hash160BS, sha256BS, ripemd160BS
) where
import Data.Monoid (mempty, mappend)
import Control.Applicative ((<$>))
import Data.ByteString (ByteString)
import Data.ByteString.Lazy (fromChunks)
import qualified Data.Hashable as H
import Data.NEList (NEList(..))
import Purecoin.Digest.SHA256 (Hash256, sha256)
import Purecoin.Digest.RIPEMD (Hash160, ripemd160)
import Purecoin.Core.Serialize (Serialize, get, put, encode)
import Purecoin.Utils (showHexByteStringLE)
-- the Ord instance makes it useful as a key for a Map
newtype Hash = Hash Hash256 deriving (Eq, Ord)
instance Serialize Hash where
get = Hash <$> get
put (Hash h) = put h
-- The standard way of displaying a bitcoin hash is in little endian format.
instance Show Hash where
show = showHexByteStringLE . encode
instance H.Hashable Hash where
hash (Hash h) = H.hash h
-- Like all crap C programs, the 0 value is co-opted to have a separate special meaning.
hash0 :: Hash
hash0 = Hash mempty
-- For some reason, hashing in bitcoin is done by two separate rounds of sha256.
-- It makes hashing slower and reduces the effective strength of the hash by a little less than a bit.
-- I do not know what is gained by this.
hashBS :: ByteString -> Hash
hashBS = Hash . sha256BS . encode . sha256BS
merkleHash :: NEList Hash -> Hash
merkleHash (NENil x) = x
merkleHash l = merkleHash (go l)
where
merkle :: Hash -> Hash -> Hash
h1 `merkle` h2 = hashBS $ encode h1 `mappend` encode h2
go :: NEList Hash -> NEList Hash
go (NENil x) = NENil (x `merkle` x)
go (NECons x (NENil y)) = NENil (x `merkle` y)
go (NECons x (NECons y ys)) = NECons (x `merkle` y) (go ys)
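-- A small worked example of the pairing above (illustration only): writing
-- x .+. y for hashBS (encode x `mappend` encode y), the root of the three
-- leaves [h1, h2, h3] is (h1 .+. h2) .+. (h3 .+. h3); an odd leaf is paired
-- with itself, as in bitcoin's merkle tree construction.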
sha256BS :: ByteString -> Hash256
sha256BS = sha256 . fromChunks . (:[])
ripemd160BS :: ByteString -> Hash160
ripemd160BS = ripemd160 . fromChunks . (:[])
hash160BS :: ByteString -> Hash160
hash160BS = ripemd160BS . encode . sha256BS
|
laanwj/Purecoin
|
Purecoin/Core/Hash.hs
|
mit
| 1,969
| 0
| 11
| 367
| 587
| 331
| 256
| 40
| 3
|
module Test.PRNGBench
where
import System.Random (RandomGen)
import Control.Arrow (second)
import Criterion.Main (Benchmark, defaultMain)
import Test.PRNGBench.GenList (SomeGen(SG), AnnotatedGen, AnnotatedGenList)
import Test.PRNGBench.SimpleBattery (manyRandomsBenchGroup, manySplitsBenchGroup)
import Test.PRNGBench.MC (runSequentialCircleMCBattery, runParallelCircleMCBattery)
-- | A list of default benchmark groups
benchGroups :: [AnnotatedGenList -> Benchmark]
benchGroups = [ manyRandomsBenchGroup
, manySplitsBenchGroup
, runSequentialCircleMCBattery
, runParallelCircleMCBattery
]
-- | Given a name and a generator, wraps the latter in an annotated box
genToAnnotatedGen :: RandomGen g => String -> g -> AnnotatedGen
genToAnnotatedGen = curry $ second SG
-- | Runs all the default benchmark groups on a given list of annotated boxed generators
runGroups :: AnnotatedGenList -> IO ()
runGroups gens = defaultMain $ map (\b -> b gens) benchGroups
|
nkartashov/prng-bench
|
src/Test/PRNGBench.hs
|
mit
| 1,018
| 0
| 9
| 178
| 194
| 114
| 80
| 16
| 1
|
module KMC.Syntax.External where
import Control.Applicative
import Control.Arrow (second)
import Data.Char (chr, ord)
import Data.List (sort)
import Data.Maybe (fromMaybe, isNothing)
import qualified KMC.Syntax.Internal as Int
import Prelude
data Anchoring = AnchorNone
| AnchorStart
| AnchorEnd
| AnchorBoth
deriving (Show)
-- | Named character classes from the POSIX standard.
data POSIXNamedSet = NSAlnum
| NSAlpha
| NSAscii
| NSBlank
| NSCntrl
| NSDigit
| NSGraph
| NSLower
| NSPrint
| NSPunct
| NSSpace
| NSUpper
| NSWord
| NSXDigit
deriving (Eq,Ord,Show)
data Regex = One -- ^ Epsilon
| Dot -- ^ Wildcard symbol
| Chr Char -- ^ Single character
| Group Bool Regex -- ^ Group (parentheses)
| Concat Regex Regex -- ^ Concatenation
| Branch Regex Regex -- ^ Sum (alternation)
| Class Bool [(Char, Char)] -- ^ Character class. False indicates
-- the class of symbols *not* mentioned.
| NamedSet Bool POSIXNamedSet -- ^ POSIX named character class
| Range Regex Int (Maybe Int) -- ^ Range expression. "n Just m" is
                                              -- repetition between n and m times,
-- and "n Nothing" is n or more times.
| LazyRange Regex Int (Maybe Int)
| Star Regex -- ^ Kleene star
| LazyStar Regex
| Plus Regex -- ^ Plus expression (1 or more repetitions)
| LazyPlus Regex
| Question Regex -- ^ Question mark (1 or 0 repetitions)
| LazyQuestion Regex
| Suppress Regex
deriving (Show, Eq, Ord)
data OpAssoc = OpAssocLeft | OpAssocRight | OpAssocNone
deriving (Eq, Ord, Show)
unparse :: Regex -> String
unparse = go OpAssocNone (0::Int)
where
noncap s = "(?:" ++ s ++ ")"
cap s = "(" ++ s ++ ")"
pars assoc p assoc' p' = if p > p' || (p == p' && assoc /= assoc') then noncap else id
go _ _ One = ""
go _ _ Dot = "."
go _ _ (Chr a) = [a]
go _ _ (Group False e) = noncap $ go OpAssocNone 0 e
go _ _ (Group True e) = cap $ go OpAssocNone 0 e
go assoc p (Concat e1 e2) = pars assoc p OpAssocRight 2
(go OpAssocLeft 2 e1 ++ go OpAssocRight 2 e2)
go assoc p (Branch e1 e2) = pars assoc p OpAssocRight 1
(go OpAssocLeft 1 e1 ++ "|" ++ go OpAssocRight 1 e2)
go _ _ (Class b rs) = "[" ++ (if b then "" else "^") ++ concatMap rng rs ++ "]"
go _ _ (NamedSet b ns) = "[:" ++ (if b then "" else "^") ++ name ns ++ ":]"
go _ _ (Range e i mj) = go OpAssocNone 3 e
++ "{" ++ show i
++ (case mj of
Just j -> ","++show j
Nothing -> "")
++ "}"
go assoc p (LazyRange e i mj) =
pars assoc p OpAssocNone 3 $
go OpAssocNone 3 e
++ "{" ++ show i
++ (case mj of
Just j -> ","++show j
Nothing -> "")
++ "}?"
go assoc p (Star e) = pars assoc p OpAssocNone 3 $ go OpAssocNone 3 e ++ "*"
go assoc p (LazyStar e) = pars assoc p OpAssocNone 3 $ go OpAssocNone 3 e ++ "*?"
go assoc p (Plus e) = pars assoc p OpAssocNone 3 $ go OpAssocNone 3 e ++ "+"
go assoc p (LazyPlus e) = pars assoc p OpAssocNone 3 $ go OpAssocNone 3 e ++ "+?"
go assoc p (Question e) = pars assoc p OpAssocNone 4 $ go OpAssocNone 4 e ++ "?"
go assoc p (LazyQuestion e) = pars assoc p OpAssocNone 3 $ go OpAssocNone 3 e ++ "??"
go _ _ (Suppress _) = error "No syntax for suppress"
name ns = case ns of
NSAlnum -> "alnum"
NSAlpha -> "alpha"
NSAscii -> "ascii"
NSBlank -> "blank"
NSCntrl -> "cntrl"
NSDigit -> "digit"
NSGraph -> "graph"
NSLower -> "lower"
NSPrint -> "print"
NSPunct -> "punct"
NSSpace -> "space"
NSUpper -> "upper"
NSWord -> "word"
NSXDigit -> "xdigit"
rng (a,b) = [a,'-',b]
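-- A couple of worked examples for 'unparse' (illustration only): the
-- precedence bookkeeping above decides where non-capturing groups are
-- inserted.
--
-- >>> unparse (Branch (Chr 'a') (Concat (Chr 'b') (Star (Chr 'c'))))
-- "a|bc*"
-- >>> unparse (Star (Branch (Chr 'a') (Chr 'b')))
-- "(?:a|b)*"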
simplifyRanges :: [(Char, Char)] -> [(Char, Char)]
simplifyRanges rs = simplifyRanges' $ sort $ filter validRange rs
where
validRange (c1, c2) = c1 <= c2
simplifyRanges' [] = []
simplifyRanges' [r] = [r]
simplifyRanges' ((c1, c2):(c1', c2'):rs')
| c2' <= c2 = simplifyRanges' $ (c1, c2):rs'
| c1' <= c2 || succ c2 == c1' = simplifyRanges' $ (c1, c2'):rs'
| otherwise = (c1, c2):simplifyRanges' ((c1', c2'):rs')
-- | Negate character ranges. Precondition: Input ranges are simplified
-- (i.e. sorted and minimal)
negateRanges :: [(Char, Char)] -> [(Char, Char)]
negateRanges = negateRanges' $ Just $ chr 0
where
negateRanges' (Just c) [] = [(c, chr 255)]
negateRanges' (Just c) ((c1, c2):rs) =
let rs' = negateRanges' (if ord c2 < 255 then Just $ succ c2 else Nothing) rs
in if c < c1 then (c, chr $ ord c1 - 1):rs' else rs'
negateRanges' Nothing _ = []
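-- Worked examples (illustration only): adjacent or overlapping ranges are
-- merged, and negation covers the full byte range chr 0 .. chr 255.
--
-- >>> simplifyRanges [('c','e'),('a','b')]
-- [('a','e')]
--
-- negateRanges [('b','d')] = [(chr 0, 'a'), ('e', chr 255)]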
balance :: [Int.Regex] -> Maybe Int.Regex
balance [] = Nothing
balance [c] = Just c
balance xs = let (xs1, xs2) = splitAt (length xs `div` 2) xs
in (Int.:|:) <$> balance xs1 <*> balance xs2
reSum :: Maybe Int.Regex -> Maybe Int.Regex -> Maybe Int.Regex
reSum Nothing Nothing = Nothing
reSum Nothing (Just e) = Just e
reSum (Just e) Nothing = Just e
reSum (Just e1) (Just e2) = Just $ e1 Int.:|: e2
-- | Flag indicating how a ? is rewritten to a choice
data QuestionOrientation = EmptyLeft -- ^ "E?" => "1 + E"
| EmptyRight -- ^ "E?" => "E + 1"
deriving (Show)
-- | How to treat the dot when simplifying?
data DotBehavior = BalancedTree -- ^ Build a balanced tree of alternatives
| DummyDot -- ^ Insert some dummy char. Useful
-- when interfacing with external tools.
deriving (Show)
-- | Convert regular expression AST to internal (simplified) representation
simplify' :: Int.GroupId -> QuestionOrientation -> DotBehavior -> Regex -> (Int.GroupId, Maybe Int.Regex)
simplify' i _ _ One = (i, Just Int.One)
simplify' i _ BalancedTree Dot = (i, balance [Int.Byte j | j <- [0..255]])
simplify' i o b@DummyDot Dot = simplify' i o b (Chr '.')
simplify' i _ _ (Chr c) = let b = fromIntegral . ord $ c in (i, Just $ Int.Byte b)
simplify' i o b (Group True re) = (Int.G i <$>) `second` simplify' (i+1) o b re
simplify' i o b (Group False re) = simplify' i o b re
simplify' i o b (Concat e1 e2) =
let (i', r') = simplify' i o b e1
in ((Int.:&:) <$> r' <*>) `second` simplify' i' o b e2
simplify' i o b (Branch e1 e2) =
let (i', r') = simplify' i o b e1
in reSum r' `second` simplify' i' o b e2
simplify' i o@EmptyRight b (Question e) =
flip reSum (Just Int.One) `second` simplify' i o b e
simplify' i o@EmptyLeft b (Question e) =
reSum (Just Int.One) `second` simplify' i o b e
simplify' i o b (Star e) = (Int.K <$>) `second` simplify' i o b e
simplify' i o b (Plus e) =
((\e' -> e' Int.:&: Int.K e') <$>) `second` simplify' i o b e
simplify' i _ _ (Class b rs) =
let rs' = (if b then id else negateRanges) $ simplifyRanges rs
in (,) i $ balance
$ concat
[map Int.Byte [fromIntegral (ord c1)..fromIntegral (ord c2)]
| (c1, c2) <- rs']
simplify' _ _ _ (NamedSet _ n) = error $ "I can't simplify " ++ show n
simplify' i _ _ (Range _ from (Just to))
| to < from = (i, Nothing)
simplify' i o b (Range e from mto)
| from == 0, isNothing mto || mto == Just 0 = (i, Nothing)
| otherwise = foldr1 combine $ map (simplify' i o b)
(unroll from (fromMaybe from mto))
where
-- Group-ids under repetition are identical:
-- (a){2} -> (:1 a)(:1 a), NOT (:1 a)(:2 a)
combine (_,r') (i', acc) = (i', (Int.:&:) <$> r' <*> acc)
unroll :: Int -> Int -> [Regex]
unroll 0 1 = [Question e]
unroll 0 y = Question e : unroll 0 (y-1)
unroll 1 1 = [e]
unroll x y = e : unroll (x-1) (y-1)
simplify' _ _ _ _ = error "incomplete pattern"
simplifyOpts :: QuestionOrientation -> DotBehavior -> Regex -> Maybe Int.Regex
simplifyOpts ori beh = snd . simplify' 1 ori beh
-- | Convert regular expression AST to internal (simplified) representation
simplify :: Regex -> Maybe Int.Regex
simplify = simplifyOpts EmptyRight BalancedTree
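-- A couple of hedged examples of the simplification (illustration only, using
-- the 'Int.*' constructors already referenced above): with the default
-- options a question mark becomes a right-biased sum and capture groups are
-- numbered from 1.
--
-- simplify (Question (Chr 'a')) = Just (Int.Byte 97 Int.:|: Int.One)
-- simplify (Group True (Chr 'a')) = Just (Int.G 1 (Int.Byte 97))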
|
diku-kmc/regexps-syntax
|
KMC/Syntax/External.hs
|
mit
| 9,076
| 0
| 15
| 3,246
| 3,129
| 1,636
| 1,493
| 177
| 36
|
module Data.List.Subs where
subsequencesN :: Int -> [a] -> [[a]]
subsequencesN n xs = let l = length xs
in if n>l then [] else subsequencesBySize xs !! (l-n)
where
subsequencesBySize [] = [[[]]]
subsequencesBySize (x:xs) = let next = subsequencesBySize xs
in zipWith (++) ([]:next) (map (map (x:)) next ++ [[]])
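-- A small worked example (illustration only):
--
-- >>> subsequencesN 2 "abc"
-- ["bc","ac","ab"]
-- >>> subsequencesN 4 "abc"
-- []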
|
goshakkk/Poker.hs
|
src/Data/List/Subs.hs
|
mit
| 375
| 0
| 15
| 115
| 175
| 94
| 81
| 7
| 3
|
module Main where
import Test.Framework.Runners.Console
import J2S.AITest
main :: IO ()
main = defaultMain [aiTests]
|
berewt/J2S
|
test/Main.hs
|
mit
| 120
| 0
| 6
| 18
| 37
| 22
| 15
| 5
| 1
|
import Control.Monad
import qualified Data.ByteString.Lazy as BL
import qualified Data.Vector as V
import qualified Data.Map as M
import qualified Data.PSQueue as PS
import qualified HotelMap as H
import Data.Csv
import qualified Graphics.UI.Threepenny as UI
import Graphics.UI.Threepenny.Core
myOptions = defaultDecodeOptions {
-- Change the csv delimiter to ';'
decDelimiter = fromIntegral $ fromEnum ';'
}
myEncodeOptions = defaultEncodeOptions {
-- Change the csv delimiter to ';'
encDelimiter = fromIntegral $ fromEnum ';'
}
--- Takes a Maybe and returns the value inside the Just (errors on Nothing) ---
removeJust :: Maybe a -> a
removeJust Nothing = error "Nothing on removeJust"
removeJust (Just x) = x
-------------------------------- Functions to convert our map into a list that can be encoded into a csv -------------------------------------
convertMap2List :: H.GlobeMap String -> [(String, String, String, Int)]
convertMap2List globe_map = M.foldWithKey (\k x ks -> (convertCountry2List k x) ++ ks) [] globe_map
convertCountry2List :: String -> H.CountryMap String -> [(String, String, String, Int)]
convertCountry2List country city_map = M.foldWithKey (\k x ks -> (convertCity2List k country x) ++ ks) [] city_map
convertCity2List :: String -> String -> H.CityMap String -> [(String, String, String, Int)]
convertCity2List city country hotel_list = [ (country, city, hotel, priority) | (hotel PS.:-> priority) <- PS.toList hotel_list ]
------------------------------------ Creates a listbox element where options are taken from the list -------------------------------------------
mkSelect :: [String] -> UI (UI.ListBox String)
mkSelect options_list = do
UI.listBox (pure options_list) (pure Nothing) (pure UI.string)
------------------------------------ Functions to return a list of hotels in a given city from our map ---------------------------------------------
getList :: String -> String -> H.GlobeMap String -> [String]
getList country city co_m = searchCity city ci_m
where (Just ci_m) = M.lookup country co_m
searchCity :: String -> H.CountryMap String -> [String]
searchCity city ci_m = H.getHotels ci_heap
where (Just ci_heap) = M.lookup city ci_m
------------------------ Given a map of countries and a country name, returns the list of cities in that country ----------------------------------
getCityList :: String -> H.GlobeMap String -> [String]
getCityList country our_map = M.keys city_m
where (Just city_m) = M.lookup country our_map
----------------------------- Given a vector in the form (<country>, <city>, <hotel>, <priority>), converts it into a map ------------------------------
generateMap :: V.Vector (String, String, String, Int) -> H.GlobeMap String
generateMap v = V.foldl' (\acc (co, ci, ho, pr) -> H.insertCountry co ci ho pr acc) M.empty v
------------------------------------------------------------ THE GUI part ------------------------------------------------------
makeLabel :: Window -> UI.ListBox String -> [String] -> String -> String -> H.GlobeMap String -> UI Element
makeLabel rootWindow hotelBox hotelLst country city our_map = do
hotelChoice <- get UI.selection $ getElement hotelBox
case hotelChoice of
Nothing -> error "Not Found"
(Just h) -> do
let hotelName = hotelLst !! h -- Get name of selected hotel
let new_map = H.selectHotel country city hotelName our_map -- Increment the priority of the selected hotel by 1
liftIO $ BL.writeFile "hotels.csv" $ encodeWith myEncodeOptions $ convertMap2List new_map -- Write updated map to CSV
header <- UI.h1 #+ [ string $ hotelName ++ " selected. Please reload page!" ] -- Create <h1>
getBody rootWindow #+ [element header] -- draw
makeHotelsList :: Window -> UI.ListBox String -> UI.ListBox String -> [String] -> [String] -> H.GlobeMap String -> UI ()
makeHotelsList rootWindow countryInput cityInput countries cities our_map = do
countryChoice <- get UI.selection $ getElement countryInput -- Get selected country index
cityChoice <- get UI.selection $ getElement cityInput -- Get selected city index
let country = countries !! (removeJust countryChoice) -- Get selected country name
let city = cities !! (removeJust cityChoice) -- Get selected city name
let hotelLst = getList country city our_map -- Get list of hotels in city
hotelBox <- mkSelect hotelLst -- Make listbox of hotels
element hotelBox # set (attr "size") "20" -- set size attribute of listbox to 20
goBtn <- UI.button #+ [ string "Go!" ] -- Make button
getBody rootWindow #+ [element hotelBox, element goBtn] -- Draw
on UI.click goBtn $ \_ -> --on clicking Go!
makeLabel rootWindow hotelBox hotelLst country city our_map
makeCityS :: Window -> Maybe Int -> UI.ListBox String -> H.GlobeMap String -> [String] -> UI ()
makeCityS rootWindow x countryInput our_map countries = do
case x of
Nothing -> return ()
Just ix -> do
let countryName = countries !! ix -- get selected country name from the list of countries
let cities = getCityList countryName our_map -- get list of cities in the selected country
cityInput <- mkSelect cities -- makes a drop-down menu of cities
searchBtn <- UI.button #+ [ string "Search" ] -- make button
getBody rootWindow #+ [element cityInput, element searchBtn] -- draw drop-down menu and button
on UI.click searchBtn $ \_ -> -- on clicking Search button
makeHotelsList rootWindow countryInput cityInput countries cities our_map
makeCountryS :: Window -> V.Vector (String, String, String, Int) -> UI ()
makeCountryS rootWindow v = do
let our_map = generateMap v -- Inserts vector data into our data structure i.e. a map of countries
  let countries = M.keys our_map -- Gets list of countries in the generated map
countryInput <- mkSelect countries -- Makes a drop-down menu of countries
getBody rootWindow #+ [element countryInput] -- Draw the drop-drown menu of countries
on UI.selectionChange (getElement countryInput) $ \x -> -- When user selects a country
makeCityS rootWindow x countryInput our_map countries
setup :: Window -> UI ()
setup rootWindow = void $ do
-- Reads and parses CSV file and returns a vector of data in each line --
csv <- liftIO $ BL.readFile "hotels.csv"
case decodeWith myOptions NoHeader csv of
Left err -> liftIO $ putStrLn err
Right v -> makeCountryS rootWindow v
main :: IO ()
main = startGUI defaultConfig $ setup
|
fahadakhan96/hotel-recommend
|
src/Main.hs
|
mit
| 6,679
| 0
| 16
| 1,378
| 1,660
| 844
| 816
| 88
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module Painting
( Canvas (..)
, newCanvas
, paint
) where
import Data.Monoid ((<>),mempty)
import Data.Word
import qualified Data.Foldable as F
import qualified Data.Vector as V
import qualified Data.Text as T
import Control.Lens
import Dom
import Layout
import Style
import CSS (Value(ColorValue), Color(..))
type DisplayList = V.Vector DisplayCommand
data DisplayCommand = SolidColor Color Rect
data Canvas = Canvas { pixels :: V.Vector Color
, wdth :: Word
, hght :: Word }
paint :: LayoutBox -> Rect -> Canvas
paint root bounds = let dlist = buildDisplayList root
canvas = newCanvas w h
w = fromInteger . floor $ bounds^.width
h = fromInteger . floor $ bounds^.height
in F.foldl' paintItem canvas dlist
buildDisplayList :: LayoutBox -> DisplayList
buildDisplayList = F.foldMap renderLayoutBox
renderLayoutBox :: (Dimensions,BoxType) -> DisplayList
renderLayoutBox box = renderBackgroud box <> renderBorders box
renderBackgroud :: (Dimensions,BoxType) -> DisplayList
renderBackgroud (dim,ty) = maybe mempty
(return . flip SolidColor (borderBox dim)) (getColor ty "background")
getColor :: BoxType -> T.Text -> Maybe Color
getColor (BlockNode style) name = getColor' style name
getColor (InlineNode style) name = getColor' style name
getColor AnonymousBlock _ = Nothing
getColor' style name = case value (NTree style []) name of
Just (ColorValue (Color r g b a)) -> Just (Color r g b a)
_ -> Nothing
renderBorders :: (Dimensions,BoxType) -> DisplayList
renderBorders (dim,ty) = maybe mempty renderBorders' (getColor ty "border-color")
where
renderBorders' color = V.fromList $ map (SolidColor color) [l, r, t, b]
bbox = borderBox dim
bdr = dim^.border
l = bbox & width.~ bdr^.left
r = bbox & x+~ bbox^.width - bdr^.right
& width.~ bdr^.right
t = bbox & height.~ bdr^.top
b = bbox & y+~ bbox^.height - bdr^.bottom
& height.~ bdr^.bottom
newCanvas :: Word -> Word -> Canvas
newCanvas w h = let white = Color 255 255 255 255 in
Canvas (V.replicate (fromIntegral(w * h)) white) w h
paintItem :: Canvas -> DisplayCommand -> Canvas
paintItem cs (SolidColor color rect) = updateChunk cs (x0,x1) (y0,y1) color
where
x0 = clampInt 0 (w-1) (rect^.x)
y0 = clampInt 0 (h-1) (rect^.y)
x1 = clampInt 0 (w-1) (rect^.x + rect^.width - 1)
y1 = clampInt 0 (h-1) (rect^.y + rect^.height - 1)
w = asFloat $ wdth cs
h = asFloat $ hght cs
asFloat = fromInteger . toInteger
-- Note: V.update builds and returns a new vector; the original pixel vector
-- is not modified in place (Data.Vector is immutable from the caller's side)
updateChunk :: Canvas -> (Integer,Integer) -> (Integer,Integer) -> Color -> Canvas
updateChunk cs (x0,x1) (y0,y1) c = let pxs = V.update (pixels cs) chunk in
cs{ pixels = pxs}
where
chunk = V.map (\a->(fromIntegral a,c)) indicies
indicies = V.fromList [ y * toInteger (wdth cs) + x | x <- [x0..x1], y <- [y0..y1] ]
clampInt :: Float -> Float -> Float -> Integer
clampInt f c = floor . min c . max f
|
Hrothen/Hubert
|
src/Painting.hs
|
mit
| 3,318
| 0
| 14
| 896
| 1,219
| 652
| 567
| 70
| 2
|
{-# LANGUAGE UnicodeSyntax #-}
module UnicodeSyntax where
import System.Environment (getArgs)
main :: IO ()
main = do
as ← getArgs
print $ test 0
test :: Int → Bool
test x = x*5 == x+8
|
Pnom/haskell-ast-pretty
|
Test/examples/UnicodeSyntax.hs
|
mit
| 196
| 0
| 8
| 42
| 76
| 40
| 36
| 9
| 1
|
module Red.Enums.FromBinary where
import Data.Word
class FromBinary32 a where
fromBinary32 :: Word32 -> a
class FromBinary8 a where
fromBinary8 :: Word8 -> a
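-- A hypothetical example instance (illustration only, not part of the
-- original module): an enum-like type decoded from a single byte.
data ExampleFlag = ExampleOff | ExampleOn
instance FromBinary8 ExampleFlag where
  fromBinary8 0 = ExampleOff
  fromBinary8 _ = ExampleOn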
|
LeviSchuck/RedReader
|
Red/Enums/FromBinary.hs
|
mit
| 171
| 0
| 7
| 36
| 48
| 26
| 22
| 6
| 0
|
module Dama.Core.Core
( Program
, Expr(Var, App, Lam, Case, Let)
, Decl((:=))
, Option((:->))
, Pattern(Bind, Cons)
, Bind
) where
type Program = [Decl]
data Expr
= Var String
| App Expr Expr
| Lam Bind Expr
| Case Expr [Option]
| Let [Decl] Expr
deriving Show
data Decl = String := Expr
deriving Show
infix 0 :=
data Option = Pattern :-> Expr
deriving Show
infix 0 :->
data Pattern
= Bind Bind
| Cons String [Bind]
deriving Show
type Bind = Maybe String
|
tysonzero/dama
|
src/Dama/Core/Core.hs
|
mit
| 533
| 0
| 7
| 164
| 179
| 117
| 62
| 34
| 0
|
module Twenty where
import Data.Monoid
-- Exercises: Library Functions
-- 1
sum :: (Monoid a, Foldable t, Num a) => t a -> a
sum = foldMap (+ mempty)
-- 2
product :: (Monoid a, Foldable t, Num a) => t a -> a
product = foldMap (* mempty)
-- 3
elem :: (Foldable t, Eq a) => a -> t a -> Bool
elem x xs = foldr (\a acc -> acc || a == x) False xs
-- 4
minimum :: (Foldable t, Ord a) => t a -> Maybe a
minimum xs = Just $ foldr1 min' xs
where min' x y = case compare x y of
GT -> y
_ -> x
-- 5
maximum :: (Foldable t, Ord a) => t a -> Maybe a
maximum xs = Just $ foldr1 max' xs
where max' x y = case compare x y of
GT -> x
_ -> y
-- 6
null :: (Foldable t) => t a -> Bool
null = foldr (\a acc -> False) True
-- 7
length :: (Foldable t) => t a -> Int
length = foldr (\a acc -> 1 + acc) 0
-- 8
toList :: (Foldable t) => t a -> [a]
toList = foldr (\a acc -> [a] ++ acc) []
-- 9
fold :: (Foldable t, Monoid m) => t m -> m
fold = foldMap (mappend mempty)
-- 10
foldMap' :: (Foldable t, Monoid m) => (a -> m) -> t a -> m
foldMap' f = foldr (\a acc -> (f a) <> acc) mempty
-- 20.6 Chapter Exercises
-- 1
data Constant a b = Constant a
instance Foldable (Constant a) where
foldMap _ _ = mempty
-- 2
data Two a b = Two a b
instance Foldable (Two a) where
foldMap f (Two _ b) = f b
-- 3
data Three a b c = Three a b c
instance Foldable (Three a b) where
foldMap f (Three _ _ c) = f c
-- 4
data Three' a b = Three' a b b
instance Foldable (Three' a) where
foldMap f (Three' a b c) = f b <> f c
-- 5
data Four' a b = Four' a b b b
instance Foldable (Four' a) where
foldMap f (Four' a b c d) = f b <> f c <> f d
--
filterF' :: (Applicative f, Foldable t, Monoid (f a)) => (a -> Bool) -> t a -> f a
filterF' f = foldMap (\th -> if (f th) then (pure th) else mempty)
-- filterF' (odd . getSum) [1,2,3 :: Sum Integer]
|
mudphone/HaskellBook
|
src/Twenty.hs
|
mit
| 1,931
| 0
| 10
| 576
| 954
| 504
| 450
| 45
| 2
|
{-# LANGUAGE PackageImports, RecordWildCards #-}
-- PartialTypeSignatures should be used when it's available.
module Main where
import "GLFW-b" Graphics.UI.GLFW as GLFW
import Graphics.Gloss.Rendering
import Graphics.Gloss.Data.Color
import Graphics.Gloss.Data.Picture
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
import Control.Applicative
import Control.Concurrent.STM (TQueue, atomically, newTQueueIO, tryReadTQueue, writeTQueue)
import Control.Monad
import Control.Monad.Fix (fix)
import Control.Monad.RWS.Strict (RWST, ask, asks, evalRWST, get, liftIO, modify, put)
import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT)
import Data.Maybe
import GHC.Float (double2Float)
import Board
import Drawing
import GameState
import qualified Textures as Tx
data Env = Env
{ envEventsChan :: TQueue Event
, envWindow :: !GLFW.Window
}
data State = State
{ stateWindowWidth :: !Int
, stateWindowHeight :: !Int
, stateMouseDown :: !Bool
, stateDragging :: !Bool
, stateDragStartX :: !Double
, stateDragStartY :: !Double
, stateDragStartXAngle :: !Double
, stateDragStartYAngle :: !Double
, stateGameState :: GameState
}
updateGameState :: GameState -> State -> State
updateGameState gS s = s { stateGameState = gS }
modifyGameState :: (GameState -> GameState) -> State -> State
modifyGameState f s = s { stateGameState = f (stateGameState s) }
type Game = RWST Env () State IO
--------------------------------------------------------------------------------
data Event =
EventError !GLFW.Error !String
| EventWindowPos !GLFW.Window !Int !Int
| EventWindowSize !GLFW.Window !Int !Int
| EventWindowClose !GLFW.Window
| EventWindowRefresh !GLFW.Window
| EventWindowFocus !GLFW.Window !GLFW.FocusState
| EventWindowIconify !GLFW.Window !GLFW.IconifyState
| EventFramebufferSize !GLFW.Window !Int !Int
| EventMouseButton !GLFW.Window !GLFW.MouseButton !GLFW.MouseButtonState !GLFW.ModifierKeys
| EventCursorPos !GLFW.Window !Double !Double
| EventCursorEnter !GLFW.Window !GLFW.CursorState
| EventScroll !GLFW.Window !Double !Double
| EventKey !GLFW.Window !GLFW.Key !Int !GLFW.KeyState !GLFW.ModifierKeys
| EventChar !GLFW.Window !Char
deriving Show
--------------------------------------------------------------------------------
windowWidth, windowHeight :: Int
windowWidth = 1000
windowHeight = 800
main :: IO ()
main = do
eventsChan <- newTQueueIO :: IO (TQueue Event)
withWindow windowWidth windowHeight "Civ" $ \win -> do
GLFW.setErrorCallback $ Just $ errorCallback eventsChan
GLFW.setWindowPosCallback win $ Just $ windowPosCallback eventsChan
GLFW.setWindowSizeCallback win $ Just $ windowSizeCallback eventsChan
GLFW.setWindowCloseCallback win $ Just $ windowCloseCallback eventsChan
GLFW.setWindowRefreshCallback win $ Just $ windowRefreshCallback eventsChan
GLFW.setWindowFocusCallback win $ Just $ windowFocusCallback eventsChan
GLFW.setWindowIconifyCallback win $ Just $ windowIconifyCallback eventsChan
GLFW.setFramebufferSizeCallback win $ Just $ framebufferSizeCallback eventsChan
GLFW.setMouseButtonCallback win $ Just $ mouseButtonCallback eventsChan
GLFW.setCursorPosCallback win $ Just $ cursorPosCallback eventsChan
GLFW.setCursorEnterCallback win $ Just $ cursorEnterCallback eventsChan
GLFW.setScrollCallback win $ Just $ scrollCallback eventsChan
GLFW.setKeyCallback win $ Just $ keyCallback eventsChan
GLFW.setCharCallback win $ Just $ charCallback eventsChan
GLFW.swapInterval 1
(fbWidth, fbHeight) <- GLFW.getFramebufferSize win
glossState <- initState -- we don't have access to its type
gameState <- initGameState
txMap <- liftIO Tx.textureMap
let env = Env
{ envEventsChan = eventsChan
, envWindow = win
}
state = State
{ stateWindowWidth = fbWidth
, stateWindowHeight = fbHeight
, stateMouseDown = False
, stateDragging = False
, stateDragStartX = 0
, stateDragStartY = 0
, stateDragStartXAngle = 0
, stateDragStartYAngle = 0
, stateGameState = gameState
}
runGame glossState txMap env state
withWindow :: Int -> Int -> String -> (GLFW.Window -> IO ()) -> IO ()
withWindow width height title f = do
GLFW.setErrorCallback $ Just simpleErrorCallback
r <- GLFW.init
when r $ do
m <- GLFW.createWindow width height title Nothing Nothing
case m of
(Just win) -> do
GLFW.makeContextCurrent m
f win
GLFW.setErrorCallback $ Just simpleErrorCallback
GLFW.destroyWindow win
Nothing -> return ()
GLFW.terminate
where
simpleErrorCallback e s =
putStrLn $ unwords [show e, show s]
--------------------------------------------------------------------------------
-- Each callback does just one thing: write an appropriate Event to the events
-- TQueue.
errorCallback :: TQueue Event -> GLFW.Error -> String -> IO ()
windowPosCallback :: TQueue Event -> GLFW.Window -> Int -> Int -> IO ()
windowSizeCallback :: TQueue Event -> GLFW.Window -> Int -> Int -> IO ()
windowCloseCallback :: TQueue Event -> GLFW.Window -> IO ()
windowRefreshCallback :: TQueue Event -> GLFW.Window -> IO ()
windowFocusCallback :: TQueue Event -> GLFW.Window -> GLFW.FocusState -> IO ()
windowIconifyCallback :: TQueue Event -> GLFW.Window -> GLFW.IconifyState -> IO ()
framebufferSizeCallback :: TQueue Event -> GLFW.Window -> Int -> Int -> IO ()
mouseButtonCallback :: TQueue Event -> GLFW.Window -> GLFW.MouseButton -> GLFW.MouseButtonState -> GLFW.ModifierKeys -> IO ()
cursorPosCallback :: TQueue Event -> GLFW.Window -> Double -> Double -> IO ()
cursorEnterCallback :: TQueue Event -> GLFW.Window -> GLFW.CursorState -> IO ()
scrollCallback :: TQueue Event -> GLFW.Window -> Double -> Double -> IO ()
keyCallback :: TQueue Event -> GLFW.Window -> GLFW.Key -> Int -> GLFW.KeyState -> GLFW.ModifierKeys -> IO ()
charCallback :: TQueue Event -> GLFW.Window -> Char -> IO ()
errorCallback tc e s = atomically $ writeTQueue tc $ EventError e s
windowPosCallback tc win x y = atomically $ writeTQueue tc $ EventWindowPos win x y
windowSizeCallback tc win w h = atomically $ writeTQueue tc $ EventWindowSize win w h
windowCloseCallback tc win = atomically $ writeTQueue tc $ EventWindowClose win
windowRefreshCallback tc win = atomically $ writeTQueue tc $ EventWindowRefresh win
windowFocusCallback tc win fa = atomically $ writeTQueue tc $ EventWindowFocus win fa
windowIconifyCallback tc win ia = atomically $ writeTQueue tc $ EventWindowIconify win ia
framebufferSizeCallback tc win w h = atomically $ writeTQueue tc $ EventFramebufferSize win w h
mouseButtonCallback tc win mb mba mk = atomically $ writeTQueue tc $ EventMouseButton win mb mba mk
cursorPosCallback tc win x y = atomically $ writeTQueue tc $ EventCursorPos win x y
cursorEnterCallback tc win ca = atomically $ writeTQueue tc $ EventCursorEnter win ca
scrollCallback tc win x y = atomically $ writeTQueue tc $ EventScroll win x y
keyCallback tc win k sc ka mk = atomically $ writeTQueue tc $ EventKey win k sc ka mk
charCallback tc win c = atomically $ writeTQueue tc $ EventChar win c
directionKeys :: [Key]
directionKeys = [Key'W, Key'E, Key'D, Key'X, Key'Z, Key'A]
--------------------------------------------------------------------------------
-- | runGame :: _ -> Env -> State -> IO ()
runGame glossState txMap env state =
void $ evalRWST (adjustWindow >> run glossState txMap) env state
-- | run :: _ -> Game ()
run glossState txMap = do
win <- asks envWindow
draw glossState txMap
liftIO $ do
GLFW.swapBuffers win
GLFW.pollEvents
processEvents
state <- get
if stateDragging state
then do
let sodx = stateDragStartX state
sody = stateDragStartY state
sodxa = stateDragStartXAngle state
sodya = stateDragStartYAngle state
(x, y) <- liftIO $ GLFW.getCursorPos win
let myrot = (x - sodx) / 2
mxrot = (y - sody) / 2
put $ state
-- { stateXAngle = sodxa + mxrot
-- , stateYAngle = sodya + myrot
-- }
else do
(kxrot, kyrot) <- liftIO $ getCursorKeyDirections win
modify $ modifyGameState (moveMap (kxrot, kyrot) 10)
mt <- liftIO GLFW.getTime
modify $ \s -> s
-- { stateGearZAngle = maybe 0 (realToFrac . (100*)) mt
-- }
q <- liftIO $ GLFW.windowShouldClose win
modify $ modifyGameState blink
unless q (run glossState txMap)
processEvents :: Game ()
processEvents = do
tc <- asks envEventsChan
me <- liftIO $ atomically $ tryReadTQueue tc
case me of
Just e -> do
processEvent e
processEvents
Nothing -> return ()
processEvent :: Event -> Game ()
processEvent ev =
case ev of
(EventError e s) -> do
printEvent "error" [show e, show s]
win <- asks envWindow
liftIO $ GLFW.setWindowShouldClose win True
(EventWindowPos _ x y) ->
printEvent "window pos" [show x, show y]
(EventWindowSize _ width height) ->
printEvent "window size" [show width, show height]
(EventWindowClose _) ->
printEvent "window close" []
(EventWindowRefresh _) ->
printEvent "window refresh" []
(EventWindowFocus _ fs) ->
printEvent "window focus" [show fs]
(EventWindowIconify _ is) ->
printEvent "window iconify" [show is]
(EventFramebufferSize _ width height) -> do
printEvent "framebuffer size" [show width, show height]
modify $ \s -> s
{ stateWindowWidth = width
, stateWindowHeight = height
}
adjustWindow
(EventMouseButton _ mb mbs mk) -> do
printEvent "mouse button" [show mb, show mbs, showModifierKeys mk]
when (mb == GLFW.MouseButton'1) $ do
let pressed = mbs == GLFW.MouseButtonState'Pressed
modify $ \s -> s
{ stateMouseDown = pressed
}
unless pressed $
modify $ \s -> s
{ stateDragging = False
}
(EventCursorPos _ x y) -> do
let x' = round x :: Int
y' = round y :: Int
printEvent "cursor pos" [show x', show y']
state <- get
when (stateMouseDown state && not (stateDragging state)) $
put $ state
{ stateDragging = True
, stateDragStartX = x
, stateDragStartY = y
-- , stateDragStartXAngle = stateXAngle state
-- , stateDragStartYAngle = stateYAngle state
}
(EventCursorEnter _ cs) ->
printEvent "cursor enter" [show cs]
(EventScroll _ x y) -> do
(modify . modifyGameState . changeScale . double2Float) y
adjustWindow
(EventKey win k scancode ks mk) -> do
printEvent "key" [show k, show scancode, show ks, showModifierKeys mk]
when (ks == GLFW.KeyState'Pressed) $ do
when (k == GLFW.Key'Escape) $
liftIO $ GLFW.setWindowShouldClose win True
when (k == GLFW.Key'Space) $
modify $ modifyGameState (turnAction [k])
when (k `elem` directionKeys) $
modify $ modifyGameState $ \gS@GameState{..} ->
moveUnitWithKey nextUnitInLine [k] gS
(EventChar _ c) ->
printEvent "char" [show c]
adjustWindow :: Game ()
adjustWindow = do
state <- get
let width = stateWindowWidth state
height = stateWindowHeight state
return () -- TODO
-- draw :: _ -> Game ()
draw glossState txMap = do
env <- ask
state <- get
liftIO $ displayPicture
(windowWidth, windowHeight)
seaColor glossState 1.0 (renderView txMap (stateGameState state))
liftIO $ return ()
where
seaColor = makeColorI 10 105 148 1
--------------------------------------------------------------------------------
getCursorKeyDirections :: GLFW.Window -> IO (Float, Float)
getCursorKeyDirections win = do
let arrowKeys = [Key'Left, Key'Right, Key'Up, Key'Down]
[x0, x1, y0, y1] <- mapM (keyIsPressed win) arrowKeys
let x0n = if x0 then 1 else 0
x1n = if x1 then (-1) else 0
y0n = if y0 then (-1) else 0
y1n = if y1 then 1 else 0
return (x0n + x1n, y0n + y1n)
keyIsPressed :: Window -> Key -> IO Bool
keyIsPressed win key = isPress `fmap` GLFW.getKey win key
isPress :: KeyState -> Bool
isPress KeyState'Pressed = True
isPress KeyState'Repeating = True
isPress _ = False
--------------------------------------------------------------------------------
printEvent :: String -> [String] -> Game ()
printEvent cbname fields =
liftIO $ putStrLn $ cbname ++ ": " ++ unwords fields
showModifierKeys :: GLFW.ModifierKeys -> String
showModifierKeys mk =
"[mod keys: " ++ keys ++ "]"
where
keys = if null xs then "none" else unwords xs
xs = catMaybes ys
ys = [ if GLFW.modifierKeysShift mk then Just "shift" else Nothing
, if GLFW.modifierKeysControl mk then Just "control" else Nothing
, if GLFW.modifierKeysAlt mk then Just "alt" else Nothing
, if GLFW.modifierKeysSuper mk then Just "super" else Nothing
]
|
joom/civ
|
src/Main.hs
|
mit
| 15,136
| 0
| 18
| 4,957
| 3,956
| 1,967
| 1,989
| 369
| 14
|
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, FlexibleContexts,
DeriveDataTypeable #-}
-- | This module provides facilities for implementing webservers, in a
-- servlet-like style. The general philosophy is that direct-http makes
-- as few decisions as possible for the user code, allowing such things as
-- URL routing and virtual-host policies to be implemented in any desired
-- fashion. It focuses on providing a robust transport layer which can
-- integrate well with any higher layer.
module Network.HTTP (
-- * The monad
HTTP,
HTTPState,
MonadHTTP(..),
-- * Accepting requests
HTTPServerParameters(..),
HTTPListenSocketParameters(..),
acceptLoop,
-- * Logging
httpLog,
-- * Concurrency
httpFork,
-- * Exceptions
HTTPException(..),
-- * Request information
-- | It is common practice for web servers to make their own
-- extensions to the CGI/1.1 set of defined variables. For
-- example, @REMOTE_PORT@ is not defined by the specification,
-- but often seen in the wild. Furthermore, it is also common
-- for user agents to make their own extensions to the HTTP/1.1
-- set of defined headers. One might therefore expect to see
-- functions defined here allowing direct interrogation of
-- variables and headers by name. This is not done, because it
-- is not the primary goal of direct-http to be a CGI/FastCGI
-- host, and that functionality is trivial for any user code
-- implementing such a host to provide. It would actually be
-- rather more difficult for direct-http to provide many of the
-- common values, because it does not implement the facilities
-- they are supposed to give information about. Even as simple
-- a concept as "what server address is this" must take into
-- account name-canonicalization and virtual-host policies,
-- which are left to user code. As for document root, it is
-- possible to implement a server with no capacity to serve
-- files, in which case the concept is nonsensical. Enough
-- important values are necessarily absent for reasons such as
-- these that there is little reason to provide the remaining
-- ones either.
--
-- Too long, didn't read? Instead of providing access to
-- CGI-like variables, direct-http provides higher-level calls
-- which give convenient names and types to the same
-- information. It does provide access to headers, however.
--
-- Cookies may also be manipulated through HTTP headers
-- directly; the functions here are provided only as a
-- convenience.
Header(..),
getRequestHeader,
getAllRequestHeaders,
Cookie(..),
getCookie,
getAllCookies,
getCookieValue,
getRemoteAddress,
getRequestMethod,
getRequestURI,
getServerAddress,
getContentLength,
getContentType,
-- * Request content data
-- | At the moment the handler is invoked, all request headers
-- have been received, but content data has not necessarily
-- been. Requests to read content data block the handler (but
-- not other concurrent handlers) until there is enough data in
-- the buffer to satisfy them, or until timeout where
-- applicable.
httpGet,
httpGetNonBlocking,
httpGetContents,
httpIsReadable,
-- * Response information and content data
-- | When the handler is first invoked, neither response headers
-- nor content data have been sent to the client. Setting of
-- response headers is lazy, merely setting internal variables,
-- until something forces them to be output. For example,
-- attempting to send content data will force response headers
-- to be output first. It is not necessary to close the output
-- stream explicitly, but it may be desirable, for example to
-- continue processing after returning results to the user.
--
-- There is no reason that client scripts cannot use any
-- encoding they wish, including the chunked encoding, if they
-- have set appropriate headers. This package, however, does
-- not explicitly support that, because client scripts can
-- easily implement it for themselves.
--
-- At the start of each request, the response status is set to
-- @200 OK@ and the only response header set is
-- @Content-Type: text/html@. These may be overridden by later
-- calls, at any time before headers have been sent.
--
-- Cookies may also be manipulated through HTTP headers
-- directly; the functions here are provided only as a
-- convenience.
setResponseStatus,
getResponseStatus,
setResponseHeader,
unsetResponseHeader,
getResponseHeader,
setCookie,
unsetCookie,
mkSimpleCookie,
mkCookie,
permanentRedirect,
seeOtherRedirect,
sendResponseHeaders,
responseHeadersSent,
responseHeadersModifiable,
httpPut,
httpPutStr,
httpCloseOutput,
httpIsWritable
)
where
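-- As an overview of the request/response calls exported above, a handler
-- that echoes the request body back to the user agent might look like the
-- following. This is a sketch, not part of the library's own documentation;
-- it assumes 'httpGetContents' returns the remaining content data as a
-- 'ByteString' and that 'httpPut' writes a 'ByteString' body.
--
-- > echoHandler :: HTTP ()
-- > echoHandler = do
-- >   setResponseHeader HttpContentType "application/octet-stream"
-- >   body <- httpGetContents
-- >   httpPut body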
import Control.Concurrent.Lifted
import Control.Concurrent.MSem (MSem)
import qualified Control.Concurrent.MSem as MSem
import Control.Exception.Lifted
import Control.Monad.Base
import Control.Monad.Reader
import Control.Monad.Trans.Control
import Data.Bits
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.UTF8 as UTF8
import Data.Char
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Time
import Data.Time.Clock.POSIX
import Data.Typeable
import Data.Word
import Foreign.C.Error
import GHC.IO.Exception (IOErrorType(..))
import qualified Network.Socket as Network hiding (send, sendTo, recv, recvFrom)
import qualified Network.Socket.ByteString as Network
import Numeric
import Prelude hiding (catch)
import System.Daemonize
import System.Environment
import System.Exit
import System.IO
import System.IO.Error (ioeGetErrorType)
import qualified System.IO.Error as System
import System.Locale (defaultTimeLocale)
import qualified System.Posix as POSIX
-- | An opaque type representing the state of the HTTP server during a single
-- connection from a client.
data HTTPState = HTTPState {
httpStateAccessLogMaybeHandleMVar :: MVar (Maybe Handle),
httpStateErrorLogMaybeHandleMVar :: MVar (Maybe Handle),
httpStateForkPrimitive :: IO () -> IO ThreadId,
httpStateThreadSetMVar :: MVar (Set ThreadId),
httpStateThreadTerminationMSem :: MSem Word,
httpStateMaybeConnection :: Maybe HTTPConnection
}
data HTTPConnection = HTTPConnection {
httpConnectionServerAddress :: Network.SockAddr,
httpConnectionSocket :: Network.Socket,
httpConnectionPeer :: Network.SockAddr,
httpConnectionInputBufferMVar :: MVar ByteString,
httpConnectionTimestamp :: MVar POSIXTime,
httpConnectionRemoteHostname :: MVar (Maybe (Maybe String)),
httpConnectionRequestMethod :: MVar String,
httpConnectionRequestURI :: MVar String,
httpConnectionRequestProtocol :: MVar String,
httpConnectionRequestHeaderMap :: MVar (Map Header ByteString),
httpConnectionRequestCookieMap :: MVar (Maybe (Map String Cookie)),
httpConnectionRequestContentBuffer :: MVar ByteString,
httpConnectionRequestContentParameters :: MVar RequestContentParameters,
httpConnectionResponseHeadersSent :: MVar Bool,
httpConnectionResponseHeadersModifiable :: MVar Bool,
httpConnectionResponseStatus :: MVar Int,
httpConnectionResponseHeaderMap :: MVar (Map Header ByteString),
httpConnectionResponseCookieMap :: MVar (Map String Cookie),
httpConnectionResponseContentBuffer :: MVar ByteString,
httpConnectionResponseContentParameters :: MVar ResponseContentParameters
}
data RequestContentParameters
= RequestContentUninitialized
| RequestContentNone
| RequestContentClosed
| RequestContentIdentity Int
| RequestContentChunked Int
data ResponseContentParameters
= ResponseContentUninitialized
| ResponseContentClosed
| ResponseContentBufferedIdentity
| ResponseContentUnbufferedIdentity Int
| ResponseContentChunked
deriving (Show)
-- | An object representing a cookie (a small piece of information, mostly
-- metadata, stored by a user-agent on behalf of the server), either one
-- received as part of the request or one to be sent as part of the
-- response.
data Cookie = Cookie {
cookieName :: String,
cookieValue :: String,
cookieVersion :: Int,
cookiePath :: Maybe String,
cookieDomain :: Maybe String,
cookieMaxAge :: Maybe Int,
cookieSecure :: Bool,
cookieComment :: Maybe String
} deriving (Show)
data ConnectionTerminatingError = UnexpectedEndOfInput
deriving (Typeable)
instance Exception ConnectionTerminatingError
instance Show ConnectionTerminatingError where
show UnexpectedEndOfInput = "Unexpected end of input."
-- | The monad within which each single request from a client is handled.
--
-- Note that there is an instance 'MonadBaseControl' 'IO' 'HTTP', so that
-- exceptions can be thrown, caught, and otherwise manipulated with the
-- lifted primitives from lifted-base's 'Control.Exception.Lifted'.
type HTTP = ReaderT HTTPState IO
-- | The class of monads within which the HTTP calls are valid. You may wish
-- to create your own monad implementing this class. Note that the
-- prerequisite is 'MonadBaseControl' 'IO' m, which is similar to
-- 'MonadIO' m, but with, among other things, more capability for
-- exception handling.
class (MonadBaseControl IO m) => MonadHTTP m where
-- | Returns the opaque 'HTTPState' object representing the state of
-- the HTTP server.
  -- Should not be called directly by user code, except by implementations of
  -- 'MonadHTTP'; it is exported so that user monads can implement the
  -- interface.
getHTTPState :: m HTTPState
instance MonadHTTP HTTP where
getHTTPState = ask
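-- A sketch of the "implement 'MonadHTTP' for your own monad" route mentioned
-- above: layer application configuration over 'HTTP' with another 'ReaderT'.
-- The 'AppConfig' type is hypothetical, and the instance head needs
-- FlexibleInstances; 'ReaderT' already has the required 'MonadBaseControl'
-- 'IO' instance, so 'getHTTPState' is all that is left to define.
--
-- > data AppConfig = AppConfig { appGreeting :: String }
-- >
-- > instance MonadHTTP (ReaderT AppConfig HTTP) where
-- >   getHTTPState = lift getHTTPState
-- >
-- > runApp :: AppConfig -> ReaderT AppConfig HTTP a -> HTTP a
-- > runApp config action = runReaderT action config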
getHTTPConnection :: (MonadHTTP m) => m HTTPConnection
getHTTPConnection = do
state <- getHTTPState
case httpStateMaybeConnection state of
Nothing -> throwIO NoConnection
Just connection -> return connection
-- | Forks a thread to run the given action, using the forking primitive that
-- was passed in the configuration to 'acceptLoop', and additionally
-- registers that thread with the main server thread, which has the sole
-- effect and purpose of causing the server to not exit until and unless the
-- child thread does. All of the listener-socket and connection threads
-- created by the server go through this function.
httpFork :: (MonadHTTP m) => m () -> m ThreadId
httpFork action = do
state <- getHTTPState
let mvar = httpStateThreadSetMVar state
msem = httpStateThreadTerminationMSem state
modifyMVar mvar $ \threadSet -> do
childThread <- liftBaseDiscard (httpStateForkPrimitive state)
$ finally action
(do
                      modifyMVar_ mvar $ \threadSet -> do
                        self <- myThreadId
                        -- Remove this thread from the current set; the
                        -- original definition referred to threadSet' itself,
                        -- which never terminates.
                        let threadSet' = Set.delete self threadSet
                        return threadSet'
                      liftBase $ MSem.signal msem)
let threadSet' = Set.insert childThread threadSet
return (threadSet', childThread)
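-- For example, a handler can hand slow work to a registered background thread
-- and answer immediately (a sketch; the logged message stands in for real
-- work, and 202 is "Accepted"):
--
-- > handler :: HTTP ()
-- > handler = do
-- >   _ <- httpFork (httpLog "doing slow work in the background")
-- >   setResponseStatus 202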
-- | A record used to configure the server. Broken informally into the four
-- categories of logging, job-control, concurrency, and networking. For
-- logging, the configuration contains optional paths to files for the
-- access and error logs (if these are omitted, logging is not done). For
-- job-control, it contains a flag indicating whether to run as a daemon,
-- and optionally the names of a Unix user and/or group to switch to in the
-- process of daemonization. For concurrency, it contains a forking
-- primitive such as 'forkIO' or 'forkOS'. Finally, for networking, it
-- contains a list of parameters for ports to listen on, each of which has
-- its own sub-configuration record.
--
-- Notice that checking the value of the Host: header, and implementing
-- virtual-host policies, is not done by direct-http but rather is up to the
-- user of the library; hence, there is no information in the configuration
-- about the hostnames to accept from the user-agent.
--
-- If the access logfile path is not Nothing, 'acceptLoop' opens this
-- logfile in append mode and uses it to log all accesses; otherwise, access
-- is not logged.
--
-- If the error logfile path is not Nothing, 'acceptLoop' opens this logfile
-- in append mode and uses it to log all errors; otherwise, if not
-- daemonizing, errors are logged to standard output; if daemonizing, errors
-- are not logged.
--
-- If the daemonize flag is True, 'acceptLoop' closes the standard IO
-- streams and moves the process into the background, doing all the usual
-- Unix things to make it run as a daemon henceforth. This is optional
-- because it might be useful to turn it off for debugging purposes.
--
-- The forking primitive is typically either 'forkIO' or 'forkOS', and is
-- used by 'acceptLoop' both to create listener threads, and to create
-- connection threads. It is valid to use a custom primitive, such as one
-- that attempts to pool OS threads, but it must actually provide
-- concurrency - otherwise there will be a deadlock. There is no support for
-- single-threaded operation.
--
-- Notice that we take the forking primitive in terms of 'IO', even though
-- we actually lift it (with 'liftBaseDiscard'). This is because
-- lifted-base, as of this writing and its version 0.1.1, only supports
-- 'forkIO' and not 'forkOS'.
--
-- The loop never returns, but will terminate the program with status 0 if
-- and when it ever has no child threads alive; child threads for this
-- purpose are those created through 'httpFork', which means all
-- listener-socket and connection threads created by 'acceptLoop', as well
-- as any threads created by client code through 'httpFork', but not threads
-- created by client code through other mechanisms.
--
-- The author of direct-http has made no effort to implement custom
-- thread-pooling forking primitives, but has attempted not to preclude
-- them. If anyone attempts to implement such a thing, feedback is hereby
-- solicited.
data HTTPServerParameters = HTTPServerParameters {
serverParametersAccessLogPath :: Maybe FilePath,
serverParametersErrorLogPath :: Maybe FilePath,
serverParametersDaemonize :: Bool,
serverParametersUserToChangeTo :: Maybe String,
serverParametersGroupToChangeTo :: Maybe String,
serverParametersForkPrimitive :: IO () -> IO ThreadId,
serverParametersListenSockets :: [HTTPListenSocketParameters]
}
-- | A record used to configure an individual port listener and its socket as
-- part of the general server configuration. Consists of a host address and
-- port number to bind the socket to, and a flag indicating whether the
-- listener should use the secure version of the protocol.
data HTTPListenSocketParameters = HTTPListenSocketParameters {
listenSocketParametersAddress :: Network.SockAddr,
listenSocketParametersSecure :: Bool
}
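-- A minimal configuration sketch, not taken from the library's own
-- documentation: listen on port 8080 on all IPv4 interfaces, log errors to
-- standard output, stay in the foreground, and answer every request with a
-- fixed plain-text body. It assumes the same qualified 'Network.Socket'
-- import (pre-3.0 API) used by this module, 'forkIO' from
-- Control.Concurrent, and that 'httpPutStr' takes the body as a 'String'.
--
-- > main :: IO ()
-- > main = acceptLoop
-- >   HTTPServerParameters {
-- >       serverParametersAccessLogPath = Nothing,
-- >       serverParametersErrorLogPath = Nothing,
-- >       serverParametersDaemonize = False,
-- >       serverParametersUserToChangeTo = Nothing,
-- >       serverParametersGroupToChangeTo = Nothing,
-- >       serverParametersForkPrimitive = forkIO,
-- >       serverParametersListenSockets =
-- >         [HTTPListenSocketParameters {
-- >            listenSocketParametersAddress =
-- >              Network.SockAddrInet 8080 Network.iNADDR_ANY,
-- >            listenSocketParametersSecure = False
-- >          }]
-- >     }
-- >   (do setResponseHeader HttpContentType "text/plain"
-- >       httpPutStr "Hello from direct-http!")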
-- | Takes a server parameters record and a handler, and concurrently accepts
-- requests from user agents, forking with the primitive specified by the
-- parameters and invoking the handler in the forked thread inside the
-- 'HTTP' monad for each request.
--
-- Note that although there is no mechanism to substitute another type of
-- monad for HTTP, you can enter your own monad within the handler, much as
-- you would enter your own monad within IO. You simply have to implement
-- the 'MonadHTTP' class.
--
-- Any exceptions not caught within the handler are caught by
-- 'acceptLoop', and cause the termination of that handler, but not
-- of the connection or the accept loop.
acceptLoop
:: HTTPServerParameters
-- ^ Parameters describing the behavior of the server to run.
-> (HTTP ())
     -- ^ A handler which is invoked once for each incoming request.
-> IO ()
-- ^ Never actually returns.
acceptLoop parameters handler = do
if serverParametersDaemonize parameters
then daemonize (defaultDaemonOptions {
daemonUserToChangeTo =
serverParametersUserToChangeTo parameters,
daemonGroupToChangeTo =
serverParametersGroupToChangeTo parameters
})
(initialize)
(\bootstrap -> acceptLoop' bootstrap)
else do
bootstrap <- initialize
acceptLoop' bootstrap
where initialize = do
accessLogMaybeHandle
<- case serverParametersAccessLogPath parameters of
Nothing -> return Nothing
Just path -> liftBase $ openBinaryFile path AppendMode
>>= return . Just
errorLogMaybeHandle
<- case serverParametersErrorLogPath parameters of
Nothing -> if serverParametersDaemonize parameters
then return Nothing
else return $ Just stdout
Just path -> liftBase $ openBinaryFile path AppendMode
>>= return . Just
listenSockets <-
catch (mapM createListenSocket
(serverParametersListenSockets parameters))
(\e -> do
case errorLogMaybeHandle of
Nothing -> return ()
Just errorLogHandle -> do
hPutStrLn errorLogHandle $
"Failed to start: " ++ (show (e :: SomeException))
liftBase $ exitFailure)
return (listenSockets, accessLogMaybeHandle, errorLogMaybeHandle)
acceptLoop' (listenSockets,
accessLogMaybeHandle,
errorLogMaybeHandle) = do
accessLogMaybeHandleMVar <- newMVar accessLogMaybeHandle
errorLogMaybeHandleMVar <- newMVar errorLogMaybeHandle
let forkPrimitive = serverParametersForkPrimitive parameters
threadSetMVar <- newMVar Set.empty
threadTerminationMSem <- MSem.new 0
let state = HTTPState {
httpStateAccessLogMaybeHandleMVar =
accessLogMaybeHandleMVar,
httpStateErrorLogMaybeHandleMVar =
errorLogMaybeHandleMVar,
httpStateForkPrimitive = forkPrimitive,
httpStateThreadSetMVar = threadSetMVar,
httpStateThreadTerminationMSem = threadTerminationMSem,
httpStateMaybeConnection = Nothing
}
flip runReaderT state $ do
httpLog $ "Server started."
threadIDs <-
mapM (\listenSocket -> httpFork $ acceptLoop'' listenSocket)
listenSockets
threadWaitLoop
acceptLoop'' :: Network.Socket -> HTTP ()
acceptLoop'' listenSocket = do
(socket, peer) <- liftBase $ Network.accept listenSocket
httpFork $ requestLoop socket peer handler
acceptLoop'' listenSocket
threadWaitLoop = do
state <- getHTTPState
let mvar = httpStateThreadSetMVar state
msem = httpStateThreadTerminationMSem state
threadSet <- readMVar mvar
if Set.null threadSet
then liftBase exitSuccess
else do
liftBase $ MSem.wait msem
threadWaitLoop
createListenSocket
:: HTTPListenSocketParameters -> IO Network.Socket
createListenSocket parameters = do
let address = listenSocketParametersAddress parameters
addressFamily =
case address of
Network.SockAddrInet _ _ -> Network.AF_INET
Network.SockAddrInet6 _ _ _ _ -> Network.AF_INET6
Network.SockAddrUnix _ -> Network.AF_UNIX
listenSocket <- liftBase $ Network.socket addressFamily
Network.Stream
Network.defaultProtocol
liftBase $ Network.bind listenSocket address
liftBase $ Network.listen listenSocket 1024
return listenSocket
requestLoop :: Network.Socket
-> Network.SockAddr
-> HTTP ()
-> HTTP ()
requestLoop socket peer handler = do
serverAddress <- liftBase $ Network.getSocketName socket
inputBufferMVar <- newMVar $ BS.empty
timestampMVar <- newEmptyMVar
remoteHostnameMVar <- newMVar Nothing
requestMethodMVar <- newEmptyMVar
requestURIMVar <- newEmptyMVar
requestProtocolMVar <- newEmptyMVar
requestHeaderMapMVar <- newEmptyMVar
requestCookieMapMVar <- newEmptyMVar
requestContentBufferMVar <- newEmptyMVar
requestContentParametersMVar <- newEmptyMVar
responseHeadersSentMVar <- newEmptyMVar
responseHeadersModifiableMVar <- newEmptyMVar
responseStatusMVar <- newEmptyMVar
responseHeaderMapMVar <- newEmptyMVar
responseCookieMapMVar <- newEmptyMVar
responseContentBufferMVar <- newEmptyMVar
responseContentParametersMVar <- newEmptyMVar
let connection = HTTPConnection {
httpConnectionServerAddress = serverAddress,
httpConnectionSocket = socket,
httpConnectionPeer = peer,
httpConnectionInputBufferMVar = inputBufferMVar,
httpConnectionTimestamp = timestampMVar,
httpConnectionRemoteHostname = remoteHostnameMVar,
httpConnectionRequestMethod = requestMethodMVar,
httpConnectionRequestURI = requestURIMVar,
httpConnectionRequestProtocol = requestProtocolMVar,
httpConnectionRequestHeaderMap = requestHeaderMapMVar,
httpConnectionRequestCookieMap = requestCookieMapMVar,
httpConnectionRequestContentBuffer
= requestContentBufferMVar,
httpConnectionRequestContentParameters
= requestContentParametersMVar,
httpConnectionResponseHeadersSent
= responseHeadersSentMVar,
httpConnectionResponseHeadersModifiable
= responseHeadersModifiableMVar,
httpConnectionResponseStatus = responseStatusMVar,
httpConnectionResponseHeaderMap = responseHeaderMapMVar,
httpConnectionResponseCookieMap = responseCookieMapMVar,
httpConnectionResponseContentBuffer
= responseContentBufferMVar,
httpConnectionResponseContentParameters
= responseContentParametersMVar
}
requestLoop1 :: HTTP ()
requestLoop1 = do
finally requestLoop2
(catch (liftBase $ Network.sClose socket)
(\error -> do
return (error :: IOException)
return ()))
requestLoop2 :: HTTP ()
requestLoop2 = do
catch requestLoop3
(\error -> do
httpLog $ "Internal uncaught exception: "
++ (show (error :: SomeException)))
requestLoop3 :: HTTP ()
requestLoop3 = do
catch requestLoop4
(\error -> do
connection <- getHTTPConnection
httpLog $ "Connection from "
++ (show $ httpConnectionPeer connection)
++ " terminated due to error: "
++ (show (error :: ConnectionTerminatingError)))
requestLoop4 :: HTTP ()
requestLoop4 = do
maybeRequestInfo <- recvHeaders
case maybeRequestInfo of
Nothing -> return ()
Just (method, url, protocol, headers) -> do
timestamp <- liftBase getPOSIXTime
putMVar timestampMVar timestamp
putMVar requestMethodMVar $ UTF8.toString method
putMVar requestURIMVar $ UTF8.toString url
putMVar requestProtocolMVar $ UTF8.toString protocol
putMVar requestHeaderMapMVar headers
putMVar requestCookieMapMVar Nothing
putMVar requestContentBufferMVar BS.empty
putMVar requestContentParametersMVar RequestContentUninitialized
putMVar responseHeadersSentMVar False
putMVar responseHeadersModifiableMVar True
putMVar responseStatusMVar 200
putMVar responseHeaderMapMVar Map.empty
putMVar responseCookieMapMVar Map.empty
putMVar responseContentBufferMVar BS.empty
putMVar responseContentParametersMVar ResponseContentUninitialized
catch
(do
valid <- getRequestValid
if valid
then do
prepareResponse
handler
else do
setResponseStatus 400)
(\error -> do
httpLog $ "Uncaught exception: "
++ (show (error :: SomeException))
alreadySent <- responseHeadersSent
if alreadySent
then return ()
else setResponseStatus 500)
logAccess
isWritable <- httpIsWritable
if isWritable
then httpCloseOutput
else return ()
connectionShouldStayAlive <- getConnectionShouldStayAlive
if connectionShouldStayAlive
then do
takeMVar timestampMVar
takeMVar requestMethodMVar
takeMVar requestURIMVar
takeMVar requestProtocolMVar
takeMVar requestHeaderMapMVar
takeMVar requestCookieMapMVar
takeMVar requestContentBufferMVar
takeMVar requestContentParametersMVar
takeMVar responseHeadersSentMVar
takeMVar responseHeadersModifiableMVar
takeMVar responseStatusMVar
takeMVar responseHeaderMapMVar
takeMVar responseCookieMapMVar
takeMVar responseContentBufferMVar
takeMVar responseContentParametersMVar
requestLoop4
else return ()
state <- ask
lift $ flip runReaderT
(state { httpStateMaybeConnection = Just connection })
requestLoop1
getRequestValid :: (MonadHTTP m) => m Bool
getRequestValid = do
hasContent <- getRequestHasContent
let getHeadersValid = do
connection <- getHTTPConnection
headerMap <- readMVar $ httpConnectionRequestHeaderMap connection
foldM (\result header -> do
if isValidInRequest header
then return result
else do
if hasContent
then return result
else if not $ isValidOnlyWithEntity header
then return result
else do
httpLog $ "Header " ++ (show header)
++ " is valid only with an entity."
return False)
True
(Map.keys headerMap)
getContentValid = do
contentAllowed <- getRequestContentAllowed
if contentAllowed
then return True
else if not hasContent
then return True
else do
method <- getRequestMethod
httpLog $ "Content provided but not allowed with "
++ method ++ "."
return False
httpVersion <- getRequestProtocol
case httpVersion of
"HTTP/1.0" -> do
headersValid <- getHeadersValid
contentValid <- getContentValid
return $ and [headersValid, contentValid]
"HTTP/1.1" -> do
headersValid <- getHeadersValid
contentValid <- getContentValid
mandatoryHeadersIncluded <- do
maybeHost <- getRequestHeader HttpHost
case maybeHost of
Just _ -> return True
Nothing -> do
httpLog $ "Host header not provided and HTTP/1.1 used."
return False
return $ and [headersValid, mandatoryHeadersIncluded, contentValid]
_ -> return False
getConnectionShouldStayAlive :: (MonadHTTP m) => m Bool
getConnectionShouldStayAlive = do
httpVersion <- getRequestProtocol
case httpVersion of
"HTTP/1.0" -> return False
"HTTP/1.1" -> do
maybeConnection <- getRequestHeader HttpConnection
case maybeConnection of
Nothing -> return True
Just connectionValue -> do
let connectionWords = computeWords connectionValue
computeWords input =
let (before, after) = break (\c -> c == ' ') input
in if null after
then [before]
else let rest = computeWords $ drop 1 after
in before : rest
connectionTokens = map (map toLower) connectionWords
closeSpecified = elem "close" connectionTokens
return $ not closeSpecified
_ -> return False
prepareResponse :: (MonadHTTP m) => m ()
prepareResponse = do
  HTTPConnection { httpConnectionTimestamp = mvar } <- getHTTPConnection
  timestamp <- readMVar mvar
  -- HTTP dates are always expressed in GMT (RFC 1123 format).
  let dateString = formatTime defaultTimeLocale
                              "%a, %d %b %Y %H:%M:%S GMT"
                              $ posixSecondsToUTCTime timestamp
  setResponseHeader HttpDate dateString
  setResponseHeader HttpContentType "text/html; charset=UTF-8"
logAccess :: (MonadHTTP m) => m ()
logAccess = do
remoteHost <- getRemoteHost
identString <- return "-"
usernameString <- return "-"
connection <- getHTTPConnection
timestamp <- readMVar (httpConnectionTimestamp connection)
let timestampString = formatTime defaultTimeLocale
"%-d/%b/%Y:%H:%M:%S %z"
$ posixSecondsToUTCTime timestamp
methodString <- getRequestMethod
urlString <- getRequestURI
protocolString <- getRequestProtocol
responseStatusString <- getResponseStatus >>= return . show
maybeResponseSize <- return (Nothing :: Maybe Int) -- TODO
responseSizeString
<- case maybeResponseSize of
Nothing -> return "-"
Just responseSize -> return $ show responseSize
maybeReferrerString <- getRequestHeader HttpReferrer
referrerString <- case maybeReferrerString of
Nothing -> return "-"
Just referrerString -> return referrerString
maybeUserAgentString <- getRequestHeader HttpUserAgent
userAgentString <- case maybeUserAgentString of
Nothing -> return "-"
Just userAgentString -> return userAgentString
httpAccessLog $ remoteHost
++ " "
++ identString
++ " "
++ usernameString
++ " ["
++ timestampString
++ "] \""
++ methodString
++ " "
++ urlString
++ " "
++ protocolString
++ "\" "
++ responseStatusString
++ " "
++ responseSizeString
++ " \""
++ referrerString
++ "\" \""
++ userAgentString
++ "\""
parseCookies :: String -> [Cookie]
parseCookies value =
let findSeparator string
= let quotePoint = if (length string > 0) && (string !! 0 == '"')
then 1 + (findBalancingQuote $ drop 1 string)
else 0
maybeSemicolonPoint
= case (findIndex (\c -> (c == ';') || (c == ','))
$ drop quotePoint string)
of Nothing -> Nothing
Just index -> Just $ index + quotePoint
in maybeSemicolonPoint
findBalancingQuote string
= let consume accumulator ('\\' : c : rest) = consume (accumulator + 2) rest
consume accumulator ('"' : rest) = accumulator
consume accumulator (c : rest) = consume (accumulator + 1) rest
consume accumulator "" = accumulator
in consume 0 string
split [] = []
split string = case findSeparator string of
Nothing -> [string]
Just index ->
let (first, rest) = splitAt index string
in first : (split $ drop 1 rest)
splitNameValuePair string = case elemIndex '=' (filterNameValuePair string) of
Nothing -> (string, "")
Just index -> let (first, rest)
= splitAt index
(filterNameValuePair
string)
in (first, filterValue (drop 1 rest))
filterNameValuePair string
= reverse $ dropWhile isSpace $ reverse $ dropWhile isSpace string
filterValue string = if (length string > 0) && ((string !! 0) == '"')
then take (findBalancingQuote $ drop 1 string)
$ drop 1 string
else string
pairs = map splitNameValuePair $ split value
(version, pairs') = case pairs of
("$Version", versionString) : rest
-> case parseInt versionString of
Nothing -> (0, rest)
Just version -> (version, rest)
_ -> (0, pairs)
takeCookie pairs = case pairs of
(name, value) : pairs'
| (length name > 0) && (take 1 name /= "$")
-> let (maybePath, maybeDomain, pairs'')
= takePathAndDomain pairs'
in (Cookie {
cookieName = name,
cookieValue = value,
cookieVersion = version,
cookiePath = maybePath,
cookieDomain = maybeDomain,
cookieMaxAge = Nothing,
cookieSecure = False,
cookieComment = Nothing
}
: takeCookie pairs'')
_ : pairs' -> takeCookie pairs'
[] -> []
takePathAndDomain pairs = let (maybePath, pairs')
= case pairs of ("$Path", path) : rest
-> (Just path, rest)
_ -> (Nothing, pairs)
(maybeDomain, pairs'')
= case pairs' of ("$Domain", domain) : rest
-> (Just domain, rest)
_ -> (Nothing, pairs')
in (maybePath, maybeDomain, pairs'')
in takeCookie pairs'
printCookies :: [Cookie] -> ByteString
printCookies cookies =
let printCookie cookie
= BS.intercalate (UTF8.fromString ";")
$ map printNameValuePair $ nameValuePairs cookie
printNameValuePair (name, Nothing) = UTF8.fromString name
printNameValuePair (name, Just value)
= BS.concat [UTF8.fromString name,
UTF8.fromString "=",
UTF8.fromString value]
{- Safari doesn't like this.
= if isValidCookieToken value
then name ++ "=" ++ value
else name ++ "=\"" ++ escape value ++ "\""
escape "" = ""
escape ('\\':rest) = "\\\\" ++ escape rest
escape ('\"':rest) = "\\\"" ++ escape rest
escape (c:rest) = [c] ++ escape rest
-}
nameValuePairs cookie = [(cookieName cookie, Just $ cookieValue cookie)]
++ (case cookieComment cookie of
Nothing -> []
Just comment -> [("Comment", Just comment)])
++ (case cookieDomain cookie of
Nothing -> []
Just domain -> [("Domain", Just domain)])
++ (case cookieMaxAge cookie of
Nothing -> []
Just maxAge -> [("Max-Age", Just $ show maxAge)])
++ (case cookiePath cookie of
Nothing -> []
Just path -> [("Path", Just $ path)])
++ (case cookieSecure cookie of
False -> []
True -> [("Secure", Nothing)])
++ [("Version", Just $ show $ cookieVersion cookie)]
in BS.intercalate (UTF8.fromString ",") $ map printCookie cookies
parseInt :: String -> Maybe Int
parseInt string =
if (not $ null string) && (all isDigit string)
then Just $ let accumulate "" accumulator = accumulator
accumulate (n:rest) accumulator
= accumulate rest $ accumulator * 10 + digitToInt n
in accumulate string 0
else Nothing
recvHeaders :: (MonadHTTP m)
=> m (Maybe (ByteString,
ByteString,
ByteString,
Map Header ByteString))
recvHeaders = do
HTTPConnection { httpConnectionInputBufferMVar = inputBufferMVar }
<- getHTTPConnection
modifyMVar inputBufferMVar $ \inputBuffer -> do
(inputBuffer, maybeLine) <- recvLine inputBuffer
(inputBuffer, result) <- case maybeLine of
Nothing -> return (inputBuffer, Nothing)
Just line -> do
let computeWords input =
let (before, after) = BS.breakSubstring (UTF8.fromString " ") input
in if BS.null after
then [before]
else let rest = computeWords $ BS.drop 1 after
in before : rest
words = computeWords line
case words of
[method, url, protocol]
| (isValidMethod method)
&& (isValidURL url)
&& (isValidProtocol protocol)
-> do
let loop inputBuffer headersSoFar = do
(inputBuffer, maybeLine) <- recvLine inputBuffer
case maybeLine of
Nothing -> return (inputBuffer, Nothing)
Just line
| BS.null line -> do
return (inputBuffer,
Just (method, url, protocol, headersSoFar))
| otherwise -> do
case parseHeader line of
Nothing -> do
logInvalidRequest
return (inputBuffer, Nothing)
Just (header, value) -> do
let headersSoFar'
= case Map.lookup header headersSoFar of
Nothing -> Map.insert header
value
headersSoFar
Just oldValue
-> Map.insert
header
(BS.concat [oldValue,
(UTF8.fromString ","),
value])
headersSoFar
loop inputBuffer headersSoFar'
loop inputBuffer Map.empty
_ -> do
logInvalidRequest
return (inputBuffer, Nothing)
return (inputBuffer, result)
parseHeader :: ByteString -> Maybe (Header, ByteString)
parseHeader line = do
case BS.breakSubstring (UTF8.fromString ":") line of
(_, bytestring) | bytestring == BS.empty -> Nothing
(name, delimitedValue) -> Just (toHeader name, BS.drop 1 delimitedValue)
logInvalidRequest :: MonadHTTP m => m ()
logInvalidRequest = do
connection <- getHTTPConnection
httpLog $ "Invalid request from "
++ (show $ httpConnectionPeer connection)
++ "; closing its connection."
isValidMethod :: ByteString -> Bool
isValidMethod bytestring
| bytestring == UTF8.fromString "OPTIONS" = True
| bytestring == UTF8.fromString "GET" = True
| bytestring == UTF8.fromString "HEAD" = True
| bytestring == UTF8.fromString "POST" = True
| bytestring == UTF8.fromString "PUT" = True
| bytestring == UTF8.fromString "DELETE" = True
| bytestring == UTF8.fromString "TRACE" = True
| bytestring == UTF8.fromString "CONNECT" = True
| otherwise = False
isValidURL :: ByteString -> Bool
isValidURL _ = True
isValidProtocol :: ByteString -> Bool
isValidProtocol bytestring
| bytestring == UTF8.fromString "HTTP/1.0" = True
| bytestring == UTF8.fromString "HTTP/1.1" = True
| otherwise = False
recvLine :: (MonadHTTP m) => ByteString -> m (ByteString, Maybe ByteString)
recvLine inputBuffer = do
let loop inputBuffer length firstIteration = do
let blocking = not firstIteration
(inputBuffer, endOfInput)
<- extendInputBuffer inputBuffer length blocking
let (before, after)
= BS.breakSubstring (UTF8.fromString "\r\n") inputBuffer
if BS.null after
then if endOfInput
then return (inputBuffer, Nothing)
else loop inputBuffer (length + 80) False
else return (BS.drop 2 after, Just before)
let (before, after)
= BS.breakSubstring (UTF8.fromString "\r\n") inputBuffer
if BS.null after
then loop inputBuffer 80 True
else return (BS.drop 2 after, Just before)
recvBlock :: (MonadHTTP m) => Int -> m ByteString
recvBlock length = do
HTTPConnection { httpConnectionInputBufferMVar = inputBufferMVar } <-
getHTTPConnection
modifyMVar inputBufferMVar $ \inputBuffer -> do
(inputBuffer, endOfInput) <- extendInputBuffer inputBuffer length True
(result, inputBuffer) <- return $ BS.splitAt length inputBuffer
return (inputBuffer, result)
extendInputBuffer :: (MonadHTTP m)
=> ByteString -> Int -> Bool -> m (ByteString, Bool)
extendInputBuffer inputBuffer length blocking = do
HTTPConnection { httpConnectionSocket = socket } <- getHTTPConnection
let loop inputBuffer = do
if BS.length inputBuffer < length
then do
newInput <- liftBase $ Network.recv socket 4096
if BS.null newInput
then return (inputBuffer, True)
else if blocking
then loop $ BS.append inputBuffer newInput
else return (BS.append inputBuffer newInput, False)
else return (inputBuffer, False)
loop inputBuffer
-- | Logs a message using the web server's logging facility, prefixed with a
-- timestamp.
httpLog :: (MonadHTTP m) => String -> m ()
httpLog message = do
HTTPState { httpStateErrorLogMaybeHandleMVar = logMVar } <- getHTTPState
bracket (takeMVar logMVar)
(\maybeHandle -> putMVar logMVar maybeHandle)
(\maybeHandle -> do
case maybeHandle of
Nothing -> return ()
Just handle -> do
timestamp <- liftBase $ getPOSIXTime
let timestampString =
formatTime defaultTimeLocale "%Y-%m-%dT%H:%M:%SZ"
$ posixSecondsToUTCTime timestamp
liftBase $ hPutStrLn handle
$ timestampString ++ " " ++ message
liftBase $ hFlush handle)
httpAccessLog :: (MonadHTTP m) => String -> m ()
httpAccessLog message = do
HTTPState { httpStateAccessLogMaybeHandleMVar = logMVar } <- getHTTPState
withMVar logMVar
(\maybeHandle -> case maybeHandle of
Nothing -> return ()
Just handle -> do
liftBase $ hPutStrLn handle message
liftBase $ hFlush handle)
-- | Headers are classified by HTTP/1.1 as request headers, response headers,
-- entity headers, or general headers.
data Header
-- | Request headers
= HttpAccept
| HttpAcceptCharset
| HttpAcceptEncoding
| HttpAcceptLanguage
| HttpAuthorization
| HttpExpect
| HttpFrom
| HttpHost
| HttpIfMatch
| HttpIfModifiedSince
| HttpIfNoneMatch
| HttpIfRange
| HttpIfUnmodifiedSince
| HttpMaxForwards
| HttpProxyAuthorization
| HttpRange
| HttpReferrer
| HttpTE
| HttpUserAgent
-- | Response headers
| HttpAcceptRanges
| HttpAge
| HttpETag
| HttpLocation
| HttpProxyAuthenticate
| HttpRetryAfter
| HttpServer
| HttpVary
| HttpWWWAuthenticate
-- | Entity headers
| HttpAllow
| HttpContentEncoding
| HttpContentLanguage
| HttpContentLength
| HttpContentLocation
| HttpContentMD5
| HttpContentRange
| HttpContentType
| HttpExpires
| HttpLastModified
| HttpExtensionHeader ByteString
-- | General headers
| HttpCacheControl
| HttpConnection
| HttpDate
| HttpPragma
| HttpTrailer
| HttpTransferEncoding
| HttpUpgrade
| HttpVia
| HttpWarning
-- | Nonstandard headers
| HttpCookie
| HttpSetCookie
deriving (Eq, Ord)
instance Show Header where
show header = UTF8.toString $ fromHeader header
data HeaderType = RequestHeader
| ResponseHeader
| EntityHeader
| GeneralHeader
deriving (Eq, Show)
headerType :: Header -> HeaderType
headerType HttpAccept = RequestHeader
headerType HttpAcceptCharset = RequestHeader
headerType HttpAcceptEncoding = RequestHeader
headerType HttpAcceptLanguage = RequestHeader
headerType HttpAuthorization = RequestHeader
headerType HttpExpect = RequestHeader
headerType HttpFrom = RequestHeader
headerType HttpHost = RequestHeader
headerType HttpIfMatch = RequestHeader
headerType HttpIfModifiedSince = RequestHeader
headerType HttpIfNoneMatch = RequestHeader
headerType HttpIfRange = RequestHeader
headerType HttpIfUnmodifiedSince = RequestHeader
headerType HttpMaxForwards = RequestHeader
headerType HttpProxyAuthorization = RequestHeader
headerType HttpRange = RequestHeader
headerType HttpReferrer = RequestHeader
headerType HttpTE = RequestHeader
headerType HttpUserAgent = RequestHeader
headerType HttpAcceptRanges = ResponseHeader
headerType HttpAge = ResponseHeader
headerType HttpETag = ResponseHeader
headerType HttpLocation = ResponseHeader
headerType HttpProxyAuthenticate = ResponseHeader
headerType HttpRetryAfter = ResponseHeader
headerType HttpServer = ResponseHeader
headerType HttpVary = ResponseHeader
headerType HttpWWWAuthenticate = ResponseHeader
headerType HttpAllow = EntityHeader
headerType HttpContentEncoding = EntityHeader
headerType HttpContentLanguage = EntityHeader
headerType HttpContentLength = EntityHeader
headerType HttpContentLocation = EntityHeader
headerType HttpContentMD5 = EntityHeader
headerType HttpContentRange = EntityHeader
headerType HttpContentType = EntityHeader
headerType HttpExpires = EntityHeader
headerType HttpLastModified = EntityHeader
headerType (HttpExtensionHeader _) = GeneralHeader
headerType HttpCacheControl = GeneralHeader
headerType HttpConnection = GeneralHeader
headerType HttpDate = GeneralHeader
headerType HttpPragma = GeneralHeader
headerType HttpTrailer = GeneralHeader
headerType HttpTransferEncoding = GeneralHeader
headerType HttpUpgrade = GeneralHeader
headerType HttpVia = GeneralHeader
headerType HttpWarning = GeneralHeader
headerType HttpCookie = RequestHeader
headerType HttpSetCookie = ResponseHeader
fromHeader :: Header -> ByteString
fromHeader HttpAccept = UTF8.fromString "Accept"
fromHeader HttpAcceptCharset = UTF8.fromString "Accept-Charset"
fromHeader HttpAcceptEncoding = UTF8.fromString "Accept-Encoding"
fromHeader HttpAcceptLanguage = UTF8.fromString "Accept-Language"
fromHeader HttpAuthorization = UTF8.fromString "Authorization"
fromHeader HttpExpect = UTF8.fromString "Expect"
fromHeader HttpFrom = UTF8.fromString "From"
fromHeader HttpHost = UTF8.fromString "Host"
fromHeader HttpIfMatch = UTF8.fromString "If-Match"
fromHeader HttpIfModifiedSince = UTF8.fromString "If-Modified-Since"
fromHeader HttpIfNoneMatch = UTF8.fromString "If-None-Match"
fromHeader HttpIfRange = UTF8.fromString "If-Range"
fromHeader HttpIfUnmodifiedSince = UTF8.fromString "If-Unmodified-Since"
fromHeader HttpMaxForwards = UTF8.fromString "Max-Forwards"
fromHeader HttpProxyAuthorization = UTF8.fromString "Proxy-Authorization"
fromHeader HttpRange = UTF8.fromString "Range"
fromHeader HttpReferrer = UTF8.fromString "Referer"
fromHeader HttpTE = UTF8.fromString "TE"
fromHeader HttpUserAgent = UTF8.fromString "User-Agent"
fromHeader HttpAcceptRanges = UTF8.fromString "Accept-Ranges"
fromHeader HttpAge = UTF8.fromString "Age"
fromHeader HttpETag = UTF8.fromString "ETag"
fromHeader HttpLocation = UTF8.fromString "Location"
fromHeader HttpProxyAuthenticate = UTF8.fromString "Proxy-Authenticate"
fromHeader HttpRetryAfter = UTF8.fromString "Retry-After"
fromHeader HttpServer = UTF8.fromString "Server"
fromHeader HttpVary = UTF8.fromString "Vary"
fromHeader HttpWWWAuthenticate = UTF8.fromString "WWW-Authenticate"
fromHeader HttpAllow = UTF8.fromString "Allow"
fromHeader HttpContentEncoding = UTF8.fromString "Content-Encoding"
fromHeader HttpContentLanguage = UTF8.fromString "Content-Language"
fromHeader HttpContentLength = UTF8.fromString "Content-Length"
fromHeader HttpContentLocation = UTF8.fromString "Content-Location"
fromHeader HttpContentMD5 = UTF8.fromString "Content-MD5"
fromHeader HttpContentRange = UTF8.fromString "Content-Range"
fromHeader HttpContentType = UTF8.fromString "Content-Type"
fromHeader HttpExpires = UTF8.fromString "Expires"
fromHeader HttpLastModified = UTF8.fromString "Last-Modified"
fromHeader (HttpExtensionHeader name) = name
fromHeader HttpCacheControl = UTF8.fromString "Cache-Control"
fromHeader HttpConnection = UTF8.fromString "Connection"
fromHeader HttpDate = UTF8.fromString "Date"
fromHeader HttpPragma = UTF8.fromString "Pragma"
fromHeader HttpTrailer = UTF8.fromString "Trailer"
fromHeader HttpTransferEncoding = UTF8.fromString "Transfer-Encoding"
fromHeader HttpUpgrade = UTF8.fromString "Upgrade"
fromHeader HttpVia = UTF8.fromString "Via"
fromHeader HttpWarning = UTF8.fromString "Warning"
fromHeader HttpCookie = UTF8.fromString "Cookie"
fromHeader HttpSetCookie = UTF8.fromString "Set-Cookie"
toHeader :: ByteString -> Header
toHeader bytestring
| bytestring == UTF8.fromString "Accept" = HttpAccept
| bytestring == UTF8.fromString "Accept-Charset" = HttpAcceptCharset
| bytestring == UTF8.fromString "Accept-Encoding" = HttpAcceptEncoding
| bytestring == UTF8.fromString "Accept-Language" = HttpAcceptLanguage
| bytestring == UTF8.fromString "Authorization" = HttpAuthorization
| bytestring == UTF8.fromString "Expect" = HttpExpect
| bytestring == UTF8.fromString "From" = HttpFrom
| bytestring == UTF8.fromString "Host" = HttpHost
| bytestring == UTF8.fromString "If-Match" = HttpIfMatch
| bytestring == UTF8.fromString "If-Modified-Since" = HttpIfModifiedSince
| bytestring == UTF8.fromString "If-None-Match" = HttpIfNoneMatch
| bytestring == UTF8.fromString "If-Range" = HttpIfRange
| bytestring == UTF8.fromString "If-Unmodified-Since" = HttpIfUnmodifiedSince
| bytestring == UTF8.fromString "Max-Forwards" = HttpMaxForwards
| bytestring == UTF8.fromString "Proxy-Authorization" = HttpProxyAuthorization
| bytestring == UTF8.fromString "Range" = HttpRange
| bytestring == UTF8.fromString "Referer" = HttpReferrer
| bytestring == UTF8.fromString "TE" = HttpTE
| bytestring == UTF8.fromString "User-Agent" = HttpUserAgent
| bytestring == UTF8.fromString "Accept-Ranges" = HttpAcceptRanges
| bytestring == UTF8.fromString "Age" = HttpAge
| bytestring == UTF8.fromString "ETag" = HttpETag
| bytestring == UTF8.fromString "Location" = HttpLocation
| bytestring == UTF8.fromString "Proxy-Authenticate" = HttpProxyAuthenticate
| bytestring == UTF8.fromString "Retry-After" = HttpRetryAfter
| bytestring == UTF8.fromString "Server" = HttpServer
| bytestring == UTF8.fromString "Vary" = HttpVary
| bytestring == UTF8.fromString "WWW-Authenticate" = HttpWWWAuthenticate
| bytestring == UTF8.fromString "Allow" = HttpAllow
| bytestring == UTF8.fromString "Content-Encoding" = HttpContentEncoding
| bytestring == UTF8.fromString "Content-Language" = HttpContentLanguage
| bytestring == UTF8.fromString "Content-Length" = HttpContentLength
| bytestring == UTF8.fromString "Content-Location" = HttpContentLocation
| bytestring == UTF8.fromString "Content-MD5" = HttpContentMD5
| bytestring == UTF8.fromString "Content-Range" = HttpContentRange
| bytestring == UTF8.fromString "Content-Type" = HttpContentType
| bytestring == UTF8.fromString "Expires" = HttpExpires
| bytestring == UTF8.fromString "Last-Modified" = HttpLastModified
| bytestring == UTF8.fromString "Cache-Control" = HttpCacheControl
| bytestring == UTF8.fromString "Connection" = HttpConnection
| bytestring == UTF8.fromString "Date" = HttpDate
| bytestring == UTF8.fromString "Pragma" = HttpPragma
| bytestring == UTF8.fromString "Trailer" = HttpTrailer
| bytestring == UTF8.fromString "Transfer-Encoding" = HttpTransferEncoding
| bytestring == UTF8.fromString "Upgrade" = HttpUpgrade
| bytestring == UTF8.fromString "Via" = HttpVia
| bytestring == UTF8.fromString "Warning" = HttpWarning
| bytestring == UTF8.fromString "Cookie" = HttpCookie
| bytestring == UTF8.fromString "Set-Cookie" = HttpSetCookie
| otherwise = HttpExtensionHeader bytestring
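-- Names not matched above round-trip through 'HttpExtensionHeader', which is
-- how the nonstandard headers discussed in the module documentation are
-- represented; they therefore still appear in 'getAllRequestHeaders'. For
-- example ("X-Request-Id" is an arbitrary name):
--
-- > toHeader (UTF8.fromString "X-Request-Id")
-- >   == HttpExtensionHeader (UTF8.fromString "X-Request-Id")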
isValidInRequest :: Header -> Bool
isValidInRequest header = (headerType header == RequestHeader)
|| (headerType header == EntityHeader)
|| (headerType header == GeneralHeader)
isValidInResponse :: Header -> Bool
isValidInResponse header = (headerType header == ResponseHeader)
|| (headerType header == EntityHeader)
|| (headerType header == GeneralHeader)
isValidOnlyWithEntity :: Header -> Bool
isValidOnlyWithEntity header = headerType header == EntityHeader
-- | Queries the value of the given HTTP/1.1 header, as provided by the user
-- agent. If the header is to be provided after the content, as specified by
-- the Trailer header, this is potentially time-consuming.
getRequestHeader
:: (MonadHTTP m)
=> Header
-- ^ The header to query. Must be a request or entity header.
-> m (Maybe String)
-- ^ The value of the header, if the user agent provided one.
getRequestHeader header = do
connection <- getHTTPConnection
headerMap <- readMVar $ httpConnectionRequestHeaderMap connection
return $ fmap (stripHeaderValueWhitespace . UTF8.toString)
$ Map.lookup header headerMap
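-- For example, inside a handler (a sketch using 'getRequestHeader'):
--
-- > maybeAgent <- getRequestHeader HttpUserAgent
-- > httpLog (maybe "No User-Agent header." ("User-Agent: " ++) maybeAgent)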
stripHeaderValueWhitespace :: String -> String
stripHeaderValueWhitespace input =
let input' = reverse $ dropWhile isHeaderValueWhitespace
$ reverse $ dropWhile isHeaderValueWhitespace input
computeWords input = case break isHeaderValueWhitespace input of
(all, "") -> [all]
(before, after)
-> [before]
++ (computeWords
$ dropWhile isHeaderValueWhitespace after)
words = computeWords input'
output = intercalate " " words
in output
isHeaderValueWhitespace :: Char -> Bool
isHeaderValueWhitespace char = elem char " \t\r\n"
-- | Returns an association list of name-value pairs of all the HTTP/1.1 request
-- or entity headers from the user agent. If some of these headers are to be
-- provided after the content as specified by the Trailer header, this is
-- potentially time-consuming.
getAllRequestHeaders :: (MonadHTTP m) => m [(Header, String)]
getAllRequestHeaders = do
connection <- getHTTPConnection
headerMap <- readMVar $ httpConnectionRequestHeaderMap connection
return $ map (\(header, bytestring) -> (header, UTF8.toString bytestring))
$ Map.toList headerMap
-- | Returns a 'Cookie' object for the given name, if the user agent provided one
-- in accordance with RFC 2109.
getCookie
:: (MonadHTTP m)
=> String -- ^ The name of the cookie to look for.
-> m (Maybe Cookie) -- ^ The cookie, if the user agent provided it.
getCookie name = do
cookieMap <- getRequestCookieMap
return $ Map.lookup name cookieMap
-- | Returns all 'Cookie' objects provided by the user agent in accordance
-- with RFC 2109.
getAllCookies :: (MonadHTTP m) => m [Cookie]
getAllCookies = do
cookieMap <- getRequestCookieMap
return $ Map.elems cookieMap
-- | A convenience method; as 'getCookie', but returns only the value of the
-- cookie rather than a 'Cookie' object.
getCookieValue
:: (MonadHTTP m)
=> String
-- ^ The name of the cookie to look for.
-> m (Maybe String)
-- ^ The value of the cookie, if the user agent provided it.
getCookieValue name = do
cookieMap <- getRequestCookieMap
return $ fmap cookieValue $ Map.lookup name cookieMap
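-- For example, inside a handler (a sketch; the cookie name is arbitrary):
--
-- > maybeSession <- getCookieValue "session-id"
-- > httpLog (maybe "New visitor." ("Returning visitor: " ++) maybeSession)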
getRequestCookieMap :: (MonadHTTP m) => m (Map String Cookie)
getRequestCookieMap = do
connection <- getHTTPConnection
let mvar = httpConnectionRequestCookieMap connection
modifyMVar mvar $ \maybeCookieMap -> do
case maybeCookieMap of
Just cookieMap -> do
return (maybeCookieMap, cookieMap)
Nothing -> do
maybeCookieString <- getRequestHeader HttpCookie
let cookieMap =
case maybeCookieString of
Nothing -> Map.empty
Just cookieString ->
Map.fromList (map (\cookie -> (cookieName cookie, cookie))
(parseCookies cookieString))
return (Just cookieMap, cookieMap)
-- | Return the remote address, which includes both host and port information.
-- They are provided in the aggregate like this because it is the most
-- internet-protocol-agnostic representation.
getRemoteAddress :: (MonadHTTP m) => m Network.SockAddr
getRemoteAddress = do
connection <- getHTTPConnection
return $ httpConnectionPeer connection
-- | Return the remote hostname, as determined by the web server. If it has
-- not yet been looked up, performs the lookup. This is potentially
-- time-consuming.
getRemoteHost :: (MonadHTTP m) => m String
getRemoteHost = do
connection <- getHTTPConnection
let mvar = httpConnectionRemoteHostname connection
maybeMaybeHostname <- readMVar mvar
case maybeMaybeHostname of
Nothing -> do
catch (do
(maybeHostname, _) <-
liftBase $ Network.getNameInfo [] True False
(httpConnectionPeer connection)
swapMVar mvar $ Just maybeHostname
case maybeHostname of
Nothing -> return $ show (httpConnectionPeer connection)
Just hostname -> return hostname)
(\exception -> do
return (exception :: SomeException)
return $ show (httpConnectionPeer connection))
Just Nothing -> return $ show (httpConnectionPeer connection)
Just (Just hostname) -> return hostname
-- | Return the request method.
getRequestMethod :: (MonadHTTP m) => m String
getRequestMethod = do
connection <- getHTTPConnection
readMVar (httpConnectionRequestMethod connection)
-- | Return the request URI.
getRequestURI :: (MonadHTTP m) => m String
getRequestURI = do
connection <- getHTTPConnection
readMVar (httpConnectionRequestURI connection)
getRequestProtocol :: (MonadHTTP m) => m String
getRequestProtocol = do
connection <- getHTTPConnection
readMVar (httpConnectionRequestProtocol connection)
-- | Return the server address and port, as a 'Network.SockAddr'. Useful
-- for implementing virtual-hosting policies.
getServerAddress :: (MonadHTTP m) => m Network.SockAddr
getServerAddress = do
connection <- getHTTPConnection
return $ httpConnectionServerAddress connection
-- | Return whether the connection is via the secure version of the
-- protocol. Useful for implementing virtual-hosting policies.
getServerSecure :: (MonadHTTP m) => m Bool
getServerSecure = do
return False
-- | Return the request content length, if this is knowable without actually
-- receiving the content - in particular, if the Content-Length header was
-- used. Otherwise, returns Nothing.
getContentLength :: (MonadHTTP m) => m (Maybe Int)
getContentLength = do
maybeString <- getRequestHeader HttpContentLength
case maybeString of
Nothing -> return Nothing
Just string -> return $ parseInt string
-- | Return the request content type, as provided by the user agent.
getContentType :: (MonadHTTP m) => m (Maybe String)
getContentType = do
getRequestHeader HttpContentType
getRequestHasContent :: (MonadHTTP m) => m Bool
getRequestHasContent = do
HTTPConnection { httpConnectionRequestContentParameters = parametersMVar }
<- getHTTPConnection
modifyMVar parametersMVar $ \parameters -> do
parameters <- ensureRequestContentParametersInitialized parameters
let result = case parameters of
RequestContentNone -> False
_ -> True
return (parameters, result)
getRequestContentAllowed :: (MonadHTTP m) => m Bool
getRequestContentAllowed = do
method <- getRequestMethod
case method of
_ | method == "OPTIONS" -> return True
| method == "GET" -> return False
| method == "HEAD" -> return False
| method == "POST" -> return True
| method == "PUT" -> return True
| method == "DELETE" -> return False
| method == "TRACE" -> return False
| method == "CONNECT" -> return True
| otherwise -> return True
-- | Reads up to a specified amount of data from the content of the HTTP
-- request, if any, and interprets it as binary data. If input has been
-- closed, returns an empty bytestring. If no input is immediately
-- available, blocks until there is some. If output has been closed, causes
-- an 'OutputAlreadyClosed' exception.
httpGet :: (MonadHTTP m) => Int -> m BS.ByteString
httpGet size = httpGet' (Just size) True False
-- | Reads up to a specified amount of data from the content of the HTTP
-- request, if any, and interprets it as binary data. If input has been
-- closed, returns an empty bytestring. If insufficient input is available,
-- returns any input which is immediately available, or an empty bytestring
-- if there is none, never blocking. If output has been closed, causes an
-- 'OutputAlreadyClosed' exception.
httpGetNonBlocking :: (MonadHTTP m) => Int -> m BS.ByteString
httpGetNonBlocking size = httpGet' (Just size) False False
-- | Reads all remaining data from the content of the HTTP request, if any,
-- and interprets it as binary data. Blocks until all input has been
-- read. If input has been closed, returns an empty bytestring. If output
-- has been closed, causes an 'OutputAlreadyClosed' exception.
httpGetContents :: (MonadHTTP m) => m BS.ByteString
httpGetContents = httpGet' Nothing True False
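-- | A minimal usage sketch, not part of this module's API: read the entire
-- request body with 'httpGetContents' and report its size.  The content type
-- and response wording are illustrative only.
_exampleReportBodySize :: (MonadHTTP m) => m ()
_exampleReportBodySize = do
  body <- httpGetContents
  setResponseHeader HttpContentType "text/plain"
  httpPutStr $ "Received " ++ show (BS.length body) ++ " bytes.\n"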
-- | Returns whether the content of the HTTP request potentially has data
-- remaining, either in the buffer or yet to be read.
httpIsReadable :: (MonadHTTP m) => m Bool
httpIsReadable = do
HTTPConnection { httpConnectionRequestContentParameters = parametersMVar }
<- getHTTPConnection
modifyMVar parametersMVar $ \parameters -> do
parameters <- ensureRequestContentParametersInitialized parameters
let result = case parameters of
RequestContentNone -> False
RequestContentClosed -> False
_ -> True
return (parameters, result)
httpGet' :: (MonadHTTP m) => (Maybe Int) -> Bool -> Bool -> m BS.ByteString
httpGet' maybeSize blocking discarding = do
if not discarding
then requireOutputNotYetClosed
else return ()
modifyRequest $ \buffer parameters -> do
parameters <- ensureRequestContentParametersInitialized parameters
(buffer, parameters)
<- extendRequestContentBuffer buffer parameters maybeSize blocking
(result, buffer) <- return $ case maybeSize of
Nothing -> (buffer, BS.empty)
Just size -> BS.splitAt size buffer
return (buffer, parameters, result)
ensureRequestContentParametersInitialized
:: (MonadHTTP m)
=> RequestContentParameters
-> m RequestContentParameters
ensureRequestContentParametersInitialized RequestContentUninitialized = do
maybeLength <- getContentLength
maybeTransferEncodingString <- getRequestHeader HttpTransferEncoding
let (hasContent, chunked)
= case (maybeLength, maybeTransferEncodingString) of
(Nothing, Nothing) -> (False, False)
(Just length, Nothing) -> (True, False)
(Just length, Just encoding)
| map toLower encoding == "identity" -> (True, False)
| otherwise -> (True, True)
(_, Just _) -> (True, True)
if hasContent
then if chunked
then return $ RequestContentChunked 0
else case maybeLength of
Nothing -> return $ RequestContentNone
Just length -> return $ RequestContentIdentity length
else return RequestContentNone
ensureRequestContentParametersInitialized parameters = return parameters
extendRequestContentBuffer
:: (MonadHTTP m)
=> BS.ByteString
-> RequestContentParameters
-> (Maybe Int)
-> Bool
-> m (BS.ByteString, RequestContentParameters)
extendRequestContentBuffer highLevelBuffer
parameters
maybeTargetLength
blocking = do
let isAtLeastTargetLength buffer =
case maybeTargetLength of
Nothing -> False
Just targetLength -> BS.length buffer >= targetLength
loop highLevelBuffer lowLevelBuffer parameters = do
if isAtLeastTargetLength highLevelBuffer
then return (highLevelBuffer, lowLevelBuffer, parameters)
else do
case parameters of
RequestContentNone
-> return (highLevelBuffer, lowLevelBuffer, parameters)
RequestContentClosed
-> return (highLevelBuffer, lowLevelBuffer, parameters)
RequestContentIdentity lengthRemaining -> do
(lowLevelBuffer, endOfInput)
<- extendInputBuffer lowLevelBuffer lengthRemaining blocking
if endOfInput
then throwIO UnexpectedEndOfInput
else return ()
let (toHighLevelBuffer, lowLevelBuffer')
= BS.splitAt lengthRemaining lowLevelBuffer
lengthRead = BS.length toHighLevelBuffer
highLevelBuffer'
= BS.append highLevelBuffer toHighLevelBuffer
lengthRemaining' = if lengthRemaining > lengthRead
then lengthRemaining - lengthRead
else 0
parameters' = if lengthRemaining' > 0
then RequestContentIdentity lengthRemaining'
else RequestContentClosed
if not blocking || isAtLeastTargetLength highLevelBuffer
then return (highLevelBuffer', lowLevelBuffer', parameters')
else loop highLevelBuffer' lowLevelBuffer' parameters'
RequestContentChunked _ -> do
httpLog $ "Don't understand chunked."
throwIO UnexpectedEndOfInput
-- TODO IAK
HTTPConnection { httpConnectionInputBufferMVar = lowLevelBufferMVar }
<- getHTTPConnection
modifyMVar lowLevelBufferMVar $ \lowLevelBuffer -> do
(highLevelBuffer, lowLevelBuffer, parameters)
<- loop highLevelBuffer lowLevelBuffer parameters
return (lowLevelBuffer, (highLevelBuffer, parameters))
-- | Sets the response status which will be sent with the response headers. If
-- the response headers have already been sent, or are no longer modifiable
-- (because of a call to 'httpPut' or similar), causes a
-- 'ResponseHeadersAlreadySent' or 'ResponseHeadersNotModifiable' exception.
setResponseStatus
:: (MonadHTTP m)
=> Int -- ^ The HTTP/1.1 status code to set.
-> m ()
setResponseStatus status = do
requireResponseHeadersNotYetSent
requireResponseHeadersModifiable
HTTPConnection { httpConnectionResponseStatus = mvar } <- getHTTPConnection
swapMVar mvar status
return ()
-- | Returns the response status which will be or has been sent with the response
-- headers.
getResponseStatus
:: (MonadHTTP m)
=> m Int -- ^ The HTTP/1.1 status code.
getResponseStatus = do
HTTPConnection { httpConnectionResponseStatus = mvar } <- getHTTPConnection
readMVar mvar
-- | Sets the given response header to the given string value, overriding any
-- value which has previously been set. If the response headers have
-- already been sent, or are no longer modifiable (because of a call to
-- 'httpPut' or similar), causes a 'ResponseHeadersAlreadySent' or
-- 'ResponseHeadersNotModifiable' exception. If the header is not an
-- HTTP/1.1 or extension response, entity, or general header, ie, is not
-- valid as part of a response, causes a 'NotAResponseHeader' exception.
--
-- If a value is set for the 'HttpSetCookie' header, this overrides all
-- cookies set for this request with 'setCookie'.
setResponseHeader
:: (MonadHTTP m)
=> Header -- ^ The header to set. Must be a response header or an entity header.
-> String -- ^ The value to set.
-> m ()
setResponseHeader header value = do
requireResponseHeadersModifiable
requireResponseHeadersNotYetSent
setResponseHeader' header value
setResponseHeader'
:: (MonadHTTP m)
=> Header
-> String
-> m ()
setResponseHeader' header value = do
if isValidInResponse header
then do
connection <- getHTTPConnection
let mvar = httpConnectionResponseHeaderMap connection
modifyMVar_ mvar $ \headerMap -> do
let headerMap' = Map.insert header (UTF8.fromString value) headerMap
return headerMap'
else throwIO $ NotAResponseHeader header
-- | Causes the given 'Header' response header not to be sent, overriding
-- any value which has previously been set. If the response headers have
-- already been sent, or are no longer modifiable (because of a call to
-- 'httpPut' or similar), causes a 'ResponseHeadersAlreadySent' or
-- 'ResponseHeadersNotModifiable' exception. If
-- the header is not an HTTP/1.1 or extension response or entity header, ie,
-- is not valid as part of a response, causes a 'NotAResponseHeader'
-- exception.
--
-- Does not prevent the 'HttpSetCookie' header from being sent if cookies
-- have been set for this request with 'setCookie'.
unsetResponseHeader
:: (MonadHTTP m)
=> Header -- ^ The header to unset. Must be a response header or an entity header.
-> m ()
unsetResponseHeader header = do
requireResponseHeadersNotYetSent
requireResponseHeadersModifiable
if isValidInResponse header
then do
HTTPConnection { httpConnectionResponseHeaderMap = mvar } <- getHTTPConnection
modifyMVar_ mvar $ \headerMap -> do
headerMap <- return $ Map.delete header headerMap
return headerMap
else throwIO $ NotAResponseHeader header
-- | Returns the value of the given header which will be or has been sent with
-- the response headers. If the header is not an HTTP/1.1 or extension
-- response, entity, or general header, ie, is not valid as part of a
-- response, causes a 'NotAResponseHeader' exception.
getResponseHeader
:: (MonadHTTP m)
=> Header -- ^ The header to query. Must be a response header or an entity
-- header.
-> m (Maybe String) -- ^ The value of the queried header.
getResponseHeader header = do
if isValidInResponse header
then do
HTTPConnection { httpConnectionResponseHeaderMap = mvar } <- getHTTPConnection
headerMap <- readMVar mvar
return $ fmap UTF8.toString $ Map.lookup header headerMap
else throwIO $ NotAResponseHeader header
-- | Causes the user agent to record the given cookie and send it back with
-- future loads of this page. Does not take effect instantly, but rather
-- when headers are sent. Cookies are set in accordance with RFC 2109.
-- If an 'HttpSetCookie' header is set for this request by a call to
-- 'setResponseHeader', this function has no effect. If the response headers
-- have already been sent, or are no longer modifiable (because of a call to
-- 'httpPut' or similar), causes a 'ResponseHeadersAlreadySent' or
-- 'ResponseHeadersNotModifiable' exception.
-- If the name is not a possible name for a cookie, causes a 'CookieNameInvalid'
-- exception.
setCookie
:: (MonadHTTP m)
=> Cookie -- ^ The cookie to set.
-> m ()
setCookie cookie = do
requireResponseHeadersNotYetSent
requireResponseHeadersModifiable
requireValidCookieName $ cookieName cookie
connection <- getHTTPConnection
let mvar = httpConnectionResponseCookieMap connection
modifyMVar_ mvar $ \responseCookieMap -> do
let responseCookieMap' =
Map.insert (cookieName cookie) cookie responseCookieMap
return responseCookieMap'
-- | Causes the user agent to unset any cookie applicable to this page with the
-- given name. Does not take effect instantly, but rather when headers are
-- sent. If an 'HttpSetCookie' header is set for this request by a call to
-- 'setResponseHeader', this function has no effect. If the response headers
-- have already been sent, or are no longer modifiable (because of a call to
-- 'httpPut' or similar), causes a 'ResponseHeadersAlreadySent' or
-- 'ResponseHeadersNotModifiable' exception.
-- If the name is not a possible name for a cookie, causes a
-- 'CookieNameInvalid' exception.
unsetCookie
:: (MonadHTTP m)
=> String -- ^ The name of the cookie to unset.
-> m ()
unsetCookie name = do
requireResponseHeadersNotYetSent
requireResponseHeadersModifiable
requireValidCookieName name
connection <- getHTTPConnection
let mvar = httpConnectionResponseCookieMap connection
modifyMVar_ mvar $ \responseCookieMap -> do
let responseCookieMap' =
Map.insert name (mkUnsetCookie name) responseCookieMap
return responseCookieMap'
-- | Constructs a cookie with the given name and value. Version is set to 1;
-- path, domain, and maximum age are set to @Nothing@; and the secure flag is
-- set to @False@. Constructing the cookie does not cause it to be set; to do
-- that, call 'setCookie' on it.
mkSimpleCookie
:: String -- ^ The name of the cookie to construct.
-> String -- ^ The value of the cookie to construct.
-> Cookie -- ^ A cookie with the given name and value.
mkSimpleCookie name value = Cookie {
cookieName = name,
cookieValue = value,
cookieVersion = 1,
cookiePath = Nothing,
cookieDomain = Nothing,
cookieMaxAge = Nothing,
cookieSecure = False,
cookieComment = Nothing
}
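-- | A minimal usage sketch, not part of this module's API: build a cookie with
-- 'mkSimpleCookie' and register it with 'setCookie'.  The cookie name and
-- value are illustrative only.
_exampleSetSessionCookie :: (MonadHTTP m) => m ()
_exampleSetSessionCookie =
  setCookie $ mkSimpleCookie "session" "0123456789abcdef"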
-- | Constructs a cookie with the given parameters. Version is set to 1.
-- Constructing the cookie does not cause it to be set; to do that, call 'setCookie'
-- on it.
mkCookie
:: String -- ^ The name of the cookie to construct.
-> String -- ^ The value of the cookie to construct.
-> (Maybe String) -- ^ The path of the cookie to construct.
-> (Maybe String) -- ^ The domain of the cookie to construct.
-> (Maybe Int) -- ^ The maximum age of the cookie to construct, in seconds.
-> Bool -- ^ Whether to flag the cookie to construct as secure.
-> Cookie -- ^ A cookie with the given parameters.
mkCookie name value maybePath maybeDomain maybeMaxAge secure
= Cookie {
cookieName = name,
cookieValue = value,
cookieVersion = 1,
cookiePath = maybePath,
cookieDomain = maybeDomain,
cookieMaxAge = maybeMaxAge,
cookieSecure = secure,
cookieComment = Nothing
}
mkUnsetCookie :: String -> Cookie
mkUnsetCookie name = Cookie {
cookieName = name,
cookieValue = "",
cookieVersion = 1,
cookiePath = Nothing,
cookieDomain = Nothing,
cookieMaxAge = Just 0,
cookieSecure = False,
cookieComment = Nothing
}
requireValidCookieName :: (MonadHTTP m) => String -> m ()
requireValidCookieName name = do
if not $ isValidCookieToken name
then throwIO $ CookieNameInvalid name
else return ()
isValidCookieToken :: String -> Bool
isValidCookieToken token =
let validCharacter c = (ord c > 0) && (ord c < 128)
&& (not $ elem c "()<>@,;:\\\"/[]?={} \t")
in (length token > 0) && (all validCharacter token)
-- | An exception originating within the HTTP infrastructure or the web server.
data HTTPException
= ResponseHeadersAlreadySent
-- ^ An exception thrown by operations which require the response headers not
-- to have been sent yet.
| ResponseHeadersNotModifiable
-- ^ An exception thrown by operations which require the response headers
-- to still be modifiable.
| OutputAlreadyClosed
-- ^ An exception thrown by operations which produce output when output has
-- been closed, as by 'httpCloseOutput'.
| OutputIncomplete
-- ^ An exception thrown when output is closed, as by 'httpCloseOutput',
-- when the response headers imply that there will be a certain amount
-- of data and there is not.
| NotAResponseHeader Header
-- ^ An exception thrown by operations which are given a header that does not
-- meet their requirement of being valid in a response.
| CookieNameInvalid String
-- ^ An exception thrown by operations which are given cookie names that do not
-- meet the appropriate syntax requirements.
| NoConnection
-- ^ An exception thrown by operations which expect a connection to
-- exist (as it always does within a handler), when none does.
deriving (Show, Typeable)
instance Exception HTTPException
-- | Sets the HTTP/1.1 return status to 301 and sets the 'HttpLocation' header
-- to the provided URL. This has the effect of issuing a permanent redirect
-- to the user agent. Permanent redirects, as opposed to temporary redirects,
-- may cause bookmarks or incoming links to be updated. If the response
-- headers have already been sent, or are no longer modifiable (because of a
-- call to 'httpPut' or similar), causes a 'ResponseHeadersAlreadySent' or
-- 'ResponseHeadersNotModifiable' exception.
permanentRedirect
:: (MonadHTTP m)
=> String -- ^ The URL to redirect to, as a string.
-> m ()
permanentRedirect url = do
setResponseStatus 301
setResponseHeader HttpLocation url
-- | Sets the HTTP/1.1 return status to 303 and sets the 'HttpLocation' header
-- to the provided URL. This has the effect of issuing a see-other or
-- "temporary" redirect to the user agent. Temporary redirects, as opposed to
-- permanent redirects, do not cause bookmarks or incoming links to be
-- updated. If the response headers have already been sent, or are no longer
-- modifiable (because of a call to 'httpPut' or similar), causes a
-- 'ResponseHeadersAlreadySent' or 'ResponseHeadersNotModifiable' exception.
seeOtherRedirect
:: (MonadHTTP m)
=> String -- ^ The URL to redirect to, as a string.
-> m ()
seeOtherRedirect url = do
setResponseStatus 303
setResponseHeader HttpLocation url
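-- | A minimal usage sketch, not part of this module's API: the common
-- POST-then-redirect pattern, built from 'getRequestMethod' and
-- 'seeOtherRedirect'.  The target path and body text are illustrative only.
_examplePostThenRedirect :: (MonadHTTP m) => m ()
_examplePostThenRedirect = do
  method <- getRequestMethod
  if method == "POST"
    then seeOtherRedirect "/thanks"
    else httpPutStr "Nothing submitted.\n"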
-- | Ensures that the response headers have been sent. If they are already
-- sent, does nothing. If output has already been closed, causes an
-- 'OutputAlreadyClosed' exception. Note that if the buffered identity
-- output mode (the first mode of operation described for 'httpPut') is
-- to be used, this function implies that there is no additional content
-- beyond what has already been sent.
sendResponseHeaders :: (MonadHTTP m) => m ()
sendResponseHeaders = do
requireOutputNotYetClosed
connection <- getHTTPConnection
let socket = httpConnectionSocket connection
alreadySentMVar = httpConnectionResponseHeadersSent connection
modifiableMVar = httpConnectionResponseHeadersModifiable connection
parametersMVar = httpConnectionResponseContentParameters connection
bufferMVar = httpConnectionResponseContentBuffer connection
parameters <-
modifyMVar parametersMVar $ \parameters -> do
parameters <- ensureResponseContentParametersInitialized parameters
return (parameters, parameters)
modifyMVar_ alreadySentMVar $ \alreadySent -> do
if not alreadySent
then do
_ <- swapMVar modifiableMVar False
case parameters of
ResponseContentBufferedIdentity -> do
buffer <- readMVar bufferMVar
setResponseHeader' HttpContentLength (show $ BS.length buffer)
_ -> do
headersBuffer <- getHeadersBuffer
send headersBuffer
else return ()
return True
getHeadersBuffer :: (MonadHTTP m) => m ByteString
getHeadersBuffer = do
connection <- getHTTPConnection
responseStatus <- readMVar (httpConnectionResponseStatus connection)
responseHeaderMap <- readMVar (httpConnectionResponseHeaderMap connection)
responseCookieMap <- readMVar (httpConnectionResponseCookieMap connection)
let statusLine = BS.concat [UTF8.fromString "HTTP/1.1 ",
UTF8.fromString $ show responseStatus,
UTF8.fromString " ",
reasonPhrase responseStatus,
UTF8.fromString "\r\n"]
nameValuePairs
= concat [map (\(header, value) -> (fromHeader header, value))
$ Map.toList responseHeaderMap,
if (isNothing $ Map.lookup HttpSetCookie responseHeaderMap)
&& (not $ Map.null responseCookieMap)
then [(UTF8.fromString "Set-Cookie", setCookieValue)]
else []]
setCookieValue = printCookies $ Map.elems responseCookieMap
delimiterLine = UTF8.fromString "\r\n"
buffer = BS.concat $ [statusLine]
++ (concat
$ map (\(name, value)
-> [name, UTF8.fromString ": ",
value, UTF8.fromString "\r\n"])
nameValuePairs)
++ [delimiterLine]
return buffer
markResponseHeadersUnmodifiable :: (MonadHTTP m) => m ()
markResponseHeadersUnmodifiable = do
HTTPConnection { httpConnectionResponseHeadersModifiable = modifiableMVar }
<- getHTTPConnection
swapMVar modifiableMVar False
return ()
reasonPhrase :: Int -> ByteString
reasonPhrase status =
UTF8.fromString $ case status of
100 -> "Continue"
101 -> "Switching Protocols"
200 -> "OK"
201 -> "Created"
202 -> "Accepted"
203 -> "Non-Authoritative Information"
204 -> "No Content"
205 -> "Reset Content"
206 -> "Partial Content"
300 -> "Multiple Choices"
301 -> "Moved Permanently"
302 -> "Found"
303 -> "See Other"
304 -> "Not Modified"
305 -> "Use Proxy"
307 -> "Temporary Redirect"
400 -> "Bad Request"
401 -> "Unauthorized"
402 -> "Payment Required"
403 -> "Forbidden"
404 -> "Not Found"
405 -> "Method Not Allowed"
406 -> "Not Acceptable"
407 -> "Proxy Authentication Required"
408 -> "Request Time-out"
409 -> "Conflict"
410 -> "Gone"
411 -> "Length Required"
412 -> "Precondition Failed"
413 -> "Request Entity Too Large"
414 -> "Request-URI Too Large"
415 -> "Unsupported Media Type"
416 -> "Requested range not satisfiable"
417 -> "Expectation Failed"
500 -> "Internal Server Error"
501 -> "Not Implemented"
502 -> "Bad Gateway"
503 -> "Service Unavailable"
504 -> "Gateway Time-out"
505 -> "HTTP Version not supported"
_ -> "Extension"
-- | Returns whether the response headers have been sent, regardless of whether
-- they are modifiable (they might not be because of a call to 'httpPut' or
-- similar).
responseHeadersSent :: (MonadHTTP m) => m Bool
responseHeadersSent = do
connection <- getHTTPConnection
readMVar (httpConnectionResponseHeadersSent connection)
-- | Returns whether the response headers are modifiable, a prerequisite of
-- which is that they have not already been sent. (They might not be
-- modifiable because of a call to 'httpPut' or similar.)
responseHeadersModifiable :: (MonadHTTP m) => m Bool
responseHeadersModifiable = do
connection <- getHTTPConnection
readMVar (httpConnectionResponseHeadersModifiable connection)
-- | Appends data, interpreted as binary, to the content of the HTTP response.
-- Makes the response headers no longer modifiable, effective immediately.
-- If output has already been closed, causes an 'OutputAlreadyClosed'
-- exception. If the response Transfer-Encoding as set in the response
-- headers is "identity" or omitted, and the response Content-Length is
-- omitted, data is buffered until output is closed, then sent all at once
-- with an appropriate Content-Length header. Otherwise - that is, if there
-- is a Transfer-Encoding other than "identity" set, or if Content-Length is
-- set - data is sent immediately. If Content-Length is set, and the
-- provided data would cause the cumulative data sent to exceed that length,
-- causes an 'OutputAlreadyClosed' exception. At the time that data is
-- actually sent, if the response headers have not been sent, first sends
-- them.
--
-- In other words, there are effectively three modes of operation for output.
-- The first, simplest mode is used if the handler does nothing special. In
-- this mode output is buffered and sent all at once; headers are not sent
-- until this time. In this mode 'httpCloseOutput' may be useful to force
-- output to be sent before the handler returns, perhaps so that additional
-- time-consuming processing can be done. This mode is easiest to use, in the
-- sense that it requires no support on the handler's part, but probably the
-- second mode should always be used instead.
--
-- The second mode is used if the handler sets a Transfer-Encoding, for
-- example "chunked", and no Content-Length. In this case headers are sent
-- immediately upon the first 'httpPut' or 'httpPutStr', and output is sent
-- as it is provided. Output in this mode is transformed by 'httpPut' into
-- the appropriate transfer encoding. Thus handler code need only specify a
-- transfer encoding, not actually implement that encoding itself. This mode
-- is advantageous to allow user agents to begin displaying partial content as
-- it is received, and particularly useful when the content is quite large
-- or takes significant time to generate. If you are unsure which mode to
-- use, it should probably be this one.
--
-- The third mode is used if the handler sets a Content-Length and no
-- Transfer-Encoding. In this case headers are again sent immediately upon
-- the first 'httpPut' or 'httpPutStr', and output is again sent as it is
-- provided. Output in this mode is not transformed. This may be more
-- efficient than the second mode if output is generated in many small pieces,
-- as it avoids computing and sending the length tags of the "chunked"
-- encoding. However, it requires the content length to be known in advance
-- of actually sending any content. It may be useful if you wish to have
-- direct-http validate that the handler is well-behaved in sending a binary
-- object of known size with no "garbage" inserted by spurious additional
-- puts.
httpPut :: (MonadHTTP m) => BS.ByteString -> m ()
httpPut bytestring = do
requireOutputNotYetClosed
markResponseHeadersUnmodifiable
connection <- getHTTPConnection
let alreadySentMVar = httpConnectionResponseHeadersSent connection
maybeException <- do
modifyResponse $ \buffer parameters -> do
parameters <- ensureResponseContentParametersInitialized parameters
case parameters of
ResponseContentClosed -> do
return (buffer, parameters, Just OutputAlreadyClosed)
ResponseContentBufferedIdentity -> do
return (BS.append buffer bytestring, parameters, Nothing)
ResponseContentUnbufferedIdentity lengthRemaining -> do
modifyMVar_ alreadySentMVar $ \alreadySent -> do
if alreadySent
then return ()
else do
headersBuffer <- getHeadersBuffer
send headersBuffer
return True
let lengthThisPut = BS.length bytestring
if lengthThisPut > lengthRemaining
then do
return (BS.empty,
ResponseContentClosed,
Just OutputAlreadyClosed)
else do
let parameters' = ResponseContentUnbufferedIdentity
$ lengthRemaining - lengthThisPut
send bytestring
return (buffer, parameters', Nothing)
ResponseContentChunked -> do
modifyMVar_ alreadySentMVar $ \alreadySent -> do
if alreadySent
then return ()
else do
headersBuffer <- getHeadersBuffer
send headersBuffer
return True
if BS.length bytestring > 0
then do
let lengthBuffer =
UTF8.fromString $ showHex (BS.length bytestring) ""
++ "\r\n"
crlfBuffer = UTF8.fromString "\r\n"
send $ BS.concat [lengthBuffer, bytestring, crlfBuffer]
else return ()
return (buffer, parameters, Nothing)
case maybeException of
Nothing -> return ()
Just exception -> throwIO exception
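-- | A minimal sketch of the second ("chunked") mode of operation described in
-- the documentation for 'httpPut': set a Transfer-Encoding and no
-- Content-Length, then emit output incrementally.  The header values and body
-- text are illustrative only; this is not part of the module's API.
_exampleChunkedOutput :: (MonadHTTP m) => m ()
_exampleChunkedOutput = do
  setResponseHeader HttpTransferEncoding "chunked"
  setResponseHeader HttpContentType "text/plain"
  httpPutStr "First piece of a progressively generated response.\n"
  httpPutStr "Second piece.\n"
  httpCloseOutput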
ensureResponseContentParametersInitialized
:: (MonadHTTP m)
=> ResponseContentParameters
-> m ResponseContentParameters
ensureResponseContentParametersInitialized ResponseContentUninitialized = do
maybeLengthString <- getResponseHeader HttpContentLength
let maybeLength = case maybeLengthString of
Nothing -> Nothing
Just lengthString -> parseInt lengthString
maybeTransferEncodingString <- getResponseHeader HttpTransferEncoding
let (hasContent, chunked)
= case (maybeLengthString, maybeTransferEncodingString) of
(Nothing, Nothing) -> (False, False)
(Just length, Nothing) -> (True, False)
(Just length, Just encoding)
| map toLower encoding == "identity" -> (True, False)
| otherwise -> (True, True)
(_, Just _) -> (True, True)
if hasContent
then if chunked
then return $ ResponseContentChunked
else case maybeLength of
Nothing -> return ResponseContentBufferedIdentity
Just length ->
return $ ResponseContentUnbufferedIdentity length
else return ResponseContentBufferedIdentity
ensureResponseContentParametersInitialized parameters = return parameters
finishResponseContent :: (MonadHTTP m) => m ()
finishResponseContent = do
connection <- getHTTPConnection
let bufferMVar = httpConnectionResponseContentBuffer connection
parametersMVar = httpConnectionResponseContentParameters connection
maybeException <-
modifyResponse $ \buffer parameters -> do
parameters <- ensureResponseContentParametersInitialized parameters
result <-
case parameters of
ResponseContentClosed -> do
return $ Just OutputAlreadyClosed
ResponseContentBufferedIdentity -> do
headersBuffer <- getHeadersBuffer
send $ BS.concat [headersBuffer, buffer]
return Nothing
ResponseContentUnbufferedIdentity lengthRemaining -> do
if lengthRemaining > 0
then return $ Just OutputIncomplete
else return Nothing
ResponseContentChunked -> do
            -- The chunked body ends with a zero-length chunk followed by the
            -- CRLF that terminates the (empty) trailer: "0" CRLF CRLF.
            let emptyChunkBuffer = UTF8.fromString "0\r\n\r\n"
send emptyChunkBuffer
return Nothing
return (BS.empty, ResponseContentClosed, result)
case maybeException of
Nothing -> return ()
Just exception -> throwIO exception
send :: (MonadHTTP m) => ByteString -> m ()
send bytestring = do
HTTPConnection { httpConnectionSocket = socket } <- getHTTPConnection
liftBase $ Network.sendAll socket bytestring
-- | Appends text, encoded as UTF8, to the content of the HTTP response. In
-- all respects this behaves as 'httpPut', but for the fact that it takes
-- text rather than binary data.
httpPutStr :: (MonadHTTP m) => String -> m ()
httpPutStr string = httpPut $ UTF8.fromString string
-- | Informs the web server and the user agent that the request has completed.
-- As side-effects, the response headers are sent if they have not yet been,
-- any unread input is discarded and no more can be read, and any unsent
-- output is sent. This is implicitly called, if it has not already been,
-- after the handler returns; it may be useful within a handler if the
-- handler wishes to return results and then perform time-consuming
-- computations before exiting. If output has already been closed, causes an
-- 'OutputAlreadyClosed' exception. If the response headers imply that there
-- will be a certain amount of data and there is not, causes an
-- 'OutputIncomplete' exception.
httpCloseOutput :: (MonadHTTP m) => m ()
httpCloseOutput = do
requireOutputNotYetClosed
sendResponseHeaders
finishResponseContent
httpGet' Nothing True True
return ()
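-- | A minimal sketch of the pattern described above for 'httpCloseOutput':
-- answer the user agent first, then continue with slow work.  The computation
-- and log wording are illustrative only; this is not part of the module's API.
_exampleRespondThenKeepWorking :: (MonadHTTP m) => m ()
_exampleRespondThenKeepWorking = do
  httpPutStr "Accepted; processing continues after this response.\n"
  httpCloseOutput
  let expensiveResult = sum [1 .. 1000000 :: Integer]
  httpLog $ "Post-response work finished: " ++ show expensiveResult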
-- | Returns whether it is possible to write more data; ie, whether output has
-- not yet been closed as by 'httpCloseOutput'.
httpIsWritable :: (MonadHTTP m) => m Bool
httpIsWritable = do
connection <- getHTTPConnection
let parametersMVar = httpConnectionResponseContentParameters connection
modifyMVar parametersMVar $ \parameters -> do
parameters <- ensureResponseContentParametersInitialized parameters
let result = case parameters of
ResponseContentClosed -> False
_ -> True
return (parameters, result)
requireResponseHeadersNotYetSent :: (MonadHTTP m) => m ()
requireResponseHeadersNotYetSent = do
alreadySent <- responseHeadersSent
if alreadySent
then throwIO ResponseHeadersAlreadySent
else return ()
requireResponseHeadersModifiable :: (MonadHTTP m) => m ()
requireResponseHeadersModifiable = do
modifiable <- responseHeadersModifiable
if modifiable
then return ()
else throwIO ResponseHeadersNotModifiable
requireOutputNotYetClosed :: (MonadHTTP m) => m ()
requireOutputNotYetClosed = do
isWritable <- httpIsWritable
case isWritable of
False -> throwIO OutputAlreadyClosed
True -> return ()
modifyRequest
:: (MonadHTTP m)
=> (ByteString
-> RequestContentParameters
-> m (ByteString, RequestContentParameters, a))
-> m a
modifyRequest action = do
HTTPConnection {
httpConnectionRequestContentBuffer = bufferMVar,
httpConnectionRequestContentParameters = parametersMVar
} <- getHTTPConnection
modifyMVar bufferMVar $ \buffer -> do
modifyMVar parametersMVar $ \parameters -> do
(buffer, parameters, result) <- action buffer parameters
return (parameters, (buffer, result))
modifyResponse
:: (MonadHTTP m)
=> (ByteString
-> ResponseContentParameters
-> m (ByteString, ResponseContentParameters, a))
-> m a
modifyResponse action = do
HTTPConnection {
httpConnectionResponseContentBuffer = bufferMVar,
httpConnectionResponseContentParameters = parametersMVar
} <- getHTTPConnection
modifyMVar bufferMVar $ \buffer -> do
modifyMVar parametersMVar $ \parameters -> do
(buffer, parameters, result) <- action buffer parameters
return (parameters, (buffer, result))
|
IreneKnapp/direct-http
|
Haskell/Network/HTTP.hs
|
mit
| 100,984
| 0
| 51
| 30,050
| 16,863
| 8,564
| 8,299
| 1,724
| 41
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-application-kinesisstreamsinput.html
module Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationKinesisStreamsInput where
import Stratosphere.ResourceImports
-- | Full data type definition for
-- KinesisAnalyticsV2ApplicationKinesisStreamsInput. See
-- 'kinesisAnalyticsV2ApplicationKinesisStreamsInput' for a more convenient
-- constructor.
data KinesisAnalyticsV2ApplicationKinesisStreamsInput =
KinesisAnalyticsV2ApplicationKinesisStreamsInput
{ _kinesisAnalyticsV2ApplicationKinesisStreamsInputResourceARN :: Val Text
} deriving (Show, Eq)
instance ToJSON KinesisAnalyticsV2ApplicationKinesisStreamsInput where
toJSON KinesisAnalyticsV2ApplicationKinesisStreamsInput{..} =
object $
catMaybes
[ (Just . ("ResourceARN",) . toJSON) _kinesisAnalyticsV2ApplicationKinesisStreamsInputResourceARN
]
-- | Constructor for 'KinesisAnalyticsV2ApplicationKinesisStreamsInput'
-- containing required fields as arguments.
kinesisAnalyticsV2ApplicationKinesisStreamsInput
:: Val Text -- ^ 'kavaksiResourceARN'
-> KinesisAnalyticsV2ApplicationKinesisStreamsInput
kinesisAnalyticsV2ApplicationKinesisStreamsInput resourceARNarg =
KinesisAnalyticsV2ApplicationKinesisStreamsInput
{ _kinesisAnalyticsV2ApplicationKinesisStreamsInputResourceARN = resourceARNarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-application-kinesisstreamsinput.html#cfn-kinesisanalyticsv2-application-kinesisstreamsinput-resourcearn
kavaksiResourceARN :: Lens' KinesisAnalyticsV2ApplicationKinesisStreamsInput (Val Text)
kavaksiResourceARN = lens _kinesisAnalyticsV2ApplicationKinesisStreamsInputResourceARN (\s a -> s { _kinesisAnalyticsV2ApplicationKinesisStreamsInputResourceARN = a })
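-- A minimal usage sketch (illustrative only; it assumes stratosphere's
-- 'Literal' constructor for 'Val' and uses a made-up ARN):
--
-- > exampleInput :: KinesisAnalyticsV2ApplicationKinesisStreamsInput
-- > exampleInput =
-- >   kinesisAnalyticsV2ApplicationKinesisStreamsInput
-- >     (Literal "arn:aws:kinesis:us-east-1:123456789012:stream/example")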
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsV2ApplicationKinesisStreamsInput.hs
|
mit
| 1,988
| 0
| 13
| 166
| 176
| 102
| 74
| 23
| 1
|
{-# LANGUAGE RecordWildCards
, LambdaCase
, FlexibleContexts
, RankNTypes
, TypeFamilies #-}
module App ( run
, module AppDefs
) where
import Control.Lens
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans.Either
import Control.Monad.STM
import Control.Monad.Trans.Control
import Control.Concurrent.STM.TQueue
import qualified Graphics.Rendering.OpenGL as GL
import qualified Graphics.UI.GLFW as GLFW
import Text.Printf
import Data.Time
import Data.Maybe
import AppDefs
import GLFWHelpers
import GLHelpers
import Timing
import Trace
import Font
import FrameBuffer
import QuadRendering
import qualified BoundedSequence as BS
import Experiment
import Median
processAllEvents :: MonadIO m => TQueue a -> (a -> m ()) -> m ()
processAllEvents tq processEvent =
(liftIO . atomically $ tryReadTQueue tq) >>= \case
Just e -> processEvent e >> processAllEvents tq processEvent
_ -> return ()
processGLFWEvent :: GLFWEvent -> AppIO ()
processGLFWEvent ev = do
case ev of
GLFWEventError e s -> do
window <- view aeWindow
liftIO $ do
traceS TLError $ "GLFW Error " ++ show e ++ " " ++ show s
GLFW.setWindowShouldClose window True
GLFWEventKey win k _sc ks _mk | ks == GLFW.KeyState'Pressed ->
case k of
GLFW.Key'Escape -> do
lastPress <- use asLastEscPress
tick <- use asCurTick
-- Only close when ESC has been pressed twice quickly
when (tick - lastPress < 0.5) .
liftIO $ GLFW.setWindowShouldClose win True
asLastEscPress .= tick
GLFW.Key'T -> view aeFB >>= \fb -> liftIO $ saveFrameBufferToPNG fb .
map (\c -> if c `elem` ['/', '\\', ':', ' '] then '-' else c)
. printf "Screenshot-%s.png" =<< show <$> getZonedTime
GLFW.Key'V -> asVSync %= not >> setVSync
-- Exit and switch to a different experiment
GLFW.Key'Minus -> onRenderSettingsChage >> left ExpPrev
GLFW.Key'Equal -> onRenderSettingsChage >> left ExpNext
_ -> return ()
GLFWEventFramebufferSize _win _w _h -> resize
_ -> return ()
runExperimentState $ experimentGLFWEvent ev -- Pass on event to the experiment
-- Handle changes in window and frame buffer size
resize :: (MonadReader AppEnv m, MonadState AppState m, MonadIO m) => m ()
resize = do
window <- view aeWindow
fb <- view aeFB
liftIO $ do (w, h) <- GLFW.getFramebufferSize window
setupViewport w h
resizeFrameBuffer fb w h
onRenderSettingsChage
onRenderSettingsChage :: MonadState AppState m => m ()
onRenderSettingsChage = do
-- Reset frame time measurements and frame index when the rendering settings have changed
asFrameTimes %= BS.clear
asFrameIdx .= 0
draw :: AppIO ()
draw = do
AppEnv { .. } <- ask
AppState { .. } <- get
-- Clear
liftIO $ do
GL.clearColor GL.$= (GL.Color4 1 0 1 1 :: GL.Color4 GL.GLclampf)
GL.clear [GL.ColorBuffer, GL.DepthBuffer]
GL.depthFunc GL.$= Just GL.Lequal
-- Allow the experiment to draw into the framebuffer
runExperimentState $ experimentDraw _aeFB _asCurTick
-- Render everything quad based
(liftIO $ GLFW.getFramebufferSize _aeWindow) >>= \(w, h) ->
void . withQuadRenderBuffer _aeQR w h $ \qb -> do
ftStr <- updateAndReturnFrameTimes
(fbWdh, fbHgt) <- liftIO $ getFrameBufferDim _aeFB
expDesc <- use asExperimentDesc
vsync <- use asVSync
statusString <- (++) ( "2x[ESC] Exit | Screensho[T] | %ix%i | %s\n" ++
"[V]Sync: %s | Exp. [-][=] %s | "
)
<$> runExperimentState experimentStatusString
liftIO $ do
-- Draw frame buffer contents
drawFrameBuffer _aeFB qb 0 0 (fromIntegral w) (fromIntegral h)
-- FPS counter and mode / statistics / key bindings display
let statusStringHgt = (succ . length $ filter (== '\n') statusString) * 12
drawQuad qb
0 (fromIntegral h - fromIntegral statusStringHgt)
(fromIntegral w) (fromIntegral h)
2
FCBlack
(TRBlend 0.5)
Nothing
QuadUVDefault
drawTextWithShadow _aeFontTexture qb 3 (h - 12) $ printf
statusString
fbWdh
fbHgt
ftStr
(if vsync then "On" else "Off")
expDesc
where drawTextWithShadow :: GL.TextureObject -> QuadRenderBuffer -> Int -> Int -> String -> IO ()
drawTextWithShadow tex qb x y str = do
drawText tex qb (x + 1) (y - 1) 0x00000000 str
drawText tex qb x y 0x0000FF00 str
updateAndReturnFrameTimes :: MonadState AppState m => m String
updateAndReturnFrameTimes = do
frameTimes <- use $ asFrameTimes.to BS.toList
curTick <- use asCurTick
asFrameTimes %= BS.push_ curTick
let frameDeltas = case frameTimes of (x:xs) -> goFD x xs; _ -> []
goFD prev (x:xs) = (prev - x) : goFD x xs
goFD _ [] = []
fdMedian = fromMaybe 1 $ median frameDeltas
fdWorst = case frameDeltas of [] -> 0; xs -> maximum xs
fdBest = case frameDeltas of [] -> 0; xs -> minimum xs
in return $ printf "%.2fFPS/%.1fms (L: %.2fms, H: %.2fms)"
(1.0 / fdMedian)
(fdMedian * 1000)
(fdBest * 1000)
(fdWorst * 1000)
setVSync :: (MonadIO m, MonadState AppState m) => m ()
setVSync = use asVSync >>= \vsync -> liftIO . GLFW.swapInterval $ if vsync then 1 else 0
run :: AppEnv -> AppState -> IO ()
run env st =
-- Setup state, reader, OpenGL / GLFW and enter loop
flip runReaderT env . flip evalStateT st $ do
resize
setVSync
experimentLoop 2
where -- Initialize / shutdown / switch experiment
experimentLoop :: Int -> StateT AppState (ReaderT AppEnv IO) ()
experimentLoop expIdx = do
-- We use the either to break out of the current experiment, and either exit
-- or switch to a different one
r <- runEitherT $ do
-- Because of the existential type we have to call withExperiment from
-- inside the lambda where we pattern match it out of the AnyWithExperiment.
-- Also use monad-control to bring our stack across the IO of withExperiment
curExp <- (!! expIdx) <$> view aeExperiments
control $ \runMonad ->
(\(AnyWithExperiment withExperiment') ->
liftIO $ withExperiment' (runMonad . withExperimentInner expIdx)
) curExp
numExp <- length <$> view aeExperiments
-- Exit or keep running with a different experiment?
case r of
Left ExpNext -> experimentLoop $ wrapExpIdx (expIdx + 1) numExp
Left ExpPrev -> experimentLoop $ wrapExpIdx (expIdx - 1) numExp
Left ExpExit -> return ()
Right () -> return ()
where wrapExpIdx idx numExp | idx < 0 = numExp - 1
| idx >= numExp = 0
| otherwise = idx
-- Experiment setup complete, store state and enter main loop
withExperimentInner :: Experiment e => Int -> e -> AppIO ()
withExperimentInner expIdx expState = do
let name = experimentName expState
liftIO . traceS TLInfo $ "Switching to experiment: " ++ name
numExp <- length <$> view aeExperiments
asExperimentDesc .= printf "%i/%i: %s" (expIdx + 1) numExp name
asExperiment .= AnyExperiment expState
mainLoop
-- Main loop
mainLoop :: AppIO ()
mainLoop = do
window <- view aeWindow
asCurTick <~ liftIO getTick
tqGLFW <- view aeGLFWEventsQueue
processAllEvents tqGLFW processGLFWEvent
-- GLFW / OpenGL
draw
liftIO $ {-# SCC swapAndPoll #-} do
-- GL.flush
-- GL.finish
GLFW.swapBuffers window
GLFW.pollEvents
traceOnGLError $ Just "main loop"
-- Drop the first three frame deltas, they are often outliers
use asFrameIdx >>= \idx -> when (idx < 3) (asFrameTimes %= BS.clear)
asFrameIdx += 1
-- Done?
flip unless mainLoop =<< liftIO (GLFW.windowShouldClose window)
|
blitzcode/rust-exp
|
hs-src/App.hs
|
mit
| 9,287
| 0
| 25
| 3,461
| 2,201
| 1,088
| 1,113
| -1
| -1
|
module Main(main) where
import Control.Exception
import Control.Monad.Identity
import Data.Char
import Prelude
import System.Directory
import System.FilePath as FP
import System.IO
import qualified Data.ByteString.Lazy as LBS
import DataConstructors
import E.Main
import E.Program
import E.Rules
import E.Type
import FrontEnd.Class
import Grin.Main(compileToGrin)
import Grin.Show(render)
import Ho.Build
import Ho.Collected
import Ho.Library
import Name.Name
import Options
import StringTable.Atom
import Support.TempDir
import Util.Gen
import Util.SetLike as S
import Version.Version(versionSimple)
import qualified FlagDump as FD
import qualified Interactive
main = wrapMain $ do
hSetEncoding stdout utf8
hSetEncoding stderr utf8
o <- processOptions
when (dump FD.Atom) $
addAtExit dumpStringTableStats
-- set temporary directory
maybeDo $ do x <- optWorkDir o; return $ setTempDir x
let darg = progressM $ do
(argstring,_) <- getArgString
return (argstring ++ "\n" ++ versionSimple)
case optMode o of
BuildHl hl -> darg >> buildLibrary processInitialHo processDecls hl
ListLibraries -> listLibraries
ShowHo ho -> dumpHoFile ho
PurgeCache -> purgeCache
Preprocess -> forM_ (optArgs o) $ \fn -> do
lbs <- LBS.readFile fn
res <- preprocessHs options fn lbs
LBS.putStr res
_ -> darg >> processFiles (optArgs o)
-- we are very careful to only delete cache files.
purgeCache = do
Just hc <- findHoCache
ds <- getDirectoryContents hc
let cacheFile fn = case map toLower (reverse fn) of
'o':'h':'.':fs -> length fs == 26 && all isAlphaNum fs
_ -> False
forM_ ds $ \fn -> when (cacheFile fn) (removeFile (hc </> fn))
processFiles :: [String] -> IO ()
processFiles cs = f cs (optMainFunc options) where
f [] Nothing = do
int <- Interactive.isInteractive
when (not int) $ putErrDie "jhc: no input files"
g [Left preludeModule]
f [] (Just (b,m)) = do
m <- getModule (parseName Val m)
g [Left m]
f cs _ = g (map fileOrModule cs)
g fs = processCollectedHo . snd =<< parseFiles options [outputName] []
fs processInitialHo processDecls
fileOrModule f = case reverse f of
('s':'h':'.':_) -> Right f
('s':'h':'l':'.':_) -> Right f
('c':'s':'h':'.':_) -> Right f
_ -> Left $ toModule f
processCollectedHo cho = do
if optStop options == CompileHo then return () else do
putProgressLn "Collected Compilation..."
when (dump FD.ClassSummary) $ do
putStrLn " ---- class summary ---- "
printClassSummary (choClassHierarchy cho)
when (dump FD.Class) $ do
putStrLn " ---- class hierarchy ---- "
printClassHierarchy (choClassHierarchy cho)
let dataTable = choDataTable cho
combinators = values $ choCombinators cho
evaluate dataTable
evaluate combinators
let prog = programUpdate program {
progCombinators = combinators,
progDataTable = dataTable
}
-- dump final version of various requested things
wdump FD.Datatable $ putErrLn (render $ showDataTable dataTable)
wdump FD.DatatableBuiltin $
putErrLn (render $ showDataTable samplePrimitiveDataTable)
dumpRules (Rules $ fromList
[(combIdent x,combRules x) | x <- combinators, not $ null (combRules x)])
-- enter interactive mode
int <- Interactive.isInteractive
if int then Interactive.interact cho else do
prog <- compileWholeProgram prog
compileToGrin prog
progressM c = wdump FD.Progress $ (c >>= putErrLn) >> hFlush stderr
|
dec9ue/jhc_copygc
|
src/Main.hs
|
gpl-2.0
| 3,749
| 6
| 19
| 967
| 1,219
| 596
| 623
| -1
| -1
|
{-# LANGUAGE TemplateHaskell #-}
{-| Implementation of the Ganeti logging functionality.
This currently lacks the following (FIXME):
- log file reopening
Note that this requires the hslogger library version 1.1 and above.
-}
{-
Copyright (C) 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Logging
( setupLogging
, MonadLog(..)
, Priority(..)
, logDebug
, logInfo
, logNotice
, logWarning
, logError
, logCritical
, logAlert
, logEmergency
, SyslogUsage(..)
, syslogUsageToRaw
, syslogUsageFromRaw
, withErrorLogAt
) where
import Control.Monad
import Control.Monad.Error (Error(..), MonadError(..), catchError)
import Control.Monad.Reader
import System.Log.Logger
import System.Log.Handler.Simple
import System.Log.Handler.Syslog
import System.Log.Handler (setFormatter, LogHandler)
import System.Log.Formatter
import System.IO
import Ganeti.BasicTypes (ResultT(..))
import Ganeti.THH
import qualified Ganeti.ConstantUtils as ConstantUtils
-- | Syslog usage type.
$(declareLADT ''String "SyslogUsage"
[ ("SyslogNo", "no")
, ("SyslogYes", "yes")
, ("SyslogOnly", "only")
])
-- | Builds the log formatter.
logFormatter :: String -- ^ Program
-> Bool -- ^ Multithreaded
-> Bool -- ^ Syslog
-> LogFormatter a
logFormatter prog mt syslog =
let parts = [ if syslog
then "[$pid]:"
else "$time: " ++ prog ++ " pid=$pid"
, if mt then if syslog then " ($tid)" else "/$tid"
else ""
, " $prio $msg"
]
in tfLogFormatter "%F %X,%q %Z" $ concat parts
-- | Helper to open and set the formatter on a log if enabled by a
-- given condition, otherwise returning an empty list.
openFormattedHandler :: (LogHandler a) => Bool
-> LogFormatter a -> IO a -> IO [a]
openFormattedHandler False _ _ = return []
openFormattedHandler True fmt opener = do
handler <- opener
return [setFormatter handler fmt]
-- | Sets up the logging configuration.
setupLogging :: Maybe String -- ^ Log file
-> String -- ^ Program name
-> Bool -- ^ Debug level
-> Bool -- ^ Log to stderr
-> Bool -- ^ Log to console
-> SyslogUsage -- ^ Syslog usage
-> IO ()
setupLogging logf program debug stderr_logging console syslog = do
let level = if debug then DEBUG else INFO
destf = if console then Just ConstantUtils.devConsole else logf
fmt = logFormatter program False False
file_logging = syslog /= SyslogOnly
updateGlobalLogger rootLoggerName (setLevel level)
stderr_handlers <- openFormattedHandler stderr_logging fmt $
streamHandler stderr level
file_handlers <- case destf of
Nothing -> return []
Just path -> openFormattedHandler file_logging fmt $
fileHandler path level
let handlers = file_handlers ++ stderr_handlers
updateGlobalLogger rootLoggerName $ setHandlers handlers
-- syslog handler is special (another type, still instance of the
-- typeclass, and has a built-in formatter), so we can't pass it in
-- the above list
when (syslog /= SyslogNo) $ do
syslog_handler <- openlog program [PID] DAEMON INFO
updateGlobalLogger rootLoggerName $ addHandler syslog_handler
-- * Logging function aliases
-- | A monad that allows logging.
class Monad m => MonadLog m where
-- | Log at a given level.
logAt :: Priority -> String -> m ()
instance MonadLog IO where
logAt = logM rootLoggerName
instance (MonadLog m) => MonadLog (ReaderT r m) where
logAt p = lift . logAt p
instance (MonadLog m, Error e) => MonadLog (ResultT e m) where
logAt p = lift . logAt p
-- | Log at debug level.
logDebug :: (MonadLog m) => String -> m ()
logDebug = logAt DEBUG
-- | Log at info level.
logInfo :: (MonadLog m) => String -> m ()
logInfo = logAt INFO
-- | Log at notice level.
logNotice :: (MonadLog m) => String -> m ()
logNotice = logAt NOTICE
-- | Log at warning level.
logWarning :: (MonadLog m) => String -> m ()
logWarning = logAt WARNING
-- | Log at error level.
logError :: (MonadLog m) => String -> m ()
logError = logAt ERROR
-- | Log at critical level.
logCritical :: (MonadLog m) => String -> m ()
logCritical = logAt CRITICAL
-- | Log at alert level.
logAlert :: (MonadLog m) => String -> m ()
logAlert = logAt ALERT
-- | Log at emergency level.
logEmergency :: (MonadLog m) => String -> m ()
logEmergency = logAt EMERGENCY
-- * Logging in an error monad with rethrowing errors
-- | If an error occurs within a given computation, it is annotated with a
-- given message and logged, and the error is re-thrown.
withErrorLogAt :: (MonadLog m, MonadError e m, Show e)
=> Priority -> String -> m a -> m a
withErrorLogAt prio msg = flip catchError $ \e -> do
logAt prio (msg ++ ": " ++ show e)
throwError e
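-- | A minimal usage sketch, not part of this module's API: configure logging
-- for a hypothetical daemon and emit a couple of messages through the aliases
-- above.  The program name and log-file path are purely illustrative.
_exampleLoggingSetup :: IO ()
_exampleLoggingSetup = do
  setupLogging (Just "/tmp/example-daemon.log") -- log file (illustrative)
               "example-daemon"                 -- program name
               False                            -- debug level
               True                             -- log to stderr
               False                            -- log to console
               SyslogNo                         -- syslog usage
  logInfo "example daemon starting"
  logWarning "this message is purely illustrative"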
|
badp/ganeti
|
src/Ganeti/Logging.hs
|
gpl-2.0
| 5,648
| 0
| 12
| 1,373
| 1,178
| 633
| 545
| 104
| 4
|
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.Random.Spectrum.Purple
-- Copyright : (c) Matthew Donadio 2003
-- License : GPL
--
-- Maintainer : m.p.donadio@ieee.org
-- Stability : experimental
-- Portability : portable
--
-- Function for purple noise, which is differentiated white noise
--
-- This currently just does a simple first-order difference. This is
-- equivalent to filtering the white noise with @ h[n] = [1,-1] @.
-- A better solution would be to use a proper FIR differentiator.
--
-----------------------------------------------------------------------------
module Numeric.Random.Spectrum.Purple (purple) where
purple :: [Double] -- ^ noise
-> [Double] -- ^ purple noise
purple xs = zipWith (-) xs (0:xs)
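-- A small worked example (input values chosen purely for illustration):
--
-- > purple [1, 3, 2, 5] == [1, 2, -1, 3]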
|
tolysz/dsp
|
Numeric/Random/Spectrum/Purple.hs
|
gpl-2.0
| 809
| 0
| 7
| 135
| 71
| 50
| 21
| 4
| 1
|
{-# LANGUAGE DeriveFunctor #-}
-- A perceptive reader might ask the question:
-- if the derivation of the Functor instance for algebraic data types is so
-- mechanical, can't it be automated and performed by the compiler?
-- Indeed, it can, and it is.  You need to enable a particular Haskell extension
-- by including this line at the top of your source file:
--     {-# LANGUAGE DeriveFunctor #-}
-- and then add "deriving Functor" to your data structure:
data Maybe a = Nothing | Just a
deriving Functor
-- The corresponding fmap will be implemented for you.
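-- For reference, the derived instance behaves as if the usual instance had
-- been written by hand (a sketch, not the compiler's actual output):
--
-- > instance Functor Maybe where
-- >   fmap _ Nothing = Nothing
-- >   fmap f (Just a) = Just (f a)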
|
sujeet4github/MyLangUtils
|
CategoryTheory_BartoszMilewsky/PI_08_Functoriality/DerivingFunctors.hs
|
gpl-3.0
| 560
| 24
| 6
| 103
| 192
| 99
| 93
| 3
| 0
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.KnowledgeGraphSearch.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.KnowledgeGraphSearch.Types.Product where
import Network.Google.KnowledgeGraphSearch.Types.Sum
import Network.Google.Prelude
-- | Response message includes the context and a list of matching results
-- which contain the detail of associated entities.
--
-- /See:/ 'searchResponse' smart constructor.
data SearchResponse =
SearchResponse'
{ _srContext :: !(Maybe JSONValue)
, _srItemListElement :: !(Maybe [JSONValue])
, _srType :: !(Maybe JSONValue)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SearchResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srContext'
--
-- * 'srItemListElement'
--
-- * 'srType'
searchResponse
:: SearchResponse
searchResponse =
SearchResponse'
{_srContext = Nothing, _srItemListElement = Nothing, _srType = Nothing}
-- | The local context applicable for the response. See more details at
-- http:\/\/www.w3.org\/TR\/json-ld\/#context-definitions.
srContext :: Lens' SearchResponse (Maybe JSONValue)
srContext
= lens _srContext (\ s a -> s{_srContext = a})
-- | The item list of search results.
srItemListElement :: Lens' SearchResponse [JSONValue]
srItemListElement
= lens _srItemListElement
(\ s a -> s{_srItemListElement = a})
. _Default
. _Coerce
-- | The schema type of top-level JSON-LD object, e.g. ItemList.
srType :: Lens' SearchResponse (Maybe JSONValue)
srType = lens _srType (\ s a -> s{_srType = a})
instance FromJSON SearchResponse where
parseJSON
= withObject "SearchResponse"
(\ o ->
SearchResponse' <$>
(o .:? "@context") <*>
(o .:? "itemListElement" .!= mempty)
<*> (o .:? "@type"))
instance ToJSON SearchResponse where
toJSON SearchResponse'{..}
= object
(catMaybes
[("@context" .=) <$> _srContext,
("itemListElement" .=) <$> _srItemListElement,
("@type" .=) <$> _srType])
|
brendanhay/gogol
|
gogol-kgsearch/gen/Network/Google/KnowledgeGraphSearch/Types/Product.hs
|
mpl-2.0
| 2,692
| 0
| 13
| 603
| 423
| 253
| 170
| 53
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidPublisher.Purchases.Subscriptions.Acknowledge
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Acknowledges a subscription purchase.
--
-- /See:/ <https://developers.google.com/android-publisher Google Play Android Developer API Reference> for @androidpublisher.purchases.subscriptions.acknowledge@.
module Network.Google.Resource.AndroidPublisher.Purchases.Subscriptions.Acknowledge
(
-- * REST Resource
PurchasesSubscriptionsAcknowledgeResource
-- * Creating a Request
, purchasesSubscriptionsAcknowledge
, PurchasesSubscriptionsAcknowledge
-- * Request Lenses
, psaXgafv
, psaUploadProtocol
, psaPackageName
, psaAccessToken
, psaToken
, psaUploadType
, psaPayload
, psaSubscriptionId
, psaCallback
) where
import Network.Google.AndroidPublisher.Types
import Network.Google.Prelude
-- | A resource alias for @androidpublisher.purchases.subscriptions.acknowledge@ method which the
-- 'PurchasesSubscriptionsAcknowledge' request conforms to.
type PurchasesSubscriptionsAcknowledgeResource =
"androidpublisher" :>
"v3" :>
"applications" :>
Capture "packageName" Text :>
"purchases" :>
"subscriptions" :>
Capture "subscriptionId" Text :>
"tokens" :>
CaptureMode "token" "acknowledge" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
SubscriptionPurchasesAcknowledgeRequest
:> Post '[JSON] ()
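-- Editorial note (not in the generated source): the resource type above encodes
-- POST /androidpublisher/v3/applications/{packageName}/purchases/subscriptions/{subscriptionId}/tokens/{token}:acknowledge
-- with a JSON SubscriptionPurchasesAcknowledgeRequest body and an empty () response.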
-- | Acknowledges a subscription purchase.
--
-- /See:/ 'purchasesSubscriptionsAcknowledge' smart constructor.
data PurchasesSubscriptionsAcknowledge =
PurchasesSubscriptionsAcknowledge'
{ _psaXgafv :: !(Maybe Xgafv)
, _psaUploadProtocol :: !(Maybe Text)
, _psaPackageName :: !Text
, _psaAccessToken :: !(Maybe Text)
, _psaToken :: !Text
, _psaUploadType :: !(Maybe Text)
, _psaPayload :: !SubscriptionPurchasesAcknowledgeRequest
, _psaSubscriptionId :: !Text
, _psaCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PurchasesSubscriptionsAcknowledge' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'psaXgafv'
--
-- * 'psaUploadProtocol'
--
-- * 'psaPackageName'
--
-- * 'psaAccessToken'
--
-- * 'psaToken'
--
-- * 'psaUploadType'
--
-- * 'psaPayload'
--
-- * 'psaSubscriptionId'
--
-- * 'psaCallback'
purchasesSubscriptionsAcknowledge
:: Text -- ^ 'psaPackageName'
-> Text -- ^ 'psaToken'
-> SubscriptionPurchasesAcknowledgeRequest -- ^ 'psaPayload'
-> Text -- ^ 'psaSubscriptionId'
-> PurchasesSubscriptionsAcknowledge
purchasesSubscriptionsAcknowledge pPsaPackageName_ pPsaToken_ pPsaPayload_ pPsaSubscriptionId_ =
PurchasesSubscriptionsAcknowledge'
{ _psaXgafv = Nothing
, _psaUploadProtocol = Nothing
, _psaPackageName = pPsaPackageName_
, _psaAccessToken = Nothing
, _psaToken = pPsaToken_
, _psaUploadType = Nothing
, _psaPayload = pPsaPayload_
, _psaSubscriptionId = pPsaSubscriptionId_
, _psaCallback = Nothing
}
-- | V1 error format.
psaXgafv :: Lens' PurchasesSubscriptionsAcknowledge (Maybe Xgafv)
psaXgafv = lens _psaXgafv (\ s a -> s{_psaXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
psaUploadProtocol :: Lens' PurchasesSubscriptionsAcknowledge (Maybe Text)
psaUploadProtocol
= lens _psaUploadProtocol
(\ s a -> s{_psaUploadProtocol = a})
-- | The package name of the application for which this subscription was
-- purchased (for example, \'com.some.thing\').
psaPackageName :: Lens' PurchasesSubscriptionsAcknowledge Text
psaPackageName
= lens _psaPackageName
(\ s a -> s{_psaPackageName = a})
-- | OAuth access token.
psaAccessToken :: Lens' PurchasesSubscriptionsAcknowledge (Maybe Text)
psaAccessToken
= lens _psaAccessToken
(\ s a -> s{_psaAccessToken = a})
-- | The token provided to the user\'s device when the subscription was
-- purchased.
psaToken :: Lens' PurchasesSubscriptionsAcknowledge Text
psaToken = lens _psaToken (\ s a -> s{_psaToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
psaUploadType :: Lens' PurchasesSubscriptionsAcknowledge (Maybe Text)
psaUploadType
= lens _psaUploadType
(\ s a -> s{_psaUploadType = a})
-- | Multipart request metadata.
psaPayload :: Lens' PurchasesSubscriptionsAcknowledge SubscriptionPurchasesAcknowledgeRequest
psaPayload
= lens _psaPayload (\ s a -> s{_psaPayload = a})
-- | The purchased subscription ID (for example, \'monthly001\').
psaSubscriptionId :: Lens' PurchasesSubscriptionsAcknowledge Text
psaSubscriptionId
= lens _psaSubscriptionId
(\ s a -> s{_psaSubscriptionId = a})
-- | JSONP
psaCallback :: Lens' PurchasesSubscriptionsAcknowledge (Maybe Text)
psaCallback
= lens _psaCallback (\ s a -> s{_psaCallback = a})
instance GoogleRequest
PurchasesSubscriptionsAcknowledge
where
type Rs PurchasesSubscriptionsAcknowledge = ()
type Scopes PurchasesSubscriptionsAcknowledge =
'["https://www.googleapis.com/auth/androidpublisher"]
requestClient PurchasesSubscriptionsAcknowledge'{..}
= go _psaPackageName _psaSubscriptionId _psaToken
_psaXgafv
_psaUploadProtocol
_psaAccessToken
_psaUploadType
_psaCallback
(Just AltJSON)
_psaPayload
androidPublisherService
where go
= buildClient
(Proxy ::
Proxy PurchasesSubscriptionsAcknowledgeResource)
mempty
|
brendanhay/gogol
|
gogol-android-publisher/gen/Network/Google/Resource/AndroidPublisher/Purchases/Subscriptions/Acknowledge.hs
|
mpl-2.0
| 6,886
| 0
| 23
| 1,649
| 953
| 553
| 400
| 144
| 1
|
{-# LANGUAGE OverloadedStrings #-}
--------------------------------------------------------------------------------
-- See end of this file for licence information.
--------------------------------------------------------------------------------
-- |
-- Module : Swish.RDF.Vocabulary.SIOC
-- Copyright : (c) 2011 Douglas Burke
-- License : GPL V2
--
-- Maintainer : Douglas Burke
-- Stability : experimental
-- Portability : OverloadedStrings
--
-- This module defines some commonly used vocabulary terms from the SIOC
-- project (<http://sioc-project.org/>).
--
--------------------------------------------------------------------------------
module Swish.RDF.Vocabulary.SIOC
(
    -- | The version used for this module is Revision 1.35 of the
-- \"SIOC Core Ontology Specification\", dated 25 March 2010,
-- <http://rdfs.org/sioc/spec/>.
namespaceSIOC
-- * Classes
, siocCommunity
, siocContainer
, siocForum
, siocItem
, siocPost
, siocRole
, siocSite
, siocSpace
, siocThread
, siocUserAccount
, siocUsergroup
-- * Properties
, siocabout
, siocaccount_of
, siocaddressed_to
, siocadministrator_of
, siocattachment
, siocavatar
, sioccontainer_of
, sioccontent
, sioccreator_of
, siocearlier_version
, siocemail
, siocemail_sha1
, siocembeds_knowledge
, siocfeed
, siocfollows
, siocfunction_of
, siochas_administrator
, siochas_container
, siochas_creator
, siochas_discussion
, siochas_function
, siochas_host
, siochas_member
, siochas_moderator
, siochas_modifier
, siochas_owner
, siochas_parent
, siochas_reply
, siochas_scope
, siochas_space
, siochas_subscriber
, siochas_usergroup
, siochost_of
, siocid
, siocip_address
, sioclast_activity_date
, sioclast_item_date
, sioclast_reply_date
, sioclater_version
, sioclatest_version
, sioclink
, sioclinks_to
, siocmember_of
, siocmoderator_of
, siocmodifier_of
, siocname
, siocnext_by_date
, siocnext_version
, siocnote
, siocnum_authors
, siocnum_items
, siocnum_replies
, siocnum_threads
, siocnum_views
, siocowner_of
, siocparent_of
, siocprevious_by_date
, siocprevious_version
, siocrelated_to
, siocreply_of
, siocscope_of
, siocsibling
, siocspace_of
, siocsubscriber_of
, sioctopic
, siocusergroup_of
)
where
import Swish.Namespace (Namespace, makeNamespace, ScopedName, makeNSScopedName)
import Swish.QName (LName)
import Data.Maybe (fromMaybe)
import Network.URI (URI, parseURI)
------------------------------------------------------------
-- Namespace
------------------------------------------------------------
siocURI :: URI
siocURI = fromMaybe (error "Internal error processing SIOC URI") $ parseURI "http://rdfs.org/sioc/ns#"
-- | Maps @sioc@ to <http://rdfs.org/sioc/ns#>.
namespaceSIOC :: Namespace
namespaceSIOC = makeNamespace (Just "sioc") siocURI
------------------------------------------------------------
-- Terms
------------------------------------------------------------
toS :: LName -> ScopedName
toS = makeNSScopedName namespaceSIOC
-- Classes
-- | @sioc:Community@ from <http://rdfs.org/sioc/spec/#term_Community>.
siocCommunity :: ScopedName
siocCommunity = toS "Community"
-- | @sioc:Container@ from <http://rdfs.org/sioc/spec/#term_Container>.
siocContainer :: ScopedName
siocContainer = toS "Container"
-- | @sioc:Forum@ from <http://rdfs.org/sioc/spec/#term_Forum>.
siocForum :: ScopedName
siocForum = toS "Forum"
-- | @sioc:Item@ from <http://rdfs.org/sioc/spec/#term_Item>.
siocItem :: ScopedName
siocItem = toS "Item"
-- | @sioc:Post@ from <http://rdfs.org/sioc/spec/#term_Post>.
siocPost :: ScopedName
siocPost = toS "Post"
-- | @sioc:Role@ from <http://rdfs.org/sioc/spec/#term_Role>.
siocRole :: ScopedName
siocRole = toS "Role"
-- | @sioc:Site@ from <http://rdfs.org/sioc/spec/#term_Site>.
siocSite :: ScopedName
siocSite = toS "Site"
-- | @sioc:Space@ from <http://rdfs.org/sioc/spec/#term_Space>.
siocSpace :: ScopedName
siocSpace = toS "Space"
-- | @sioc:Thread@ from <http://rdfs.org/sioc/spec/#term_Thread>.
siocThread :: ScopedName
siocThread = toS "Thread"
-- | @sioc:UserAccount@ from <http://rdfs.org/sioc/spec/#term_UserAccount>.
siocUserAccount :: ScopedName
siocUserAccount = toS "UserAccount"
-- | @sioc:Usergroup@ from <http://rdfs.org/sioc/spec/#term_Usergroup>.
siocUsergroup :: ScopedName
siocUsergroup = toS "Usergroup"
-- Properties
-- | @sioc:about@ from <http://rdfs.org/sioc/spec/#term_about>.
siocabout :: ScopedName
siocabout = toS "about"
-- | @sioc:account_of@ from <http://rdfs.org/sioc/spec/#term_account_of>.
siocaccount_of :: ScopedName
siocaccount_of = toS "account_of"
-- | @sioc:addressed_to@ from <http://rdfs.org/sioc/spec/#term_addressed_to>.
siocaddressed_to :: ScopedName
siocaddressed_to = toS "addressed_to"
-- | @sioc:administrator_of@ from <http://rdfs.org/sioc/spec/#term_administrator_of>.
siocadministrator_of :: ScopedName
siocadministrator_of = toS "administrator_of"
-- | @sioc:attachment@ from <http://rdfs.org/sioc/spec/#term_attachment>.
siocattachment :: ScopedName
siocattachment = toS "attachment"
-- | @sioc:avatar@ from <http://rdfs.org/sioc/spec/#term_avatar>.
siocavatar :: ScopedName
siocavatar = toS "avatar"
-- | @sioc:container_of@ from <http://rdfs.org/sioc/spec/#term_container_of>.
sioccontainer_of :: ScopedName
sioccontainer_of = toS "container_of"
-- | @sioc:content@ from <http://rdfs.org/sioc/spec/#term_content>.
sioccontent :: ScopedName
sioccontent = toS "content"
-- | @sioc:creator_of@ from <http://rdfs.org/sioc/spec/#term_creator_of>.
sioccreator_of :: ScopedName
sioccreator_of = toS "creator_of"
-- | @sioc:earlier_version@ from <http://rdfs.org/sioc/spec/#term_earlier_version>.
siocearlier_version :: ScopedName
siocearlier_version = toS "earlier_version"
-- | @sioc:email@ from <http://rdfs.org/sioc/spec/#term_email>.
siocemail :: ScopedName
siocemail = toS "email"
-- | @sioc:email_sha1@ from <http://rdfs.org/sioc/spec/#term_email_sha1>.
siocemail_sha1 :: ScopedName
siocemail_sha1 = toS "email_sha1"
-- | @sioc:embeds_knowledge@ from <http://rdfs.org/sioc/spec/#term_embeds_knowledge>.
siocembeds_knowledge :: ScopedName
siocembeds_knowledge = toS "embeds_knowledge"
-- | @sioc:feed@ from <http://rdfs.org/sioc/spec/#term_feed>.
siocfeed :: ScopedName
siocfeed = toS "feed"
-- | @sioc:follows@ from <http://rdfs.org/sioc/spec/#term_follows>.
siocfollows :: ScopedName
siocfollows = toS "follows"
-- | @sioc:function_of@ from <http://rdfs.org/sioc/spec/#term_function_of>.
siocfunction_of :: ScopedName
siocfunction_of = toS "function_of"
-- | @sioc:has_administrator@ from <http://rdfs.org/sioc/spec/#term_has_administrator>.
siochas_administrator :: ScopedName
siochas_administrator = toS "has_administrator"
-- | @sioc:has_container@ from <http://rdfs.org/sioc/spec/#term_has_container>.
siochas_container :: ScopedName
siochas_container = toS "has_container"
-- | @sioc:has_creator@ from <http://rdfs.org/sioc/spec/#term_has_creator>.
siochas_creator :: ScopedName
siochas_creator = toS "has_creator"
-- | @sioc:has_discussion@ from <http://rdfs.org/sioc/spec/#term_has_discussion>.
siochas_discussion :: ScopedName
siochas_discussion = toS "has_discussion"
-- | @sioc:has_function@ from <http://rdfs.org/sioc/spec/#term_has_function>.
siochas_function :: ScopedName
siochas_function = toS "has_function"
-- | @sioc:has_host@ from <http://rdfs.org/sioc/spec/#term_has_host>.
siochas_host :: ScopedName
siochas_host = toS "has_host"
-- | @sioc:has_member@ from <http://rdfs.org/sioc/spec/#term_has_member>.
siochas_member :: ScopedName
siochas_member = toS "has_member"
-- | @sioc:has_moderator@ from <http://rdfs.org/sioc/spec/#term_has_moderator>.
siochas_moderator :: ScopedName
siochas_moderator = toS "has_moderator"
-- | @sioc:has_modifier@ from <http://rdfs.org/sioc/spec/#term_has_modifier>.
siochas_modifier :: ScopedName
siochas_modifier = toS "has_modifier"
-- | @sioc:has_owner@ from <http://rdfs.org/sioc/spec/#term_has_owner>.
siochas_owner :: ScopedName
siochas_owner = toS "has_owner"
-- | @sioc:has_parent@ from <http://rdfs.org/sioc/spec/#term_has_parent>.
siochas_parent :: ScopedName
siochas_parent = toS "has_parent"
-- | @sioc:has_reply@ from <http://rdfs.org/sioc/spec/#term_has_reply>.
siochas_reply :: ScopedName
siochas_reply = toS "has_reply"
-- | @sioc:has_scope@ from <http://rdfs.org/sioc/spec/#term_has_scope>.
siochas_scope :: ScopedName
siochas_scope = toS "has_scope"
-- | @sioc:has_space@ from <http://rdfs.org/sioc/spec/#term_has_space>.
siochas_space :: ScopedName
siochas_space = toS "has_space"
-- | @sioc:has_subscriber@ from <http://rdfs.org/sioc/spec/#term_has_subscriber>.
siochas_subscriber :: ScopedName
siochas_subscriber = toS "has_subscriber"
-- | @sioc:has_usergroup@ from <http://rdfs.org/sioc/spec/#term_has_usergroup>.
siochas_usergroup :: ScopedName
siochas_usergroup = toS "has_usergroup"
-- | @sioc:host_of@ from <http://rdfs.org/sioc/spec/#term_host_of>.
siochost_of :: ScopedName
siochost_of = toS "host_of"
-- | @sioc:id@ from <http://rdfs.org/sioc/spec/#term_id>.
siocid :: ScopedName
siocid = toS "id"
-- | @sioc:ip_address@ from <http://rdfs.org/sioc/spec/#term_ip_address>.
siocip_address :: ScopedName
siocip_address = toS "ip_address"
-- | @sioc:last_activity_date@ from <http://rdfs.org/sioc/spec/#term_last_activity_date>.
sioclast_activity_date :: ScopedName
sioclast_activity_date = toS "last_activity_date"
-- | @sioc:last_item_date@ from <http://rdfs.org/sioc/spec/#term_last_item_date>.
sioclast_item_date :: ScopedName
sioclast_item_date = toS "last_item_date"
-- | @sioc:last_reply_date@ from <http://rdfs.org/sioc/spec/#term_last_reply_date>.
sioclast_reply_date :: ScopedName
sioclast_reply_date = toS "last_reply_date"
-- | @sioc:later_version@ from <http://rdfs.org/sioc/spec/#term_later_version>.
sioclater_version :: ScopedName
sioclater_version = toS "later_version"
-- | @sioc:latest_version@ from <http://rdfs.org/sioc/spec/#term_latest_version>.
sioclatest_version :: ScopedName
sioclatest_version = toS "latest_version"
-- | @sioc:link@ from <http://rdfs.org/sioc/spec/#term_link>.
sioclink :: ScopedName
sioclink = toS "link"
-- | @sioc:links_to@ from <http://rdfs.org/sioc/spec/#term_links_to>.
sioclinks_to :: ScopedName
sioclinks_to = toS "links_to"
-- | @sioc:member_of@ from <http://rdfs.org/sioc/spec/#term_member_of>.
siocmember_of :: ScopedName
siocmember_of = toS "member_of"
-- | @sioc:moderator_of@ from <http://rdfs.org/sioc/spec/#term_moderator_of>.
siocmoderator_of :: ScopedName
siocmoderator_of = toS "moderator_of"
-- | @sioc:modifier_of@ from <http://rdfs.org/sioc/spec/#term_modifier_of>.
siocmodifier_of :: ScopedName
siocmodifier_of = toS "modifier_of"
-- | @sioc:name@ from <http://rdfs.org/sioc/spec/#term_name>.
siocname :: ScopedName
siocname = toS "name"
-- | @sioc:next_by_date@ from <http://rdfs.org/sioc/spec/#term_next_by_date>.
siocnext_by_date :: ScopedName
siocnext_by_date = toS "next_by_date"
-- | @sioc:next_version@ from <http://rdfs.org/sioc/spec/#term_next_version>.
siocnext_version :: ScopedName
siocnext_version = toS "next_version"
-- | @sioc:note@ from <http://rdfs.org/sioc/spec/#term_note>.
siocnote :: ScopedName
siocnote = toS "note"
-- | @sioc:num_authors@ from <http://rdfs.org/sioc/spec/#term_num_authors>.
siocnum_authors :: ScopedName
siocnum_authors = toS "num_authors"
-- | @sioc:num_items@ from <http://rdfs.org/sioc/spec/#term_num_items>.
siocnum_items :: ScopedName
siocnum_items = toS "num_items"
-- | @sioc:num_replies@ from <http://rdfs.org/sioc/spec/#term_num_replies>.
siocnum_replies :: ScopedName
siocnum_replies = toS "num_replies"
-- | @sioc:num_threads@ from <http://rdfs.org/sioc/spec/#term_num_threads>.
siocnum_threads :: ScopedName
siocnum_threads = toS "num_threads"
-- | @sioc:num_views@ from <http://rdfs.org/sioc/spec/#term_num_views>.
siocnum_views :: ScopedName
siocnum_views = toS "num_views"
-- | @sioc:owner_of@ from <http://rdfs.org/sioc/spec/#term_owner_of>.
siocowner_of :: ScopedName
siocowner_of = toS "owner_of"
-- | @sioc:parent_of@ from <http://rdfs.org/sioc/spec/#term_parent_of>.
siocparent_of :: ScopedName
siocparent_of = toS "parent_of"
-- | @sioc:previous_by_date@ from <http://rdfs.org/sioc/spec/#term_previous_by_date>.
siocprevious_by_date :: ScopedName
siocprevious_by_date = toS "previous_by_date"
-- | @sioc:previous_version@ from <http://rdfs.org/sioc/spec/#term_previous_version>.
siocprevious_version :: ScopedName
siocprevious_version = toS "previous_version"
-- | @sioc:related_to@ from <http://rdfs.org/sioc/spec/#term_related_to>.
siocrelated_to :: ScopedName
siocrelated_to = toS "related_to"
-- | @sioc:reply_of@ from <http://rdfs.org/sioc/spec/#term_reply_of>.
siocreply_of :: ScopedName
siocreply_of = toS "reply_of"
-- | @sioc:scope_of@ from <http://rdfs.org/sioc/spec/#term_scope_of>.
siocscope_of :: ScopedName
siocscope_of = toS "scope_of"
-- | @sioc:sibling@ from <http://rdfs.org/sioc/spec/#term_sibling>.
siocsibling :: ScopedName
siocsibling = toS "sibling"
-- | @sioc:space_of@ from <http://rdfs.org/sioc/spec/#term_space_of>.
siocspace_of :: ScopedName
siocspace_of = toS "space_of"
-- | @sioc:subscriber_of@ from <http://rdfs.org/sioc/spec/#term_subscriber_of>.
siocsubscriber_of :: ScopedName
siocsubscriber_of = toS "subscriber_of"
-- | @sioc:topic@ from <http://rdfs.org/sioc/spec/#term_topic>.
sioctopic :: ScopedName
sioctopic = toS "topic"
-- | @sioc:usergroup_of@ from <http://rdfs.org/sioc/spec/#term_usergroup_of>.
siocusergroup_of :: ScopedName
siocusergroup_of = toS "usergroup_of"
--------------------------------------------------------------------------------
--
-- Copyright (c) 2011 Douglas Burke
-- All rights reserved.
--
-- This file is part of Swish.
--
-- Swish is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- Swish is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Swish; if not, write to:
-- The Free Software Foundation, Inc.,
-- 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
--------------------------------------------------------------------------------
|
DougBurke/swish
|
src/Swish/RDF/Vocabulary/SIOC.hs
|
lgpl-2.1
| 15,092
| 0
| 8
| 2,137
| 1,492
| 898
| 594
| 245
| 1
|
module TokenTest where
-- Test new style token manager
bob a b = x
where x = 3
bib a b = x
where
x = 3
bab a b =
let bar = 3
in b + bar -- ^trailing comment
-- leading comment
foo x y =
do c <- getChar
return c
|
alanz/haskell-token-utils
|
test/testdata/TokenTest.hs
|
unlicense
| 246
| 0
| 8
| 90
| 89
| 46
| 43
| 11
| 1
|
{- |
Module : Bio.Motions.Callback.StandardScore
Description : Contains the definition of the standard score function.
License : Apache
Stability : experimental
Portability : unportable
-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module Bio.Motions.Callback.StandardScore(StandardScore) where
import Bio.Motions.Types
import Bio.Motions.Common
import Bio.Motions.Callback.Class
import Bio.Motions.Callback.Serialisation
import Bio.Motions.Representation.Class
import Control.Lens
import Data.List
import Data.MonoTraversable
import Data.Foldable
import Linear
import Data.Profunctor.Unsafe
import Control.DeepSeq
{- |
Represents the standard score function, i.e. the sum over all contacts of the binding energy
between the contacting atoms. Contacts are defined as pairs (binder, bead) with unit l_1
distance.
-}
newtype StandardScore = StandardScore Int
deriving (Eq, Ord, Num, Integral, Enum, Real, CallbackSerialisable, NFData)
instance Show StandardScore where
show (StandardScore i) = show i
instance Monoid StandardScore where
mempty = 0
{-# INLINE mempty #-}
mappend = (+)
{-# INLINE mappend #-}
instance Callback 'Pre StandardScore where
callbackName _ = "Standard Score"
runCallback repr = do
numChains <- getNumberOfChains repr
fold <$> traverse (chainScore repr) [0..numChains - 1]
updateCallback repr prev (MoveFromTo moveFrom moveTo) = do
Just fromAtom <- getAtomAt moveFrom repr
atFrom <- energyToMany repr fromAtom . neighbours $ fromAtom ^. position
atTo <- energyToMany repr fromAtom . delete moveFrom $ neighbours moveTo
pure $ prev - atFrom + atTo
{-# INLINEABLE updateCallback #-}
-- |Returns the score between an object and the atom placed on the specified position.
energyTo :: (Functor m, CallbackRepresentation m repr, HaveEnergyBetween obj Atom) =>
repr -> obj -> Vec3 -> m StandardScore
energyTo repr obj pos = StandardScore #. energyBetween obj <$> getAtomAt pos repr
{-# INLINE energyTo #-}
-- |Returns the total score between an object (e.g. an atom) and the atoms placed on the
-- specified positions.
energyToMany :: (Applicative m, CallbackRepresentation m repr,
HaveEnergyBetween obj Atom, Traversable t) =>
repr -> obj -> t Vec3 -> m StandardScore
energyToMany repr obj poss = fold <$> traverse (energyTo repr obj) poss
{-# INLINE energyToMany #-}
-- |Returns the neighbours of a given position
neighbours :: Vec3 -> [Vec3]
neighbours x = (x ^+^) <$> ([id, negated] <*> basis)
{-# INLINE neighbours #-}
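-- Editorial note (not in the original source): these are the six axis-aligned unit
-- offsets of a lattice position, i.e. exactly the positions at l_1 distance 1, which
-- matches the definition of a contact in the module documentation above.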
-- |Returns the total score for beads belonging to a particular chain.
chainScore :: (Monad m, CallbackRepresentation m repr) => repr -> Int -> m StandardScore
chainScore repr idx = getChain repr idx $ ofoldlM combine mempty
where
combine acc beadInfo = mappend acc <$>
energyToMany repr (asAtom beadInfo) (neighbours $ beadInfo ^. position)
|
Motions/motions
|
src/Bio/Motions/Callback/StandardScore.hs
|
apache-2.0
| 3,151
| 0
| 12
| 557
| 650
| 346
| 304
| 55
| 1
|
-- Copyright 2013 Joseph Tel Abrahamson
--
-- Licensed under the Apache License, Version 2.0 (the "License"); you
-- may not use this file except in compliance with the License. You
-- may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-- implied. See the License for the specific language governing
-- permissions and limitations under the License.
-- |
-- Module : Network.Nanomsg.C.Version
-- Copyright : (c) Joseph Abrahamson 2013
-- License : Apache 2.0
--
-- Maintainer : me@jspha.com
-- Stability : experimental
-- Portability : non-portable
module Network.Nanomsg.C.Version (
-- * The dynamic Nanomsg version
version
) where
import Network.Nanomsg.C.Syms
-- Version
----------
-- | The current version of Nanomsg.
version :: (Int, Int, Int)
version = ( fromIntegral $ getSym "NN_VERSION_MAJOR"
, fromIntegral $ getSym "NN_VERSION_MINOR"
, fromIntegral $ getSym "NN_VERSION_PATCH"
)
|
tel/hs-nanomsg
|
src/Network/Nanomsg/C/Version.hs
|
apache-2.0
| 1,195
| 0
| 7
| 235
| 94
| 66
| 28
| 7
| 1
|
import Graphics.SpriteKit
sprite = spriteWithImageNamed "Chicken.png"
apple = spriteWithImageNamed "Apple.png"
brownColor = colorWithRGBA 0.8 0.8 0.2 1
move = (moveBy (Vector (-100) 0)){actionDuration = 4}
chickenSprite = sprite{ nodeName = Just "Chicken"
, nodePosition = Point 200 100
, nodeXScale = 0.5
, nodeYScale = 0.5
, spriteColor = brownColor
, spriteColorBlendFactor = 0.5
}
appleSprite = apple { nodePosition = Point 100 100
, nodeXScale = 0.5
, nodeYScale = 0.5 }
myScene = (sceneWithSize (Size 800 640))
{ sceneBackgroundColor = colorWithRGBA 0 0.9 0.1 1
, sceneChildren = [appleSprite, chickenSprite]
, sceneHandleEvent = Just eventHandler
, sceneUpdate = Just moveChicken
, sceneData = ""
}
eventHandler (KeyEvent { keyEventType = KeyDown
, keyEventCharacters = keys})
world
= Just keys
eventHandler (KeyEvent { keyEventType = KeyUp
, keyEventCharacters = keys})
world
= Just ""
eventHandler event world = Nothing
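-- Editorial comment (not in the original source): moveChicken nudges the node named
-- "Chicken" by two points per update step in the direction chosen by the last WASD key
-- recorded in sceneData; any other key leaves the scene unchanged.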
moveChicken scene time
= case sceneData scene of
"w" -> let move = moveBy (Vector 0 2)
in
scene { sceneActionDirectives
= [runAction (runActionOnChildWithName
move
"Chicken")] }
"s" -> let move = moveBy (Vector 0 (-2))
in
scene { sceneActionDirectives
= [runAction (runActionOnChildWithName
move
"Chicken")] }
"a" -> let move = moveBy (Vector (-2) 0)
in
scene { sceneActionDirectives
= [runAction (runActionOnChildWithName
move
"Chicken")] }
"d" -> let move = moveBy (Vector 2 0)
in
scene { sceneActionDirectives
= [runAction (runActionOnChildWithName
move
"Chicken")] }
keys -> scene
|
mchakravarty/lets-code
|
Part3/Game.hsproj/Game.hs
|
bsd-2-clause
| 2,483
| 0
| 16
| 1,196
| 526
| 282
| 244
| 52
| 5
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, BangPatterns #-}
module BuggyLazyEvaluation (
testBuggyLazyEvaluation
) where
import Process (safeBracket)
import Control.Concurrent.Async
import Control.DeepSeq
import Control.Exception.Safe (catch, catchAny, onException, finally, handleAny, bracket
, SomeException, throwIO, throw, Exception, MonadMask
, withException, displayException)
import Data.Char
import qualified Data.Text.Lazy.IO as LText
import qualified Data.Text.Lazy as LText
import System.IO (openFile, hClose, IOMode(..))
toUpperCase :: LText.Text -> LText.Text
toUpperCase t = LText.map toUpper t
-- bad-bracket-begin
readFileContent1 :: FilePath -> IO LText.Text
readFileContent1 fileName
= bracket
(openFile fileName ReadMode)
hClose
(\handle -> do content <- LText.hGetContents handle
return $ toUpperCase content)
-- bad-bracket-end
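-- Editorial comment (not in the original source): LText.hGetContents reads the file
-- lazily, so 'bracket' closes the handle before the caller ever forces the returned
-- text; consuming it later may therefore fail or come back truncated, which is the
-- bug this module demonstrates.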
-- lazy-begin
printFileContent1 :: FilePath -> IO ()
printFileContent1 fileName = do
c <- readFileContent1 fileName
putStrLn $ LText.unpack c
-- lazy-end
-- safe-bracket-begin
readFileContent2 :: FilePath -> IO LText.Text
readFileContent2 fileName
= safeBracket
(openFile fileName ReadMode)
(\_ handle -> hClose handle)
(\handle -> do content <- LText.hGetContents handle
return $ toUpperCase content)
-- safe-bracket-end
printFileContent2 :: FilePath -> IO ()
printFileContent2 fileName = do
c <- readFileContent2 fileName
putStrLn $ LText.unpack c
fileName :: FilePath
fileName = "resources/file.txt"
testBuggyLazyEvaluation :: IO ()
testBuggyLazyEvaluation = do
putStrLn "File content using bad bracket: "
printFileContent1 fileName
putStrLn ""
putStrLn "File content using safe bracket: "
printFileContent2 fileName
|
massimo-zaniboni/threads-post
|
src/BuggyLazyEvaluation.hs
|
bsd-2-clause
| 1,908
| 0
| 12
| 397
| 445
| 237
| 208
| 47
| 1
|
module HSH.ShellState where
import HSH.MonitoredDirectory
import qualified Data.Map as Map
import Data.List.Split
import Data.Maybe
data ShellState = ShellState {
envVars :: Map.Map EnvVarName EnvVarValue,
pathDirs :: [MonitoredDirectory]
} deriving (Eq, Show)
{-
-- ENV var manipulation
-}
type EnvVarName = String
type EnvVarValue = String
-- | Set an environment variable. Takes a name, value, and existing state and returns a
-- modified state.
setEnv :: EnvVarName -> EnvVarValue -> ShellState -> ShellState
setEnv name val shellstate =
shellstate { envVars = Map.insert name val (envVars shellstate) }
-- | Get an environment variable. Takes a name and state and attempts to return the value
-- associated.
getEnv :: EnvVarName -> ShellState -> Maybe EnvVarValue
getEnv name ShellState{envVars = env} = Map.lookup name env
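-- Editorial usage sketch (not in the original module):
--
-- >>> getEnv "PROMPT" (setEnv "PROMPT" "$" defaultShellState)
-- Just "$"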
-- | The default shell state.
defaultShellState :: ShellState
defaultShellState = ShellState {
envVars = Map.fromList [("PROMPT", "haskell-sh $"), ("PATH", "/bin:/sbin")],
pathDirs = []
}
-- | Compute the shell prompt based on the current state.
shellPrompt :: ShellState -> String
shellPrompt ShellState{ envVars = env } =
prompt ++ " "
where
prompt = fromMaybe
"Prompt Undefined >"
(Map.lookup "PROMPT" env)
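-- | Editorial note (not in the original module): resolve a command name against the
-- monitored PATH directories, returning the qualified path of the first match, or the
-- bare command name when nothing matches or no PATH directories have been loaded.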
resolveExecutable :: ShellState -> String -> String
resolveExecutable ShellState { pathDirs = [] } command = command
resolveExecutable currentState command =
case listToMaybe $ mapMaybe (lookupCommand command) candidateDirs of
Just (QualifiedFilePath x) -> x
Nothing -> command
where
lookupCommand command mondir = Map.lookup command $ contents mondir
candidateDirs = pathDirs currentState
{-
--
-- Impure path-expanding code
--
-}
initialPathLoad :: ShellState -> IO ShellState
initialPathLoad oldState = do
newPathDirs <- mapM loadDirectory pathDirectories
return oldState { pathDirs = newPathDirs }
where
pathDirectories = splitOn ":" path
path = fromJust $ Map.lookup "PATH" $ envVars oldState
refreshPath :: ShellState -> IO ShellState
refreshPath oldState = do
newPathDirs <- mapM refreshDirectory (pathDirs oldState)
return oldState { pathDirs = newPathDirs }
|
jessekempf/hsh
|
src/HSH/ShellState.hs
|
bsd-2-clause
| 2,250
| 0
| 10
| 449
| 534
| 285
| 249
| 44
| 2
|
-- | Defining classes that represent knowledge about Citations
module Language.Drasil.Classes.Citations (
HasFields(getFields)
) where
import Language.Drasil.Data.Citation (CiteField)
import Control.Lens (Lens')
-- | Citations have Fields
class HasFields c where
getFields :: Lens' c [CiteField]
|
JacquesCarette/literate-scientific-software
|
code/drasil-lang/Language/Drasil/Classes/Citations.hs
|
bsd-2-clause
| 308
| 0
| 8
| 47
| 63
| 39
| 24
| 8
| 0
|
module DuckTest.Internal.Common
(module X, module DuckTest.Internal.Common) where
import Data.Set as X (Set)
import Data.Map as X (Map)
import Debug.Trace as X
import Language.Python.Common as X hiding (empty)
import Control.Monad as X
import Control.Applicative as X
import Data.List as X
import Data.Maybe as X
import Text.Printf as X
import Data.String.Utils as X hiding (join)
mconcatMap :: (Monoid m) => (a -> m) -> [a] -> m
mconcatMap fn = mconcat . map fn
mconcatMapM :: (Monoid mo, Monad m) => (a -> m mo) -> [a] -> m mo
mconcatMapM fn lst = mconcat <$> mapM fn lst
mconcatMapMaybe :: (Monoid m) => (a -> Maybe m) -> [a] -> m
mconcatMapMaybe fn = mconcat . mapMaybe fn
{- flip fmap -}
for :: (Functor f) => f a -> (a -> b) -> f b
for = flip fmap
{- maybe function, but with args rearranged to make it easier-}
maybe' :: Maybe a -> b -> (a -> b) -> b
maybe' a b c = maybe b c a
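-- Editorial example (not in the original source):
-- maybe' (Just 2) 0 (+1) == 3, while maybe' Nothing 0 (+1) == 0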
unless' :: (Monad m) => m Bool -> m () -> m ()
unless' cond fn = cond >>= flip unless fn
when' :: (Monad m) => m Bool -> m () -> m ()
when' cond fn = cond >>= flip when fn
(<.<) :: (Monad m) => (a -> c) -> (b -> m a) -> b -> m c
(<.<) f1 f2 arg = f1 <$> f2 arg
foldM' :: (Monad m) => a -> [b] -> (a -> b -> m a) -> m a
foldM' a b c = foldM c a b
|
jrahm/DuckTest
|
src/DuckTest/Internal/Common.hs
|
bsd-2-clause
| 1,249
| 0
| 12
| 287
| 607
| 332
| 275
| 30
| 1
|
-- Copyright (c) 2017, Travis Bemann
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- o Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
--
-- o Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- o Neither the name of the copyright holder nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-- ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
{-# LANGUAGE OverloadedStrings, OverloadedLists #-}
module Network.IRC.Client.Amphibian.ServerReplies
(rpl_WELCOME,
rpl_YOURHOST,
rpl_CREATED,
rpl_MYINFO,
rpl_BOUNCE,
rpl_USERHOST,
rpl_ISON,
rpl_AWAY,
rpl_UNAWAY,
rpl_NOWAWAY,
rpl_WHOISUSER,
rpl_WHOISSERVER,
rpl_WHOISOPERATOR,
rpl_WHOISIDLE,
rpl_ENDOFWHOIS,
rpl_WHOISCHANNELS,
rpl_WHOWASUSER,
rpl_ENDOFWHOWAS,
rpl_LISTSTART,
rpl_LIST,
rpl_LISTEND,
rpl_UNIQOPIS,
rpl_CHANNELMODEIS,
rpl_NOTOPIC,
rpl_TOPIC,
rpl_INVITING,
rpl_SUMMONING,
rpl_INVITELIST,
rpl_ENDOFINVITELIST,
rpl_EXCEPTLIST,
rpl_ENDOFEXCEPTLIST,
rpl_VERSION,
rpl_WHOREPLY,
rpl_ENDOFWHO,
rpl_NAMREPLY,
rpl_ENDOFNAMES,
rpl_LINKS,
rpl_ENDOFLINKS,
rpl_BANLIST,
rpl_ENDOFBANLIST,
rpl_INFO,
rpl_ENDOFINFO,
rpl_MOTDSTART,
rpl_MOTD,
rpl_ENDOFMOTD,
rpl_YOUREOPER,
rpl_REHASHING,
rpl_YOURESERVICE,
rpl_TIME,
rpl_USERSSTART,
rpl_USERS,
rpl_ENDOFUSERS,
rpl_NOUSERS,
rpl_TRACELINK,
rpl_TRACECONNECTING,
rpl_TRACEHANDSHAKE,
rpl_TRACEUNKNOWN,
rpl_TRACEOPERATOR,
rpl_TRACEUSER,
rpl_TRACESERVER,
rpl_TRACESERVICE,
rpl_TRACENEWTYPE,
rpl_TRACECLASS,
rpl_TRACERECONNECT,
rpl_TRACELOG,
rpl_TRACEEND,
rpl_STATSLINKINFO,
rpl_STATSCOMMANDS,
rpl_ENDOFSTATS,
rpl_STATSUPTIME,
rpl_STATSOLINE,
rpl_UMODEIS,
rpl_SERVLIST,
rpl_SERVLISTEND,
rpl_LUSERCLIENT,
rpl_LUSEROP,
rpl_LUSERUNKNOWN,
rpl_LUSERCHANNELS,
rpl_LUSERME,
rpl_ADMINME,
rpl_ADMINLOC1,
rpl_ADMINLOC2,
rpl_ADMINEMAIL,
rpl_TRYAGAIN,
err_NOSUCHNICK,
err_NOSUCHSERVER,
err_NOSUCHCHANNEL,
err_CANNOTSENDTOCHAN,
err_TOOMANYCHANNELS,
err_WASNOSUCHNICK,
err_TOOMANYTARGETS,
err_NOSUCHSERVICE,
err_NOORIGIN,
err_NORECIPIENT,
err_NOTEXTTOSEND,
err_NOTOPLEVEL,
err_WILDTOPLEVEL,
err_BADMASK,
err_UNKNOWNCOMMAND,
err_NOMOTD,
err_NOADMININFO,
err_FILEERROR,
err_NONICKNAMEGIVEN,
err_ERRONEUSNICKNAME,
err_NICKNAMEINUSE,
err_NICKCOLLISION,
err_UNAVAILRESOURCE,
err_USERNOTINCHANNEL,
err_NOTONCHANNEL,
err_USERONCHANNEL,
err_NOLOGIN,
err_SUMMONDISABLED,
err_USERSDISABLED,
err_NOTREGISTERED,
err_NEEDMOREPARAMS,
err_ALREADYREGISTRED,
err_NOPERMFORHOST,
err_PASSWDMISMATCH,
err_YOUREBANNEDCREEP,
err_YOUWILLBEBANNED,
err_KEYSET,
err_CHANNELISFULL,
err_UNKNOWNMODE,
err_INVITEONLYCHAN,
err_BANNEDFROMCHAN,
err_BADCHANNELKEY,
err_BADCHANMASK,
err_NOCHANMODES,
err_BANLISTFULL,
err_NOPRIVILEGES,
err_CHANOPRIVSNEEDED,
err_CANTKILLSERVER,
err_RESTRICTED,
err_UNIQOPPRIVSNEEDED,
err_NOOPERHOST,
err_UMODEUNKNOWNFLAG,
err_USERSDONTMATCH,
rpl_SERVICEINFO,
rpl_ENDOFSERVICES,
rpl_SERVICE,
rpl_NONE,
rpl_WHOISCHANOP,
rpl_KILLDONE,
rpl_CLOSING,
rpl_CLOSEEND,
rpl_INFOSTART,
rpl_MYPORTIS,
rpl_STATSCLINE,
rpl_STATSNLINE,
rpl_STATSILINE,
rpl_STATSKLINE,
rpl_STATSQLINE,
rpl_STATSYLINE,
rpl_STATSVLINE,
rpl_STATSLLINE,
rpl_STATSHLINE,
rpl_STATSSLINE,
rpl_STATSPING,
rpl_STATSBLINE,
rpl_STATSDLINE,
err_NOSERVICEHOST)
where
import qualified Data.ByteString as B
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8)
rpl_WELCOME :: B.ByteString
rpl_WELCOME = encodeUtf8 "001"
rpl_YOURHOST :: B.ByteString
rpl_YOURHOST = encodeUtf8 "002"
rpl_CREATED :: B.ByteString
rpl_CREATED = encodeUtf8 "003"
rpl_MYINFO :: B.ByteString
rpl_MYINFO = encodeUtf8 "004"
rpl_BOUNCE :: B.ByteString
rpl_BOUNCE = encodeUtf8 "005"
rpl_USERHOST :: B.ByteString
rpl_USERHOST = encodeUtf8 "302"
rpl_ISON :: B.ByteString
rpl_ISON = encodeUtf8 "303"
rpl_AWAY :: B.ByteString
rpl_AWAY = encodeUtf8 "301"
rpl_UNAWAY :: B.ByteString
rpl_UNAWAY = encodeUtf8 "305"
rpl_NOWAWAY :: B.ByteString
rpl_NOWAWAY = encodeUtf8 "306"
rpl_WHOISUSER :: B.ByteString
rpl_WHOISUSER = encodeUtf8 "311"
rpl_WHOISSERVER :: B.ByteString
rpl_WHOISSERVER = encodeUtf8 "312"
rpl_WHOISOPERATOR :: B.ByteString
rpl_WHOISOPERATOR = encodeUtf8 "313"
rpl_WHOISIDLE :: B.ByteString
rpl_WHOISIDLE = encodeUtf8 "317"
rpl_ENDOFWHOIS :: B.ByteString
rpl_ENDOFWHOIS = encodeUtf8 "318"
rpl_WHOISCHANNELS :: B.ByteString
rpl_WHOISCHANNELS = encodeUtf8 "319"
rpl_WHOWASUSER :: B.ByteString
rpl_WHOWASUSER = encodeUtf8 "314"
rpl_ENDOFWHOWAS :: B.ByteString
rpl_ENDOFWHOWAS = encodeUtf8 "369"
rpl_LISTSTART :: B.ByteString
rpl_LISTSTART = encodeUtf8 "321"
rpl_LIST :: B.ByteString
rpl_LIST = encodeUtf8 "322"
rpl_LISTEND :: B.ByteString
rpl_LISTEND = encodeUtf8 "323"
rpl_UNIQOPIS :: B.ByteString
rpl_UNIQOPIS = encodeUtf8 "325"
rpl_CHANNELMODEIS :: B.ByteString
rpl_CHANNELMODEIS = encodeUtf8 "324"
rpl_NOTOPIC :: B.ByteString
rpl_NOTOPIC = encodeUtf8 "331"
rpl_TOPIC :: B.ByteString
rpl_TOPIC = encodeUtf8 "332"
rpl_INVITING :: B.ByteString
rpl_INVITING = encodeUtf8 "341"
rpl_SUMMONING :: B.ByteString
rpl_SUMMONING = encodeUtf8 "342"
rpl_INVITELIST :: B.ByteString
rpl_INVITELIST = encodeUtf8 "346"
rpl_ENDOFINVITELIST :: B.ByteString
rpl_ENDOFINVITELIST = encodeUtf8 "347"
rpl_EXCEPTLIST :: B.ByteString
rpl_EXCEPTLIST = encodeUtf8 "348"
rpl_ENDOFEXCEPTLIST :: B.ByteString
rpl_ENDOFEXCEPTLIST = encodeUtf8 "349"
rpl_VERSION :: B.ByteString
rpl_VERSION = encodeUtf8 "351"
rpl_WHOREPLY :: B.ByteString
rpl_WHOREPLY = encodeUtf8 "352"
rpl_ENDOFWHO :: B.ByteString
rpl_ENDOFWHO = encodeUtf8 "315"
rpl_NAMREPLY :: B.ByteString
rpl_NAMREPLY = encodeUtf8 "353"
rpl_ENDOFNAMES :: B.ByteString
rpl_ENDOFNAMES = encodeUtf8 "366"
rpl_LINKS :: B.ByteString
rpl_LINKS = encodeUtf8 "364"
rpl_ENDOFLINKS :: B.ByteString
rpl_ENDOFLINKS = encodeUtf8 "365"
rpl_BANLIST :: B.ByteString
rpl_BANLIST = encodeUtf8 "367"
rpl_ENDOFBANLIST :: B.ByteString
rpl_ENDOFBANLIST = encodeUtf8 "368"
rpl_INFO :: B.ByteString
rpl_INFO = encodeUtf8 "371"
rpl_ENDOFINFO :: B.ByteString
rpl_ENDOFINFO = encodeUtf8 "374"
rpl_MOTDSTART :: B.ByteString
rpl_MOTDSTART = encodeUtf8 "375"
rpl_MOTD :: B.ByteString
rpl_MOTD = encodeUtf8 "372"
rpl_ENDOFMOTD :: B.ByteString
rpl_ENDOFMOTD = encodeUtf8 "376"
rpl_YOUREOPER :: B.ByteString
rpl_YOUREOPER = encodeUtf8 "381"
rpl_REHASHING :: B.ByteString
rpl_REHASHING = encodeUtf8 "382"
rpl_YOURESERVICE :: B.ByteString
rpl_YOURESERVICE = encodeUtf8 "383"
rpl_TIME :: B.ByteString
rpl_TIME = encodeUtf8 "391"
rpl_USERSSTART :: B.ByteString
rpl_USERSSTART = encodeUtf8 "392"
rpl_USERS :: B.ByteString
rpl_USERS = encodeUtf8 "393"
rpl_ENDOFUSERS :: B.ByteString
rpl_ENDOFUSERS = encodeUtf8 "394"
rpl_NOUSERS :: B.ByteString
rpl_NOUSERS = encodeUtf8 "395"
rpl_TRACELINK :: B.ByteString
rpl_TRACELINK = encodeUtf8 "200"
rpl_TRACECONNECTING :: B.ByteString
rpl_TRACECONNECTING = encodeUtf8 "201"
rpl_TRACEHANDSHAKE :: B.ByteString
rpl_TRACEHANDSHAKE = encodeUtf8 "202"
rpl_TRACEUNKNOWN :: B.ByteString
rpl_TRACEUNKNOWN = encodeUtf8 "203"
rpl_TRACEOPERATOR :: B.ByteString
rpl_TRACEOPERATOR = encodeUtf8 "204"
rpl_TRACEUSER :: B.ByteString
rpl_TRACEUSER = encodeUtf8 "205"
rpl_TRACESERVER :: B.ByteString
rpl_TRACESERVER = encodeUtf8 "206"
rpl_TRACESERVICE :: B.ByteString
rpl_TRACESERVICE = encodeUtf8 "207"
rpl_TRACENEWTYPE :: B.ByteString
rpl_TRACENEWTYPE = encodeUtf8 "208"
rpl_TRACECLASS :: B.ByteString
rpl_TRACECLASS = encodeUtf8 "209"
rpl_TRACERECONNECT :: B.ByteString
rpl_TRACERECONNECT = encodeUtf8 "210"
rpl_TRACELOG :: B.ByteString
rpl_TRACELOG = encodeUtf8 "261"
rpl_TRACEEND :: B.ByteString
rpl_TRACEEND = encodeUtf8 "262"
rpl_STATSLINKINFO :: B.ByteString
rpl_STATSLINKINFO = encodeUtf8 "211"
rpl_STATSCOMMANDS :: B.ByteString
rpl_STATSCOMMANDS = encodeUtf8 "212"
rpl_ENDOFSTATS :: B.ByteString
rpl_ENDOFSTATS = encodeUtf8 "219"
rpl_STATSUPTIME :: B.ByteString
rpl_STATSUPTIME = encodeUtf8 "242"
rpl_STATSOLINE :: B.ByteString
rpl_STATSOLINE = encodeUtf8 "243"
rpl_UMODEIS :: B.ByteString
rpl_UMODEIS = encodeUtf8 "221"
rpl_SERVLIST :: B.ByteString
rpl_SERVLIST = encodeUtf8 "234"
rpl_SERVLISTEND :: B.ByteString
rpl_SERVLISTEND = encodeUtf8 "235"
rpl_LUSERCLIENT :: B.ByteString
rpl_LUSERCLIENT = encodeUtf8 "251"
rpl_LUSEROP :: B.ByteString
rpl_LUSEROP = encodeUtf8 "252"
rpl_LUSERUNKNOWN :: B.ByteString
rpl_LUSERUNKNOWN = encodeUtf8 "253"
rpl_LUSERCHANNELS :: B.ByteString
rpl_LUSERCHANNELS = encodeUtf8 "254"
rpl_LUSERME :: B.ByteString
rpl_LUSERME = encodeUtf8 "255"
rpl_ADMINME :: B.ByteString
rpl_ADMINME = encodeUtf8 "256"
rpl_ADMINLOC1 :: B.ByteString
rpl_ADMINLOC1 = encodeUtf8 "257"
rpl_ADMINLOC2 :: B.ByteString
rpl_ADMINLOC2 = encodeUtf8 "258"
rpl_ADMINEMAIL :: B.ByteString
rpl_ADMINEMAIL = encodeUtf8 "259"
rpl_TRYAGAIN :: B.ByteString
rpl_TRYAGAIN = encodeUtf8 "263"
err_NOSUCHNICK :: B.ByteString
err_NOSUCHNICK = encodeUtf8 "401"
err_NOSUCHSERVER :: B.ByteString
err_NOSUCHSERVER = encodeUtf8 "402"
err_NOSUCHCHANNEL :: B.ByteString
err_NOSUCHCHANNEL = encodeUtf8 "403"
err_CANNOTSENDTOCHAN :: B.ByteString
err_CANNOTSENDTOCHAN = encodeUtf8 "404"
err_TOOMANYCHANNELS :: B.ByteString
err_TOOMANYCHANNELS = encodeUtf8 "405"
err_WASNOSUCHNICK :: B.ByteString
err_WASNOSUCHNICK = encodeUtf8 "406"
err_TOOMANYTARGETS :: B.ByteString
err_TOOMANYTARGETS = encodeUtf8 "407"
err_NOSUCHSERVICE :: B.ByteString
err_NOSUCHSERVICE = encodeUtf8 "408"
err_NOORIGIN :: B.ByteString
err_NOORIGIN = encodeUtf8 "409"
err_NORECIPIENT :: B.ByteString
err_NORECIPIENT = encodeUtf8 "411"
err_NOTEXTTOSEND :: B.ByteString
err_NOTEXTTOSEND = encodeUtf8 "412"
err_NOTOPLEVEL :: B.ByteString
err_NOTOPLEVEL = encodeUtf8 "413"
err_WILDTOPLEVEL :: B.ByteString
err_WILDTOPLEVEL = encodeUtf8 "414"
err_BADMASK :: B.ByteString
err_BADMASK = encodeUtf8 "415"
err_UNKNOWNCOMMAND :: B.ByteString
err_UNKNOWNCOMMAND = encodeUtf8 "421"
err_NOMOTD :: B.ByteString
err_NOMOTD = encodeUtf8 "422"
err_NOADMININFO :: B.ByteString
err_NOADMININFO = encodeUtf8 "423"
err_FILEERROR :: B.ByteString
err_FILEERROR = encodeUtf8 "424"
err_NONICKNAMEGIVEN :: B.ByteString
err_NONICKNAMEGIVEN = encodeUtf8 "431"
err_ERRONEUSNICKNAME :: B.ByteString
err_ERRONEUSNICKNAME = encodeUtf8 "432"
err_NICKNAMEINUSE :: B.ByteString
err_NICKNAMEINUSE = encodeUtf8 "433"
err_NICKCOLLISION :: B.ByteString
err_NICKCOLLISION = encodeUtf8 "436"
err_UNAVAILRESOURCE :: B.ByteString
err_UNAVAILRESOURCE = encodeUtf8 "437"
err_USERNOTINCHANNEL :: B.ByteString
err_USERNOTINCHANNEL = encodeUtf8 "441"
err_NOTONCHANNEL :: B.ByteString
err_NOTONCHANNEL = encodeUtf8 "442"
err_USERONCHANNEL :: B.ByteString
err_USERONCHANNEL = encodeUtf8 "443"
err_NOLOGIN :: B.ByteString
err_NOLOGIN = encodeUtf8 "444"
err_SUMMONDISABLED :: B.ByteString
err_SUMMONDISABLED = encodeUtf8 "445"
err_USERSDISABLED :: B.ByteString
err_USERSDISABLED = encodeUtf8 "446"
err_NOTREGISTERED :: B.ByteString
err_NOTREGISTERED = encodeUtf8 "451"
err_NEEDMOREPARAMS :: B.ByteString
err_NEEDMOREPARAMS = encodeUtf8 "461"
err_ALREADYREGISTRED :: B.ByteString
err_ALREADYREGISTRED = encodeUtf8 "462"
err_NOPERMFORHOST :: B.ByteString
err_NOPERMFORHOST = encodeUtf8 "463"
err_PASSWDMISMATCH :: B.ByteString
err_PASSWDMISMATCH = encodeUtf8 "464"
err_YOUREBANNEDCREEP :: B.ByteString
err_YOUREBANNEDCREEP = encodeUtf8 "465"
err_YOUWILLBEBANNED :: B.ByteString
err_YOUWILLBEBANNED = encodeUtf8 "466"
err_KEYSET :: B.ByteString
err_KEYSET = encodeUtf8 "467"
err_CHANNELISFULL :: B.ByteString
err_CHANNELISFULL = encodeUtf8 "471"
err_UNKNOWNMODE :: B.ByteString
err_UNKNOWNMODE = encodeUtf8 "472"
err_INVITEONLYCHAN :: B.ByteString
err_INVITEONLYCHAN = encodeUtf8 "473"
err_BANNEDFROMCHAN :: B.ByteString
err_BANNEDFROMCHAN = encodeUtf8 "474"
err_BADCHANNELKEY :: B.ByteString
err_BADCHANNELKEY = encodeUtf8 "475"
err_BADCHANMASK :: B.ByteString
err_BADCHANMASK = encodeUtf8 "476"
err_NOCHANMODES :: B.ByteString
err_NOCHANMODES = encodeUtf8 "477"
err_BANLISTFULL :: B.ByteString
err_BANLISTFULL = encodeUtf8 "478"
err_NOPRIVILEGES :: B.ByteString
err_NOPRIVILEGES = encodeUtf8 "481"
err_CHANOPRIVSNEEDED :: B.ByteString
err_CHANOPRIVSNEEDED = encodeUtf8 "482"
err_CANTKILLSERVER :: B.ByteString
err_CANTKILLSERVER = encodeUtf8 "483"
err_RESTRICTED :: B.ByteString
err_RESTRICTED = encodeUtf8 "484"
err_UNIQOPPRIVSNEEDED :: B.ByteString
err_UNIQOPPRIVSNEEDED = encodeUtf8 "485"
err_NOOPERHOST :: B.ByteString
err_NOOPERHOST = encodeUtf8 "491"
err_UMODEUNKNOWNFLAG :: B.ByteString
err_UMODEUNKNOWNFLAG = encodeUtf8 "501"
err_USERSDONTMATCH :: B.ByteString
err_USERSDONTMATCH = encodeUtf8 "502"
rpl_SERVICEINFO :: B.ByteString
rpl_SERVICEINFO = encodeUtf8 "231"
rpl_ENDOFSERVICES :: B.ByteString
rpl_ENDOFSERVICES = encodeUtf8 "232"
rpl_SERVICE :: B.ByteString
rpl_SERVICE = encodeUtf8 "233"
rpl_NONE :: B.ByteString
rpl_NONE = encodeUtf8 "300"
rpl_WHOISCHANOP :: B.ByteString
rpl_WHOISCHANOP = encodeUtf8 "316"
rpl_KILLDONE :: B.ByteString
rpl_KILLDONE = encodeUtf8 "361"
rpl_CLOSING :: B.ByteString
rpl_CLOSING = encodeUtf8 "362"
rpl_CLOSEEND :: B.ByteString
rpl_CLOSEEND = encodeUtf8 "363"
rpl_INFOSTART :: B.ByteString
rpl_INFOSTART = encodeUtf8 "373"
rpl_MYPORTIS :: B.ByteString
rpl_MYPORTIS = encodeUtf8 "384"
rpl_STATSCLINE :: B.ByteString
rpl_STATSCLINE = encodeUtf8 "213"
rpl_STATSNLINE :: B.ByteString
rpl_STATSNLINE = encodeUtf8 "214"
rpl_STATSILINE :: B.ByteString
rpl_STATSILINE = encodeUtf8 "215"
rpl_STATSKLINE :: B.ByteString
rpl_STATSKLINE = encodeUtf8 "216"
rpl_STATSQLINE :: B.ByteString
rpl_STATSQLINE = encodeUtf8 "217"
rpl_STATSYLINE :: B.ByteString
rpl_STATSYLINE = encodeUtf8 "218"
rpl_STATSVLINE :: B.ByteString
rpl_STATSVLINE = encodeUtf8 "240"
rpl_STATSLLINE :: B.ByteString
rpl_STATSLLINE = encodeUtf8 "241"
rpl_STATSHLINE :: B.ByteString
rpl_STATSHLINE = encodeUtf8 "244"
rpl_STATSSLINE :: B.ByteString
rpl_STATSSLINE = encodeUtf8 "245"
rpl_STATSPING :: B.ByteString
rpl_STATSPING = encodeUtf8 "246"
rpl_STATSBLINE :: B.ByteString
rpl_STATSBLINE = encodeUtf8 "247"
rpl_STATSDLINE :: B.ByteString
rpl_STATSDLINE = encodeUtf8 "250"
err_NOSERVICEHOST :: B.ByteString
err_NOSERVICEHOST = encodeUtf8 "492"
|
tabemann/amphibian
|
src/Network/IRC/Client/Amphibian/ServerReplies.hs
|
bsd-3-clause
| 15,643
| 0
| 5
| 2,226
| 2,965
| 1,666
| 1,299
| 488
| 1
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
module VSim.VIR.Types where
import Data.ByteString.Char8 as B
import Data.Map (Map)
import Data.Set (Set)
import Data.Data
import Data.Typeable
import Data.Generics
import VSim.Data.Loc
import VSim.Data.Int128
import VSim.Data.TInt
import VSim.Data.NamePath (Ident)
import VSim.VIR.Lexer as L
-- type Ident = B.ByteString
-- | VIR-file toplevel decarations
data IRTop
= IRTProcess IRProcess
| IRTFunction IRFunction
| IRTType IRType
| IRTConstant IRConstant
| IRTSignal IRSignal
| IRTAlias IRAlias
| IRTPort IRPort
| IRTProcedure IRProcedure
| IRTGenerate IRGen
| IRTMM MemoryMapRange
| IRTCorresp ([Ident],[Ident])
deriving(Show)
data UnitDecl = UnitDecl Loc Ident Int128 Ident
deriving (Show)
data Constrained t
= Unconstrained t
| Constrained Bool t
deriving (Show, Eq)
-- | Address range, a quadruple:
-- ( processor suffix in the hierarchy , suffix of the signal carrying the addresses
-- , start address , end address (not inclusive) ).
data MemoryMapRange = MemoryMapRange [B.ByteString] [B.ByteString] Integer Integer
deriving (Show)
data IRProcess
= IRProcess WLHierNameWPath [IRNameG] [IRLetDecl] IRStat
deriving(Show)
data IRGen
= IRGenIf IRExpr [IRTop]
| IRGenFor String WLHierNameWPath Integer Integer [IRTop]
deriving(Show)
data IRType = IRType WLHierNameWPath IRTypeDescr
deriving(Show)
data IRTypeDescr
= ITDName (WithLoc Ident)
| ITDRangeDescr IRRangeDescr
| ITDEnum [EnumElement]
| ITDArray [Constrained IRArrayRangeDescr] IRTypeDescr
| ITDPhysical IRRangeDescr Ident [UnitDecl]
| ITDRecord [(Loc, Ident, IRTypeDescr)]
| ITDAccess IRTypeDescr
| ITDResolved Loc Ident IRTypeDescr
| ITDConstraint Loc IRTypeDescr [IRArrayRangeDescr]
deriving (Show)
data IRRangeDescr
= IRDRange Loc IRExpr Bool IRExpr
| IRDARange Loc IRExpr IRName
| IRDAReverseRange Loc IRExpr IRName
deriving (Show)
data IRArrayRangeDescr
= IRARDRange IRRangeDescr
| IRARDTypeMark Loc IRTypeDescr
| IRARDConstrained Loc IRTypeDescr IRRangeDescr
  -- ^ this used to take a TypeMark instead of IRTypeDescr, but a TypeMark gives no way to carry the staticness information
deriving (Show)
data IRElementAssociation
= IEAExpr Loc IRExpr
| IEAOthers Loc IRExpr
| IEAType Loc IRArrayRangeDescr IRExpr
| IEAField Loc Ident IRExpr
| IEAExprIndex Loc IRExpr IRExpr
deriving (Show)
data IRNameG
= INAggregate Loc [IRElementAssociation] IRTypeDescr
| INIdent WLHierNameWPath
| INField IRNameG Loc Ident
| INIndex NameExprKind IRNameG Loc IREGList
| INSlice NameExprKind IRNameG Loc IRArrayRangeDescr
deriving (Show)
type IREGList = [(Loc, IRExpr)]
data NameCheck = ExprCheck | AssignCheck | SignalCheck | TypeCheck
deriving (Show)
data IRName = IRName IRNameG NameCheck
deriving (Show)
-- | Old VIR parser legacy. FIXME: remove it from this layer
data NameExprKind
= NEKStatic [B.ByteString]
-- ^ string suffix
| NEKDynamic
deriving (Show)
data IRTypeAttr
= T_left
| T_right
| T_high
| T_low
| T_ascending
deriving (Show)
data IRTypeValueAttr
= T_succ
| T_pred
| T_val
| T_pos
deriving (Show)
data IRArrayAttr
= A_left
| A_right
| A_high
| A_low
| A_ascending
| A_length
deriving (Show)
data IRValueAttr
= V_image
| V_value
| V_pos
| V_val
| V_succ
| V_pred
| V_leftof
| V_rightof
deriving (Show)
data IRSignalAttr
= S_event
| S_active
| S_last_value
deriving (Show)
data IRSignalAttrTimed
= S_stable
| S_delayed
| S_quiet
deriving (Show)
data IRRelOp
= IEq
| INeq
| ILess
| ILessEqual
| IGreater
| IGreaterEqual
deriving (Show)
data IRBinOp
= IMod | IRem | IDiv | IPlus | IMinus | IMul | IExp
| IAnd | INand | IOr | INor | IXor | IXNor
| IConcat
deriving (Show)
data IRUnOp
= IUPlus | IUMinus | IAbs | INot
deriving (Show)
data IRExpr
= IEName Loc IRName
| IEString Loc B.ByteString
| IEAggregate Loc [IRElementAssociation]
| IEQualifyType Loc IRTypeDescr IRExpr
| IEVQualifyType Loc IRTypeDescr IRTypeDescr IRExpr
| IETypeAttr Loc IRTypeAttr IRTypeDescr
| IETypeValueAttr Loc IRTypeValueAttr IRExpr IRTypeDescr
| IEArrayAttr Loc IRArrayAttr IRExpr IRName
| IESignalAttr Loc IRSignalAttr IRName
| IESignalAttrTimed Loc IRSignalAttrTimed IRName IRExpr
| IEEnumIdent Loc EnumElement
| IEInt Loc TInt
| IEDouble Loc Double
| IEPhysical (WithLoc Int128) (WithLoc Ident)
| IEFunctionCall (WithLoc Ident) IREGList Loc
| IERelOp Loc IRRelOp IRTypeDescr IRExpr IRExpr
| IEGenericBinop Loc IRGenericBinOp
IRTypeDescr IRTypeDescr IRExpr IRExpr
| IEBinOp Loc IRBinOp IRExpr IRExpr
| IEUnOp Loc IRUnOp IRExpr
| IECurrentTime Loc
deriving (Show)
data IRGenericBinOp = IRGenericDiv | IRGenericMul
deriving (Show)
data IRAfter
= IRAfter IRExpr IRExpr
-- ^ value time
deriving (Show)
type WLHierNameWPath = WithLoc (Ident, [Ident])
type LoopLabel = Ident
data IRStat
= ISReturn Loc
| ISReturnExpr Loc IRExpr
| ISProcCall (WithLoc Ident) IREGList Loc
| ISIf Loc IRExpr IRStat IRStat
| ISLet [IRLetDecl] IRStat
| ISAssign Loc IRName Loc IRExpr
| ISSignalAssign Loc IRName Loc [IRAfter]
| ISAssert Loc IRExpr IRExpr IRExpr -- expr report severity
| ISReport Loc IRExpr IRExpr -- report severity
| ISWait Loc [IRNameG] (Maybe IRExpr) (Maybe IRExpr) -- on until for
| ISNop Loc
| ISCase Loc IRExpr IRTypeDescr [IRCaseElement]
| ISFor LoopLabel Loc (WithLoc Ident) IRTypeDescr IRStat
| ISWhile LoopLabel Loc IRExpr IRStat
| ISSeq IRStat IRStat
| ISExit Loc LoopLabel
| ISNext Loc LoopLabel
| ISNil
deriving (Show)
data IRCaseElement
= ICEExpr Loc IREGList IRStat
| ICEOthers Loc IRStat
deriving (Show)
data IRLetDecl
= ILDConstant IRConstant
| ILDVariable IRVariable
| ILDAlias IRAlias
| ILDType WLHierNameWPath IRTypeDescr
| ILDFunction IRFunction
| ILDProcedure IRProcedure
deriving (Show)
data IRConstant = IRConstant WLHierNameWPath IRTypeDescr Loc IRExpr
deriving (Show)
data IRVariable = IRVariable WLHierNameWPath IRTypeDescr IROptExpr
deriving (Show)
data IRSignal = IRSignal WLHierNameWPath IRTypeDescr IROptExpr
deriving (Show)
data IRPort = IRPort WLHierNameWPath IRTypeDescr IROptExpr
deriving (Show)
data IRAlias = IRAlias WLHierNameWPath IRTypeDescr Loc IRName
deriving (Show)
data IROptExpr
= IOEJustExpr Loc IRExpr
| IOENothing Loc
deriving (Show)
data IRFunction = IRFunction WLHierNameWPath [IRArg] IRTypeDescr IRStat
deriving (Show)
data IRProcedure = IRProcedure WLHierNameWPath [IRArg] IRStat
deriving (Show)
data ArgMode
= AMIn
| AMOut
| AMInout
deriving (Show, Eq)
data NamedItemKind
= NIKConstant
| NIKVariable
| NIKSignal
| NIKFile
| NIKAlias NamedItemKind
deriving (Show, Eq, Ord)
data IRArg = IRArg (WithLoc Ident) IRTypeDescr NamedItemKind ArgMode
deriving (Show)
data LetEnv = LetEnv {
leCurFunc :: LetName
, leCurPath :: Ident
, leCurLevel :: Int
, leFuncs :: [LetName]
, leVars :: [LetName]
, leTypes :: [LetName]
}
deriving (Show)
data LetName = LetName {
-- | var1
lnName :: Ident
-- | .func.
, lnPath :: Ident
, lnLevel :: Int
}
deriving (Eq, Ord)
instance Show LetName where
show (LetName {..}) = "\"" ++ unpack (append lnPath lnName)
++ "[" ++ show lnLevel ++ "]\""
type LetUsageMap = Map LetName (Set LetName)
|
ierton/vsim
|
src/VSim/VIR/Types.hs
|
bsd-3-clause
| 8,071
| 0
| 12
| 1,992
| 1,953
| 1,136
| 817
| 257
| 0
|
{-# LANGUAGE LambdaCase #-}
module Game.Innovation.TestHelper
where
import SpecHelper
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Game.Innovation.Types
import Game.Innovation.Cards
getAllCardsFromMap :: Stack a =>
Map k a -> RawStack
getAllCardsFromMap = Map.foldr (++) [] . Map.map getRawStack
getAllCurrentCards :: Board -> RawStack
getAllCurrentCards (Board _ drawStacks dominateables players _ _) = cardsInDrawStacks ++ (getRawStack dominateables) ++ cardsAtPlayers
where
cardsInDrawStacks = getAllCardsFromMap drawStacks
cardsAtPlayers = concatMap getAllCardsOfPlayer players
getAllCardsOfPlayer :: Player -> RawStack
getAllCardsOfPlayer (Player _ stacks influence (Dominations ds) hand) = getAllCardsFromMap stacks ++ (getRawStack influence) ++ dominationCards ++ (getRawStack hand)
where
dominationCards = concatMap (\case
AgeDomination c -> [c]
_ -> []) ds
exactlyAllCardsArePresent :: Board -> Bool
exactlyAllCardsArePresent board = noCardsAreDuplicates && allCardsArePresent
where
allCurrentCards = sort $ getAllCurrentCards board
allStartingCards = sort getCards
noCardsAreDuplicates = allCurrentCards == nub allCurrentCards
allCardsArePresent = allCurrentCards == allStartingCards
|
maximilianhuber/innovation
|
test/Game/Innovation/TestHelper.hs
|
bsd-3-clause
| 1,422
| 0
| 14
| 335
| 324
| 172
| 152
| 26
| 2
|