Dataset columns: code (string, length 5 .. 1.03M) | repo_name (string, length 5 .. 90) | path (string, length 4 .. 158) | license (string, 15 classes) | size (int64, 5 .. 1.03M) | n_ast_errors (int64, 0 .. 53.9k) | ast_max_depth (int64, 2 .. 4.17k) | n_whitespaces (int64, 0 .. 365k) | n_ast_nodes (int64, 3 .. 317k) | n_ast_terminals (int64, 1 .. 171k) | n_ast_nonterminals (int64, 1 .. 146k) | loc (int64, -1 .. 37.3k) | cycloplexity (int64, -1 .. 1.31k)
module Latte.Lang (
module Latte.Lang.Abs,
module Latte.Lang.ErrM,
module Latte.Lang.Lex,
module Latte.Lang.Par,
module Latte.Lang.Print,
module Latte.Lang.Skel,
parseProgram
) where
import Control.Monad.Except
import Latte.Lang.Abs
import Latte.Lang.ErrM
import Latte.Lang.Lex
import Latte.Lang.Par
import Latte.Lang.Print
import Latte.Lang.Skel
import Latte.Commons
import Latte.Errors
parseProgram :: String -> Runner () Program
parseProgram text = let ts = myLexer text in
case pProgram ts of
Bad s -> do
liftIO $ print s
throwError $ ParseError
Ok tree -> do
liftIO $ print "Parse correct"
return tree
| repo_name: mpsk2/LatteCompiler | path: src/Latte/Lang.hs | license: mit | size: 636 | n_ast_errors: 16 | ast_max_depth: 13 | n_whitespaces: 107 | n_ast_nodes: 217 | n_ast_terminals: 121 | n_ast_nonterminals: 96 | loc: 26 | cycloplexity: 2 |
module NightClub where
import Text.Show.Functions
import Data.List
type Trago = (Cliente -> Cliente)
data Cliente = UnCliente {
nombre :: String,
resistencia :: Int,
amigos :: [Cliente],
tragos :: [Trago]
}
instance Eq Cliente where
(==) cliente1 cliente2 = nombre cliente1 == nombre cliente2
instance Show Cliente where
show cliente = "{ nombre: " ++ show (nombre cliente)
++ ", resistencia: " ++ show (resistencia cliente)
++ ", amigos: " ++ show (map nombre (amigos cliente))
++ ", tragos: [Tragos x" ++ show (length . tragos $ cliente) ++ "] }"
modificarNombre :: String -> Cliente -> Cliente
modificarNombre nuevoNombre cliente = cliente { nombre = nuevoNombre }
modificarResistencia :: (Int -> Int -> Int) -> Int -> Cliente -> Cliente
modificarResistencia operation valor cliente = cliente { resistencia = (resistencia cliente) `operation` valor }
modificarAmigos :: (Cliente -> Cliente) -> Cliente -> Cliente
modificarAmigos function cliente = cliente { amigos = (map function . amigos) cliente }
modificarTragos :: ([Trago] -> [Trago]) -> Cliente -> Cliente
modificarTragos function cliente = cliente { tragos = function $ tragos cliente }
---
rodri = UnCliente {
nombre = "Rodri",
resistencia = 55,
amigos = [],
tragos = [tomarTintico]
}
marcos = UnCliente {
nombre = "Marcos",
resistencia = 40,
amigos = [rodri],
tragos = [(tomarKlusener "Guinda")]
}
cristian = UnCliente {
nombre = "Cristian",
resistencia = 2,
amigos = [],
tragos = [tomarGrogXD, tomarJarraLoca]
}
ana = UnCliente {
nombre = "Ana",
resistencia = 120,
amigos = [marcos,rodri],
tragos = []
}
robertoCarlos = UnCliente {
nombre = "Roberto Carlos",
resistencia = 165,
amigos = [],
tragos = []
}
chuckNorris = UnCliente {
nombre = "Chuck Norris",
resistencia = 1000,
amigos = [ana],
tragos = [ tomarSoda x | x <- [1..] ]
}
{-
Justify: can chuckNorris order another drink with the `dameOtro` function?
`dameOtro` will not work for chuck, since evaluation would have to reduce an infinite list to find the last element.
Justify: can I make chuckNorris go through the basic itinerary and find out his resulting resistance?
Yes. Thanks to Haskell's lazy evaluation, chuck's (infinite) list of drinks is never needed, so applying the itinerary poses no problem at all.
Justify: can I ask whether chuckNorris has more resistance than ana?
Yes, for exactly the same reason as the previous point.
-}
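{- A minimal added sketch of the laziness argument above (hypothetical GHCi session;
   `realizarItinerario` and `itinerarioBasico` are defined further down in this file).
   With the definitions in this file, both expressions should terminate, because the
   infinite `tragos` list of chuckNorris is never forced:
   >>> resistencia chuckNorris > resistencia ana
   True
   >>> resistencia (realizarItinerario itinerarioBasico chuckNorris)
   1076
-}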
---
comoEsta :: Cliente -> String
comoEsta cliente
| resistencia cliente > 50 = "fresco"
| (length . amigos) cliente > 1 = "piola"
| otherwise = "duro"
---
esAmigo :: Cliente -> Cliente -> Bool
esAmigo amigo = elem amigo . amigos
reconocerAmigo :: Cliente -> Cliente -> Cliente
reconocerAmigo amigo cliente
| amigo == cliente || amigo `esAmigo` cliente = cliente
| otherwise = cliente { amigos = amigo : amigos cliente }
agregarTrago :: Trago -> Cliente -> Cliente
agregarTrago trago = modificarTragos ((:) trago)
---
tomarGrogXD :: Trago
tomarGrogXD = agregarTrago tomarGrogXD . modificarResistencia (*) 0
tomarJarraLoca :: Trago
tomarJarraLoca = agregarTrago tomarJarraLoca . modificarAmigos efectoJarra . efectoJarra
where efectoJarra = modificarResistencia (-) 10
tomarKlusener :: String -> Trago
tomarKlusener gusto = agregarTrago (tomarKlusener gusto) . modificarResistencia (-) (length gusto)
tomarTintico :: Trago
tomarTintico cliente = agregarTrago tomarTintico $ modificarResistencia (+) dif cliente
where dif = 5 * (length . amigos) cliente
tomarSoda :: Int -> Trago
tomarSoda fuerza cliente = agregarTrago (tomarSoda fuerza) $ modificarNombre nuevoNombre cliente
where nuevoNombre = "e" ++ replicate fuerza 'r' ++ "p" ++ nombre cliente
tomarJarraPopular :: Int -> Trago
tomarJarraPopular espirituosidad cliente
| espirituosidad == 0 = agregarTrago (tomarJarraPopular espirituosidad) cliente
| otherwise = tomarJarraPopular (espirituosidad - 1) (hacerseAmigo cliente)
---
rescatarse :: Int -> Cliente -> Cliente
rescatarse horas cliente
| (>3) horas = modificarResistencia (+) 200 cliente
| (>0) horas = modificarResistencia (+) 100 cliente
| otherwise = error "Not valid hour input"
----
tomarTragos :: [Trago] -> Cliente -> Cliente
tomarTragos [] = id
tomarTragos tragos = foldl1 (.) tragos
dameOtro :: Cliente -> Cliente
dameOtro cliente
| (not . null . tragos) cliente = ultimoTrago cliente
| otherwise = error "Cliente no tomó nada"
where ultimoTrago = (head . tragos) cliente
---
cualesPuedeTomar :: [Trago] -> Cliente -> [Trago]
cualesPuedeTomar listaTragos cliente = filter resistenciaMayorCero listaTragos
where resistenciaMayorCero trago = (resistencia . trago) cliente > 0
cuantasPuedeTomar :: [Trago] -> Cliente -> Int
cuantasPuedeTomar listaTragos = length . cualesPuedeTomar listaTragos
---
data Itinerario = UnItinerario {
descripcion :: String,
duracion :: Float,
acciones :: [Cliente -> Cliente]
}
mezclaExplosiva = UnItinerario {
descripcion = "Mezcla Explosiva",
duracion = 2.5,
acciones = [tomarKlusener "Frutilla", tomarKlusener "Huevo", tomarGrogXD, tomarGrogXD]
}
itinerarioBasico = UnItinerario {
descripcion = "Basico",
duracion = 5,
acciones = [tomarKlusener "Huevo", rescatarse 2, tomarKlusener "Chocolate", tomarJarraLoca]
}
salidaDeAmigos = UnItinerario {
descripcion = "Salida de amigos",
duracion = 1,
acciones = [tomarJarraLoca, reconocerAmigo robertoCarlos, tomarTintico, tomarSoda 1]
}
realizarItinerario :: Itinerario -> Cliente -> Cliente
realizarItinerario itinerario = foldl1 (.) (acciones itinerario)
----
intensidadItinerario :: Itinerario -> Float
intensidadItinerario itinerario = genericLength (acciones itinerario) / duracion itinerario
----
itinerarioMasIntenso :: [Itinerario] -> Itinerario
itinerarioMasIntenso = foldl1 itinerarioConMasIntensidad
itinerarioConMasIntensidad i1 i2
| intensidadItinerario i1 > intensidadItinerario i2 = i1
| otherwise = i2
----
hacerseAmigo :: Cliente -> Cliente
hacerseAmigo cliente = foldr reconocerAmigo cliente amigosDeAmigos
where amigosDeAmigos = (concat . map amigos . amigos) cliente
| repo_name: emanuelcasco/tp-paradigmas-funcional | path: src/NightClub.hs | license: mit | size: 6,247 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 1,128 | n_ast_nodes: 1,734 | n_ast_terminals: 939 | n_ast_nonterminals: 795 | loc: 130 | cycloplexity: 1 |
module ProjectRosalind.Motif.ExpressionParsing where
import Text.Parsec (ParseError, SourcePos, getPosition)
import Text.Parsec.String (Parser)
import ProjectRosalind.Motif.Parsec (try)
import ProjectRosalind.Motif.Char (oneOf, noneOf, char, digit, satisfy)
import ProjectRosalind.Motif.Combinator (eof, many1, manyTill, choice, chainl1, count)
import Control.Applicative ((<|>), many)
import Control.Monad (void)
import Data.Char (isLetter, isDigit)
import ProjectRosalind.Motif.FunctionsAndTypesForParsing
numberExamples :: [(String,Integer)]
numberExamples = [("1", 1)
,("23", 23)]
num :: Parser Integer
num = do
n <- many1 digit
return (read n)
num1 :: Parser Integer
num1 = do
n <- many digit
return (read n)
varExamples :: [(String,String)]
varExamples = [("test", "test")
,("_stuff", "_stuff")
,("_1234", "_1234")]
var :: Parser String
var = do
fc <- firstChar
rest <- many nonFirstChar
return (fc:rest)
where
firstChar = satisfy (\a -> isLetter a || a == '_')
nonFirstChar = satisfy (\a -> isDigit a || isLetter a || a == '_')
spanned :: Parser a -> Parser (SourcePos, a)
spanned p = do
pos <- getPosition
a <- p
pure (pos, a)
-- > parseTest (spanned (many1 (char 'a'))) "aaaaafff"
--((line 1, column 1),(line 1, column 6),"aaaaa")
glycosylation :: Parser String -- (SourcePos, SourcePos, String)
glycosylation = do
n <- char 'N'
notP1 <- noneOf ['P']
sOrt <- oneOf ['S', 'T']
notP2 <- noneOf ['P']
return [n, notP1, sOrt, notP2]
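-- Illustration (added, not part of the original file): the parser above encodes the
-- N-glycosylation motif N{P}[ST]{P}, so with the imported `regularParse` helper
-- `regularParse glycosylation "NRTX"` should yield Right "NRTX", while
-- `regularParse glycosylation "NPTX"` should fail because of the 'P' after 'N'.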
q = regularParse (spanned glycosylation) "QEWRQEWRLLELE"
r = regularParse (spanned glycosylation) "NRTX"
nnn = "MKNKFKTQEELVNHLKTVGFVFANSEIYNGLANAWDYGPLGVLLKNNLKNLWWKEFVTKQ\
\KDVVGLDSAIILNPLVWKASGHLDNFSDPLIDCKNCKARYRADKLIESFDENIHIAENSS\
\NEEFAKVLNDYEISCPTCKQFNWTEIRHFNLMFKTYQGVIEDAKNVVYLRPETAQGIFVN\
\FKNVQRSMRLHLPFGIAQIGKSFRNEITPGNFIFRTREFEQMEIEFFLKEESAYDIFDKY\
\LNQIENWLVSACGLSLNNLRKHEHPKEELSHYSKKTIDFEYNFLHGFSELYGIAYRTNYD\
\LSVHMNLSKKDLTYFDEQTKEKYVPHVIEPSVGVERLLYAILTEATFIEKLENDDERILM\
\DLKYDLAPYKIAVMPLVNKLKDKAEEIYGKILDLNISATFDNSGSIGKRYRRQDAIGTIY\
\CLTIDFDSLDDQQDPSFTIRERNSMAQKRIKLSELPLYLNQKAHEDFQRQCQK"
-- regularParse --
xx = manyTill (spanned glycosylation) eof
xxx = count 100 (spanned glycosylation)
xxxx = regularParse xxx nnn
qqqq = regularParse xx nnn
-- manyTill (spanned glycosylation) eof
| repo_name: brodyberg/Notes | path: ProjectRosalind.hsproj/LearnHaskell/lib/ProjectRosalind/Motif/ExpressionParsing.hs | license: mit | size: 2,427 | n_ast_errors: 0 | ast_max_depth: 13 | n_whitespaces: 438 | n_ast_nodes: 629 | n_ast_terminals: 345 | n_ast_nonterminals: 284 | loc: 51 | cycloplexity: 1 |
import Test.Tasty
import Test.Tasty.QuickCheck as QC
-- I will use Tasty as a testing library as soon as I figure out how to unit test
-- IO functions like getCharacters and getUpcomingCalendarEvents
main :: IO ()
main = putStrLn "Test suite not yet implemented"
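-- A hedged sketch of how the IO functions mentioned above could be exercised with
-- Tasty once that is figured out (hypothetical: assumes Test.Tasty.HUnit is added as
-- a dependency and that getUpcomingCalendarEvents :: IO [event] exists in the
-- library; kept in comments, not part of this repository):
--
--   import Test.Tasty.HUnit (testCase, assertBool)
--
--   ioTests :: TestTree
--   ioTests = testGroup "IO smoke tests"
--     [ testCase "getUpcomingCalendarEvents runs and returns a list" $ do
--         events <- getUpcomingCalendarEvents
--         assertBool "expected a (possibly empty) list" (length events >= 0)
--     ]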
| repo_name: AlexGagne/evecalendar | path: test/Spec.hs | license: mit | size: 266 | n_ast_errors: 0 | ast_max_depth: 6 | n_whitespaces: 47 | n_ast_nodes: 34 | n_ast_terminals: 20 | n_ast_nonterminals: 14 | loc: 4 | cycloplexity: 1 |
module Y2018.M01.D23.Solution where
{--
Continuing on our Prolog-y adventures in Haskell, let's look at P27 from P99:
P27 (**) Group the elements of a set into disjoint subsets.
a) In how many ways can a group of 9 people work in 3 disjoint subgroups of 2,
3 and 4 persons? Write a predicate that generates all the possibilities via
backtracking.
Example:
?- group3([aldo,beat,carla,david,evi,flip,gary,hugo,ida],G1,G2,G3).
G1 = [aldo,beat], G2 = [carla,david,evi], G3 = [flip,gary,hugo,ida]
...
That is to say (in Haskell terms), generate from a list of 9 elements all the
combinations of 2, 3, and 4 elements.
--}
import Data.Set (Set)
import qualified Data.Set as Set
-- below imports available via 1HaskellADay git repository
import Control.List (takeout)
import Data.QBit
group3 :: Ord a => [a] -> Set [Set a]
group3 elts =
-- we can do this with a list comprehension? But we have to avoid pulling the same
-- value twice. So, let's order takeout:
Set.fromList (takeout elts >>= \(e1, elts1) -> -- ... etc
takeout elts1 >>= \(e2, elts2) ->
takeout elts2 >>= \(e3, elts3) ->
takeout elts3 >>= \(e4, elts4) ->
takeout elts4 >>= \(e5, elts5) ->
takeout elts5 >>= \(e6, elts6) ->
takeout elts6 >>= \(e7, elts7) ->
takeout elts7 >>= \(e8, elts8) ->
takeout elts8 >>= \(e9, []) ->
return (map Set.fromList [[e1,e2],[e3,e4,e5],[e6,e7,e8,e9]]))
-- you can also do this with careful permutation
-- given:
data Peeps = Aldo | Beat | Carla | David | Evi | Flip | Gary | Hugo | Ida
deriving (Eq, Ord, Enum, Show)
{--
>>> take 5 $ Set.toList (group3 [Aldo .. Ida])
[[fromList [Aldo,Beat],fromList [Carla,David,Evi],fromList [Flip,Gary,Hugo,Ida]],
[fromList [Aldo,Beat],fromList [Carla,David,Flip],fromList [Evi,Gary,Hugo,Ida]],
[fromList [Aldo,Beat],fromList [Carla,David,Gary],fromList [Evi,Flip,Hugo,Ida]],
[fromList [Aldo,Beat],fromList [Carla,David,Hugo],fromList [Evi,Flip,Gary,Ida]],
[fromList [Aldo,Beat],fromList [Carla,David,Ida],fromList [Evi,Flip,Gary,Hugo]]]
>>> length $ group3 [Aldo .. Ida]
1260
That took a while. Of course, there are combinatorial formulas that can be used to compute
these numbers instead of counting every distinct set ...
--}
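{- A small added sketch (not part of the original solution): the counts above can be
   computed directly as multinomial coefficients instead of enumerating every grouping,
   e.g. multinomial 9 [2,3,4] == 1260 and multinomial 9 [2,2,5] == 756. -}
multinomial :: Integer -> [Integer] -> Integer
multinomial n ks = factorial n `div` product (map factorial ks)
  where factorial m = product [1..m]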
{-- BONUS -----------------------------------------------------------------
b) Generalize the above predicate in a way that we can specify a list of group
sizes and the predicate will return a list of groups.
Example:
?- group([aldo,beat,carla,david,evi,flip,gary,hugo,ida],[2,2,5],Gs).
Gs = [[aldo,beat],[carla,david],[evi,flip,gary,hugo,ida]]
...
Note that we do not want permutations of the group members; i.e.
[[aldo,beat],...] is the same solution as [[beat,aldo],...]. However, we make a
difference between [[aldo,beat],[carla,david],...] and
[[carla,david],[aldo,beat],...].
You may find more about this combinatorial problem in a good book on discrete
mathematics under the term "multinomial coefficients".
--}
grouper :: Ord a => [a] -> [Int] -> Set [Set a]
grouper peeps sizes =
let bits = map (`replicate` free) sizes
soln = drawing bits peeps
in Set.fromList (map (map (Set.fromList . map extract)) soln)
drawing :: Ord a => [[QBit a]] -> [a] -> [[[QBit a]]]
drawing [] _ = [[]]
drawing (q:q') list = draws q list >>= \(qs, rest) ->
drawing q' rest >>= return . (qs:)
{--
>>> take 5 $ Set.toList (grouper [Aldo .. Ida] [2,2,5])
[[fromList [Aldo,Beat],fromList [Carla,David],fromList [Evi,Flip,Gary,Hugo,Ida]],
[fromList [Aldo,Beat],fromList [Carla,Evi],fromList [David,Flip,Gary,Hugo,Ida]],
[fromList [Aldo,Beat],fromList [Carla,Flip],fromList [David,Evi,Gary,Hugo,Ida]],
[fromList [Aldo,Beat],fromList [Carla,Gary],fromList [David,Evi,Flip,Hugo,Ida]],
[fromList [Aldo,Beat],fromList [Carla,Hugo],fromList [David,Evi,Flip,Gary,Ida]]]
>>> length $ grouper [Aldo .. Ida] [2,2,5]
756
--}
-- of course, I had invented takeouts centuries ago ... but only as QBits, smh
| repo_name: geophf/1HaskellADay | path: exercises/HAD/Y2018/M01/D23/Solution.hs | license: mit | size: 3,906 | n_ast_errors: 0 | ast_max_depth: 29 | n_whitespaces: 619 | n_ast_nodes: 582 | n_ast_terminals: 328 | n_ast_nonterminals: 254 | loc: 28 | cycloplexity: 1 |
module HaskQuest.Game.Parser
( PlayerAction (..)
, parseChoice
)
where
import qualified Data.Char (toLower)
{-
For handling user input and then doing things.
-}
data PlayerAction
= Go String
| Back
| Inventory
| Description
| Inspect String
| Take String
| Drop String
| Quit
| Invalid
deriving (Show)
parseChoice :: String -> PlayerAction
parseChoice choice
| firstWord == "go" = if secondWord == "back"
then
Back
else
Go firstRest
| firstWord == "inventory" = Inventory
| oneOf firstWord ["description", "room", "where"] = Description
| firstWord == "inspect" = Inspect firstRest
| firstWord == "take" = Take firstRest
| firstWord == "drop" = Drop firstRest
| firstWord == "quit" = Quit
| otherwise = Invalid
where
firstWord = toLower $ head $ words choice
firstRest = unwords $ drop 1 $ words choice
secondWord = toLower $ head $ tail $ words choice
oneOf :: Eq a => a -> [a] -> Bool
oneOf x xs = (not . null) $ filter (x ==) xs
toLower :: String -> String
toLower = map Data.Char.toLower
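-- Illustrative behaviour of parseChoice (derived from the definition above; added,
-- not part of the original file):
--   parseChoice "go north"  evaluates to  Go "north"
--   parseChoice "go back"   evaluates to  Back
--   parseChoice "take lamp" evaluates to  Take "lamp"
--   parseChoice "dance"     evaluates to  Invalid
-- Note that an empty input makes `head (words choice)` fail, so callers should guard
-- against empty strings.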
| repo_name: pdarragh/HaskQuest | path: src/HaskQuest/Game/Parser.hs | license: mit | size: 1,157 | n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 339 | n_ast_nodes: 355 | n_ast_terminals: 186 | n_ast_nonterminals: 169 | loc: 34 | cycloplexity: 2 |
module T where
import Tests.KesterelBasis
-- |||| demands coPaused and shouldn't be.
e = nothingE |||| pauseE
c = arr (\() -> ()) >>> runE e
test_constructive = isJust (isConstructive c)
| repo_name: peteg/ADHOC | path: Tests/08_Kesterel/BugFinders/000_parallel_coPaused.hs | license: gpl-2.0 | size: 191 | n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 35 | n_ast_nodes: 60 | n_ast_terminals: 33 | n_ast_nonterminals: 27 | loc: 5 | cycloplexity: 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleContexts, FlexibleInstances, GeneralizedNewtypeDeriving, MultiParamTypeClasses, RecursiveDo, TypeFamilies, OverloadedStrings, RecordWildCards,UndecidableInstances, PackageImports, TemplateHaskell, RankNTypes #-}
module MarXup.Diagram.Tikz where
import Graphics.Diagrams.Core
import Prelude hiding (sum,mapM_,mapM,concatMap)
import Data.List (intercalate)
import MarXup
import MarXup.MultiRef (newLabel)
import MarXup.Tex
import Numeric (showFFloat)
import Data.Foldable
import Data.Monoid
type TexDiagram = Diagram TeX Tex
instance Element (Diagram TeX Tex ()) where
type Target (Diagram TeX Tex ()) = TeX
element d = do
texLn "" -- otherwise beamer does not understand where a tikzpicture ends (?!!)
braces $ do
usepkg "tikz" 100 []
env "tikzpicture" $
runDiagram tikzBackend d
-- diaDebug msg = diaRaw $ "\n%DBG:" ++ msg ++ "\n"
class Tikz a where
toTikz :: a -> String
instance Tikz FrozenPoint where
toTikz (Point x y) = "(" <> showDistance x <> "," <> showDistance y <> ")"
instance Tikz (Frozen Segment) where
toTikz (StraightTo p) = "--" <> toTikz p
toTikz (CurveTo c d p) = "..controls" <> toTikz c <> "and" <> toTikz d <> ".." <> toTikz p
toTikz Cycle = "--cycle"
-- toTikz (VH p) = "|-" <> toTikz p
-- toTikz (HV p) = "-|" <> toTikz p
-- toTikz (Rounded Nothing) = "[sharp corners]"
-- toTikz (Rounded (Just r)) = "[" <> toTikz (constant r) <> "]"
showDistance :: Constant -> String
showDistance x = showFFloat (Just 4) x tikzUnit
where tikzUnit = "pt"
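-- For example (added illustration): showDistance 1.5 should render as "1.5000pt",
-- i.e. four decimal places followed by the TikZ unit.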
instance Tikz LineTip where
toTikz t = case t of
ToTip -> "to"
StealthTip -> "stealth"
CircleTip -> "o"
NoTip -> ""
LatexTip -> "latex"
ReversedTip x -> toTikz x ++ " reversed"
BracketTip -> "["
ParensTip -> "("
showDashPat :: DashPattern -> String
showDashPat xs = intercalate " " ["on " <> showDistance on <>
" off " <> showDistance off | (on,off) <- xs]
instance Tikz PathOptions where
toTikz PathOptions{..} = "["
<> toTikz _startTip <> "-" <> toTikz _endTip <> ","
<> col "draw" _drawColor
<> col "fill" _fillColor
<> "line width=" <> showDistance _lineWidth <> ","
<> "line cap=" <> (case _lineCap of
RoundCap -> "round"
RectCap -> "rect"
ButtCap -> "butt") <> ","
<> "line join=" <> (case _lineJoin of
RoundJoin -> "round"
BevelJoin -> "bevel"
MiterJoin -> "miter") <> ","
<> "dash pattern=" <> showDashPat _dashPattern
<> (case _decoration of
Decoration [] -> ""
Decoration d -> ",decorate,decoration=" ++ d)
<> "]"
where col attr = maybe "" (\c -> attr <> "=" <> c <> ",")
tikzBackend :: Backend TeX Tex
tikzBackend = Backend {..} where
_tracePath options p = do
tex $ "\\path"
<> toTikz options
<> case p of
EmptyPath -> ""
(Path start segs) -> toTikz start ++ concatMap toTikz segs
tex ";\n"
_traceLabel :: Monad x =>
(location -> (FrozenPoint -> Tex ()) -> x ()) -> -- freezer
(forall a. Tex a -> x a) -> -- embedder
location ->
Tex () -> -- label specification
x BoxSpec
_traceLabel freezer embedder point lab = do
bxId <- embedder $ Tex newLabel
freezer point $ \p' -> do
tex $ "\\node[anchor=north west,inner sep=0] at " ++ toTikz p'
fillBox bxId True $ braces $ lab
tex ";\n"
embedder $ getBoxFromId bxId
type Dia = TexDiagram ()
| repo_name: jyp/MarXup | path: MarXup/Diagram/Tikz.hs | license: gpl-2.0 | size: 3,692 | n_ast_errors: 0 | ast_max_depth: 25 | n_whitespaces: 1,061 | n_ast_nodes: 1,023 | n_ast_terminals: 511 | n_ast_nonterminals: 512 | loc: 87 | cycloplexity: 2 |
{- |
Description : experimental logic for the specification of qualitative constraint calculi
Copyright : (c) Otto-von-Guericke University of Magdeburg
License : GPLv2 or higher, see LICENSE.txt
The "ConstraintCASL" folder contains the skeleton of
an instance of "Logic.Logic" for ConstraintCASL, an experimental logic for the specification of qualitative constraint calculi.
-}
module ConstraintCASL where
| repo_name: spechub/Hets | path: ConstraintCASL.hs | license: gpl-2.0 | size: 422 | n_ast_errors: 0 | ast_max_depth: 2 | n_whitespaces: 67 | n_ast_nodes: 5 | n_ast_terminals: 4 | n_ast_nonterminals: 1 | loc: 1 | cycloplexity: 0 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : $Header$
Description : Comorphism from Propostional Logic to OWL 2
Copyright : (c) Felix Gabriel Mance
License : GPLv2 or higher, see LICENSE.txt
Maintainer : f.mance@jacobs-university.de
Stability : provisional
Portability : non-portable (via Logic.Logic)
-}
module OWL2.Propositional2OWL2 where
import Common.ProofTree
import Logic.Logic
import Logic.Comorphism
import Common.AS_Annotation
import Common.Id
import Common.Result
import OWL2.AS
import OWL2.Keywords
import OWL2.MS
import qualified OWL2.Morphism as OWLMor
import qualified OWL2.ProfilesAndSublogics as OWLSub
import qualified OWL2.Sign as OWLSign
import qualified OWL2.Logic_OWL2 as OWLLogic
import qualified OWL2.Symbols as OWLSym
import qualified Propositional.Logic_Propositional as PLogic
import Propositional.AS_BASIC_Propositional
import qualified Propositional.Sublogic as PSL
import qualified Propositional.Sign as PSign
import qualified Propositional.Morphism as PMor
import qualified Propositional.Symbol as PSymbol
import qualified Data.Set as Set
data Propositional2OWL2 = Propositional2OWL2 deriving Show
instance Language Propositional2OWL2
instance Comorphism Propositional2OWL2
PLogic.Propositional
PSL.PropSL
BASIC_SPEC
FORMULA
SYMB_ITEMS
SYMB_MAP_ITEMS
PSign.Sign
PMor.Morphism
PSymbol.Symbol
PSymbol.Symbol
ProofTree
OWLLogic.OWL2
OWLSub.ProfSub
OntologyDocument
Axiom
OWLSym.SymbItems
OWLSym.SymbMapItems
OWLSign.Sign
OWLMor.OWLMorphism
Entity
OWLSym.RawSymb
ProofTree
where
sourceLogic Propositional2OWL2 = PLogic.Propositional
sourceSublogic Propositional2OWL2 = PSL.top
targetLogic Propositional2OWL2 = OWLLogic.OWL2
mapSublogic Propositional2OWL2 = Just . mapSub -- TODO
map_theory Propositional2OWL2 = mapTheory
-- map_morphism Propositional2OWL2 = mapMorphism
-- map_symbol Propositional2OWL2 _ = mapSymbol
isInclusionComorphism Propositional2OWL2 = True
has_model_expansion Propositional2OWL2 = True
mkOWLDeclaration :: ClassExpression -> Axiom
mkOWLDeclaration ex = PlainAxiom (ClassEntity $ Expression $ setPrefix "owl"
$ mkQName thingS) $ ListFrameBit (Just SubClass) $ ExpressionBit [([], ex)]
mapFormula :: FORMULA -> ClassExpression
mapFormula f = case f of
False_atom _ -> Expression $ mkQName nothingS
True_atom _ -> Expression $ mkQName thingS
Predication p -> Expression $ mkQName $ tokStr p
Negation nf _ -> ObjectComplementOf $ mapFormula nf
Conjunction fl _ -> ObjectJunction IntersectionOf $ map mapFormula fl
Disjunction fl _ -> ObjectJunction UnionOf $ map mapFormula fl
Implication a b _ -> ObjectJunction UnionOf [ObjectComplementOf
$ mapFormula a, mapFormula b]
Equivalence a b _ -> ObjectJunction IntersectionOf $ map mapFormula
[Implication a b nullRange, Implication b a nullRange]
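-- Illustration (added, not part of the original module): a conjunction with a negated
-- atom is translated structurally, so a formula reading "p and not q" becomes
--   ObjectJunction IntersectionOf
--     [ Expression (mkQName "p")
--     , ObjectComplementOf (Expression (mkQName "q")) ]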
mapPredDecl :: PRED_ITEM -> [Axiom]
mapPredDecl (Pred_item il _) = map (mkOWLDeclaration . Expression
. mkQName . tokStr) il
mapAxiomItems :: Annoted FORMULA -> Axiom
mapAxiomItems = mkOWLDeclaration . mapFormula . item
mapBasicItems :: BASIC_ITEMS -> [Axiom]
mapBasicItems bi = case bi of
Pred_decl p -> mapPredDecl p
Axiom_items al -> map mapAxiomItems al
mapBasicSpec :: BASIC_SPEC -> [Axiom]
mapBasicSpec (Basic_spec il) = concatMap (mapBasicItems . item) il
mapSign :: PSign.Sign -> OWLSign.Sign
mapSign ps = OWLSign.emptySign {OWLSign.concepts = Set.fromList
$ map (mkQName . tokStr . idToSimpleId) $ Set.toList $ PSign.items ps}
mapTheory :: (PSign.Sign, [Named FORMULA])
-> Result (OWLSign.Sign, [Named Axiom])
mapTheory (psig, fl) = return (mapSign psig, map (makeNamed "")
$ map (mkOWLDeclaration . mapFormula . sentence) fl)
mapSub :: PSL.PropSL -> OWLSub.ProfSub
mapSub _ = OWLSub.topS
| repo_name: nevrenato/Hets_Fork | path: OWL2/Propositional2OWL2.hs | license: gpl-2.0 | size: 3,988 | n_ast_errors: 0 | ast_max_depth: 13 | n_whitespaces: 748 | n_ast_nodes: 945 | n_ast_terminals: 500 | n_ast_nonterminals: 445 | loc: 90 | cycloplexity: 8 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ExistentialQuantification #-}
--------------------------------------------------------------------------------
-- |
-- Module : Tct.Method.Combinator
-- Copyright : (c) Martin Avanzini <martin.avanzini@uibk.ac.at>,
-- Georg Moser <georg.moser@uibk.ac.at>,
-- Andreas Schnabl <andreas.schnabl@uibk.ac.at>,
-- License : LGPL (see COPYING)
--
-- Maintainer : Martin Avanzini <martin.avanzini@uibk.ac.at>
-- Stability : unstable
-- Portability : unportable
--
-- This module defines various processor combinators.
--------------------------------------------------------------------------------
module Tct.Method.Combinator
(
-- * Trivial Processors
success
, fail
, empty
, open
-- ** Proof Object
, TrivialProof (..)
, OpenProof (..)
-- ** Processor Definition
, Success
, successProcessor
, Fail
, failProcessor
, EmptyRules
, emptyProcessor
, Open
, openProcessor
-- * Parallel / Sequential Proof Search
, before
, orFaster
, orBetter
, sequentially
, fastest
, best
-- ** Proof Object
, OneOfProof (..)
-- ** Processor Definition
, OneOf (..)
, sequentiallyProcessor
, fastestProcessor
, bestProcessor
-- * Measure Time
, timed
-- ** Proof Object
, TimedProof (..)
-- ** Processor Definition
, Timed (..)
-- * Conditional
, ite
, IteProof (..)
, Ite
, iteProcessor
, iteProgress
, IteProgressProof
, IteProgress
)
where
import Prelude hiding (fail)
import System.CPUTime (getCPUTime)
import Text.PrettyPrint.HughesPJ hiding (parens, empty)
import Control.Concurrent.PFold (pfoldA, Return (..))
import Control.Monad (forM)
import Control.Monad.Trans (liftIO)
import Data.Time.Clock (getCurrentTime, diffUTCTime)
import qualified Termlib.Trs as Trs
import Termlib.Problem (strictComponents)
import Termlib.Utils (paragraph)
import qualified Tct.Processor as P
import Tct.Utils.PPrint
import qualified Tct.Utils.Xml as Xml
import Tct.Utils.Enum (enumeration')
import Tct.Certificate (certified, constant)
import qualified Tct.Processor.Standard as S
import qualified Tct.Processor.Transformations as T
import Tct.Processor.Args
import qualified Tct.Processor.Args as A
import Tct.Processor.Args.Instances
-- failure and success
data TrivialProof = Succeeded
| Failed
| Empty Bool
instance P.ComplexityProof TrivialProof where
answer Succeeded = P.yesAnswer
answer Failed = P.MaybeAnswer
answer (Empty True) = P.CertAnswer $ certified (constant,constant)
answer (Empty False) = P.MaybeAnswer
pprintProof Succeeded _ = text "Success"
pprintProof Failed _ = text "Fail"
pprintProof (Empty True) _ = text "Empty rules are trivially bounded"
pprintProof (Empty False) _ = text "Empty strict component of the problem is NOT empty."
toXml Succeeded = Xml.elt "success" [] []
toXml Failed = Xml.elt "failed" [] []
toXml (Empty True) = Xml.elt "empty" [] []
toXml (Empty False) = Xml.elt "nonempty" [] []
-- instance P.Verifiable TrivialProof where
-- verify _ _ = P.verifyOK
data Fail = Fail deriving (Show)
instance S.Processor Fail where
type ArgumentsOf Fail = Unit
type ProofOf Fail = TrivialProof
name Fail = "fail"
instanceName _ = "fail"
solve _ _ = return Failed
description Fail = ["Processor 'fail' always returns the answer 'No'."]
arguments Fail = Unit
data Success = Success deriving (Show)
instance S.Processor Success where
type ArgumentsOf Success = Unit
type ProofOf Success = TrivialProof
name Success = "success"
instanceName _ = "success"
solve _ _ = return Succeeded
description Success = ["Processor 'success' always returns the answer 'Yes(?,?)'."]
arguments Success = Unit
data EmptyRules = EmptyRules deriving (Show)
instance S.Processor EmptyRules where
type ArgumentsOf EmptyRules = Unit
type ProofOf EmptyRules = TrivialProof
name EmptyRules = "empty"
solve _ prob | Trs.isEmpty $ strictComponents prob = return $ Empty True
| otherwise = return $ Empty False
description EmptyRules = ["Processor 'empty' returns 'Yes(O(1),O(1))' if the strict component of the problem is empty."]
arguments EmptyRules = Unit
failProcessor :: S.StdProcessor Fail
failProcessor = S.StdProcessor Fail
successProcessor :: S.StdProcessor Success
successProcessor = S.StdProcessor Success
emptyProcessor :: S.StdProcessor EmptyRules
emptyProcessor = S.StdProcessor EmptyRules
-- | This processor always returns the answer @No@.
fail :: S.ProcessorInstance Fail
fail = S.StdProcessor Fail `S.withArgs` ()
-- | This processor always returns the answer @Yes(?,?)@.
success :: S.ProcessorInstance Success
success = S.StdProcessor Success `S.withArgs` ()
-- | This processor returns the answer @Yes(O(1),O(1))@ if the strict component is empty.
empty :: S.ProcessorInstance EmptyRules
empty = S.StdProcessor EmptyRules `S.withArgs` ()
-- open
data OpenProof = OpenProof
instance P.ComplexityProof OpenProof
where answer _ = P.MaybeAnswer
pprintProof _ _ = paragraph "The problem remains open."
data Open = Open
instance S.Processor Open where
type ProofOf Open = OpenProof
type ArgumentsOf Open = A.Unit
name _ = "Open"
arguments _ = A.Unit
solve _ _ = return OpenProof
openProcessor :: S.StdProcessor Open
openProcessor = S.StdProcessor Open
-- | This processor always returns the answer @Maybe@.
open :: S.ProcessorInstance Open
open = openProcessor `S.withArgs` ()
-- if-then-else
data Ite g t e = Ite
data IteProof g t e = IteProof { guardProof :: P.Proof g
, branchProof :: Either (P.Proof t) (P.Proof e) }
instance ( P.Processor g
, P.Processor t
, P.Processor e
, P.ComplexityProof (P.ProofOf g)
, P.ComplexityProof (P.ProofOf t)
, P.ComplexityProof (P.ProofOf e)) => P.ComplexityProof (IteProof g t e) where
answer p = either (P.answer . P.result) (P.answer . P.result) (branchProof p)
pprintProof p mde@P.StrategyOutput =
paragraph ("a) We first check the conditional [" ++ (if suc then "Success" else "Fail") ++ "]:")
$+$ (indent $ P.pprintProof (P.result $ guardProof p) mde)
$+$ text ""
$+$ paragraph ("b) We continue with the " ++ (if suc then "then" else "else") ++ "-branch:")
$+$ case branchProof p of
Left bp -> P.pprintProof (P.result bp) mde
Right bp -> P.pprintProof (P.result bp) mde
where suc = P.succeeded $ P.result $ guardProof p
pprintProof p mde =
case branchProof p of
Left pb -> P.pprintProof (P.result pb) mde
Right pb -> P.pprintProof (P.result pb) mde
toXml p =
Xml.elt "ite" []
[ Xml.elt "guardProof" [] [P.toXml $ guardProof p]
, Xml.elt "guardSuccess" [] [Xml.text $ show $ P.succeeded $ guardProof p]
, Xml.elt "subProof" [] [sp]]
where sp = either P.toXml P.toXml $ branchProof p
instance ( P.Processor g
, P.Processor t
, P.Processor e)
=> S.Processor (Ite g t e) where
type ProofOf (Ite g t e) = IteProof g t e
type ArgumentsOf (Ite g t e) = Arg (Proc g) :+: Arg (Proc t) :+: Arg (Proc e)
name Ite = "ite"
instanceName inst = "Branch on whether processor '" ++ P.instanceName g ++ "' succeeds"
where g :+: _ :+: _ = S.processorArgs inst
description _ = ["This processor implements conditional branching"]
arguments _ = arg { A.name = "guard"
, A.description = "The guard processor. It succeeds if it returns 'Yes(*,*)'." }
:+:
arg { A.name = "then"
, A.description = "The processor that is applied if guard succeeds." }
:+:
arg { A.name = "else"
, A.description = "The processor that is applied if guard fails." }
solve inst prob =
do gproof <- P.apply g prob
if P.succeeded gproof
then do bproof <- P.apply t prob
return $ IteProof { guardProof = gproof
, branchProof = Left bproof }
else do bproof <- P.apply e prob
return $ IteProof { guardProof = gproof
, branchProof = Right bproof }
where g :+: t :+: e = S.processorArgs inst
-- | @ite g t e@ applies processor @t@ if processor @g@ succeeds, otherwise processor @e@ is applied.
ite :: (P.Processor g, P.Processor t, P.Processor e) => P.InstanceOf g -> P.InstanceOf t -> P.InstanceOf e -> S.ProcessorInstance (Ite g t e)
ite g t e = S.StdProcessor Ite `S.withArgs` (g :+: t :+: e)
iteProcessor :: S.StdProcessor (Ite P.AnyProcessor P.AnyProcessor P.AnyProcessor)
iteProcessor = S.StdProcessor Ite
-- branching on transformations
data IteProgress g t e = IteProgress (T.TheTransformer g)
data IteProgressProof g t e = IteProgressTransformed (T.Proof T.SomeTransformation P.SomeProcessor)
| IteProgressUntransformed (T.TheTransformer g) (T.ProofOf g) (P.Proof e)
instance ( T.Transformer g
, P.Processor t
, P.Processor e) => P.ComplexityProof (IteProgressProof g t e) where
answer (IteProgressTransformed p) = P.answer p
answer (IteProgressUntransformed _ _ p) = P.answer p
pprintProof (IteProgressTransformed p) mde = P.pprintProof p mde
pprintProof (IteProgressUntransformed tr tp p) mde =
case mde of
P.StrategyOutput ->
paragraph "Transformation of the input failed:"
$+$ text ""
$+$ T.pprintTProof tr (P.inputProblem p) tp mde
$+$ text ""
$+$ paragraph ("We continue with processor '" ++ P.instanceName (P.appliedProcessor p) ++ "'.")
$+$ text ""
$+$ indent (P.pprintProof (P.result p) mde )
_ -> P.pprintProof (P.result p) mde
toXml (IteProgressTransformed p) = P.toXml p
toXml (IteProgressUntransformed tr tp p) =
Xml.elt "iteProgress" []
[ Xml.elt "failedTransformation" []
[ Xml.elt n [] [T.transformerToXml tr
, Xml.elt "transformationProof" [] trxml]]
, Xml.elt "subProof" [] [P.toXml p]]
where (n,trxml) = T.tproofToXml tr (P.inputProblem p) tp
instance ( T.Transformer g
, P.Processor t
, P.Processor e) => S.Processor (IteProgress g t e) where
type ProofOf (IteProgress g t e) = IteProgressProof g t e
type ArgumentsOf (IteProgress g t e) = Arg (Proc t) :+: Arg (Proc e) :+: Arg Bool
name _ = "iteProgress"
arguments _ = arg { A.name = "then"
, A.description = "The processor that is applied if the transformation succeeds." }
:+:
arg { A.name = "else"
, A.description = "The processor that is applied if the transformation fails." }
:+:
opt { A.name = "parallel"
, A.description = "Decides whether the given subprocessor should be applied in parallel."
, A.defaultValue = False }
solve inst prob =
do res <- T.transform g prob
case res of
T.NoProgress tp ->
do sp <- P.apply e prob
return $ IteProgressUntransformed g tp sp
T.Progress _ ps ->
do esubproofs <- P.evalList par (P.succeeded . snd)
[P.apply t prob_i >>= \ r -> return (i,r)
| (i,prob_i) <- ps]
let subproofs = case esubproofs of {Left (fld,sps) -> fld:sps; Right sps -> sps}
proof = T.normalisedProof $
T.Proof { T.transformationResult = res
, T.inputProblem = prob
, T.appliedSubprocessor = t
, T.appliedTransformer = g
, T.subProofs = subproofs}
return $ IteProgressTransformed proof
where IteProgress g = S.processor inst
(t :+: e :+: par) = S.processorArgs inst
iteProgress :: (T.Transformer g, P.Processor t, P.Processor e) =>
T.TheTransformer g
-> P.InstanceOf t
-> P.InstanceOf e
-> S.ProcessorInstance (IteProgress g t e)
iteProgress g t e = S.StdProcessor (IteProgress g) `S.withArgs` (t :+: e :+: False)
-- adding time information
data Timed p = Timed
data TimedProof p =
TimedProof { tpCpu :: Double
, tpWall :: Double
, tpProof :: (P.ProofOf p) }
instance (P.Processor p) => P.ComplexityProof (TimedProof p) where
answer = P.answer . tpProof
pprintProof p mde =
P.pprintProof (tpProof p) mde
$+$ text ""
$+$ ( text "Wall-time:" <+> text (show (tpWall p)) <> text "s"
$+$ text "CPU-time:" <+> text (show (tpCpu p)) <> text "s")
toXml = P.toXml . tpProof
instance ( P.Processor p) => S.Processor (Timed p) where
type ProofOf (Timed p) = TimedProof p
type ArgumentsOf (Timed p) = Arg (Proc p)
name _ = "timed"
arguments _ = arg { A.name = "sub-processor"
, A.description = "The processor to measure execution time." }
solve inst prob = do
startCpuTime <- liftIO $ getCPUTime
startWallTime <- liftIO $ getCurrentTime
res <- P.solve_ sub prob
endCpuTime <- liftIO $ getCPUTime
endWallTime <- liftIO $ getCurrentTime
let cputime = fromInteger (endCpuTime - startCpuTime) / fromInteger ((10 :: Integer)^(12 :: Integer))
walltime = fromRational $ toRational $ diffUTCTime endWallTime startWallTime
return $ TimedProof { tpCpu = cputime
, tpWall = walltime
, tpProof = res }
where sub = S.processorArgs inst
timed :: P.Processor p => P.InstanceOf p -> S.ProcessorInstance (Timed p)
timed p = S.StdProcessor Timed `S.withArgs` p
-- parallel combinators
data OneOf p = Best | Fastest | Sequentially deriving (Eq, Show)
data OneOfProof p = OneOfFailed (OneOf p) [P.Proof p]
| OneOfSucceeded (OneOf p) (P.Proof p)
instance (P.Processor p) => P.ComplexityProof (OneOfProof p) where
pprintProof proof mde =
case proof of
(OneOfFailed _ failures) -> text "None of the processors succeeded."
$+$ text ""
$+$ detailsFailed (enumeration' failures)
(OneOfSucceeded o p)
| mde == P.StrategyOutput -> case o of
Sequentially -> paragraph (procName p ++ " succeeded:")
Fastest -> paragraph (procName p ++ " proved the goal fastest:")
Best -> paragraph (procName p ++ " proved the best result:")
$+$ text ""
$+$ P.pprintProof (P.result p) mde
| otherwise -> P.pprintProof (P.result p) mde
where procName p = "'" ++ P.instanceName (P.appliedProcessor p) ++ "'"
detailsFailed ps = block "Details of failed attempt(s)"
$ [ (a, paragraph (procName p ++ " failed due to the following reason:")
$+$ text ""
$+$ (P.pprintProof (P.result p) mde))
| (a,p) <- ps]
toXml (OneOfFailed p failures) =
Xml.elt "oneOf" []
[ Xml.elt "combinator" [] [ Xml.text (show p) ]
, Xml.elt "failures" [] [ P.toXml p_i | p_i <- failures ]]
toXml (OneOfSucceeded p p_i) =
Xml.elt "oneOf" []
[ Xml.elt "combinator" [] [ Xml.text (show p) ]
, Xml.elt "subProof" [] [ P.toXml p_i ]]
answer (OneOfFailed _ _) = P.MaybeAnswer
answer (OneOfSucceeded _ p) = P.answer p
instance (P.Processor p) => S.Processor (OneOf p) where
type ArgumentsOf (OneOf p) = Arg [Proc p]
type ProofOf (OneOf p) = OneOfProof p
name Fastest = "fastest"
name Sequentially = "sequentially"
name Best = "best"
instanceName inst = c (S.processor inst) -- ++ " of " ++ (concat $ intersperse ", " [ "'" ++ P.instanceName p ++ "'" | p <- S.processorArgs inst])
where c Best = "Best"
c Fastest = "Fastest"
c Sequentially = "Sequentially"
description Best = ["Processor 'Best' applies the given list of processors in parallel and returns the proof admitting the lowest complexity certificate."]
description Fastest = ["Processor 'Fastest' applies the given list of processors in parallel and returns the first successful proof."]
description Sequentially = ["Processor 'Sequentially' applies the given list of processors sequentially and returns the first successful proof."]
arguments _ = arg { A.name = "subprocessors"
, A.description = "a list of subprocessors"}
solve theproc prob | proc == Sequentially = solveSeq (S.processorArgs theproc) []
| proc == Best = solveBest (S.processorArgs theproc)
| otherwise = solveFast (S.processorArgs theproc)
where proc = S.processor theproc
mkActions ps = forM ps $ \ p -> P.mkIO $ P.apply p prob
ofResult o (Left faileds) = OneOfFailed o faileds
ofResult o (Right proof) = OneOfSucceeded o proof
solveSeq [] failures = return $ OneOfFailed Sequentially (reverse failures)
solveSeq (p:ps) failures = do r <- P.apply p prob
if P.succeeded r
then return $ OneOfSucceeded Sequentially r
else solveSeq ps (r:failures)
solveBest = solvePar betterThan final
where p1 `betterThan` p2 = P.certificate p1 < P.certificate p2
final = const False
solveFast= solvePar betterThan final
where _ `betterThan` _ = True
final = const True
solvePar better final ps = do actions <- mkActions ps
let sel (Left ps') proof | P.succeeded proof = ret proof
| otherwise = Continue $ Left (proof : ps')
sel (Right p1) p2 | p1 `better` p2 = ret p1
| otherwise = ret p2
ret proof | final proof = Stop $ Right proof
| otherwise = Continue $ Right proof
r <- liftIO $ pfoldA sel (Left []) actions
return $ ofResult proc r
bestProcessor :: S.StdProcessor (OneOf P.AnyProcessor)
bestProcessor = S.StdProcessor Best
fastestProcessor :: S.StdProcessor (OneOf P.AnyProcessor)
fastestProcessor = S.StdProcessor Fastest
sequentiallyProcessor :: S.StdProcessor (OneOf P.AnyProcessor)
sequentiallyProcessor = S.StdProcessor Sequentially
-- | The processor @p1 `orFaster` p2@ applies processor @p1@ and @p2@ in parallel. Returns the
-- proof of that processor that finishes fastest.
orFaster :: (P.Processor a, P.Processor b) =>
P.InstanceOf a -> P.InstanceOf b -> S.ProcessorInstance (OneOf P.SomeProcessor)
a `orFaster` b = fastest [P.someInstance a, P.someInstance b]
-- | The processor @p1 `orBetter` p2@ applies processor @p1@ and @p2@ in parallel. Returns the
-- proof that gives the better certificate.
orBetter :: (P.Processor a, P.Processor b) =>
P.InstanceOf a -> P.InstanceOf b -> S.ProcessorInstance (OneOf P.SomeProcessor)
a `orBetter` b = best [P.someInstance a, P.someInstance b]
-- | The processor @p1 `before` p2@ first applies processor @p1@, and if that fails processor @p2@.
before :: (P.Processor a, P.Processor b) =>
P.InstanceOf a -> P.InstanceOf b -> S.ProcessorInstance (OneOf P.SomeProcessor)
a `before` b = sequentially [P.someInstance a, P.someInstance b]
-- | List version of 'orBetter'.
-- Note that the types of all given processors need to agree. To mix processors
-- of different types, use 'some' on the individual arguments.
best :: (P.Processor p) => [P.InstanceOf p] -> S.ProcessorInstance (OneOf p)
best ps = S.StdProcessor Best `S.withArgs` ps
-- | List version of 'orFaster'.
-- Note that the types of all given processors need to agree. To mix processors
-- of different types, use 'some' on the individual arguments.
fastest :: (P.Processor p) => [P.InstanceOf p] -> S.ProcessorInstance (OneOf p)
fastest ps = S.StdProcessor Fastest `S.withArgs` ps
-- | List version of 'before'.
sequentially :: (P.Processor p) => [P.InstanceOf p] -> S.ProcessorInstance (OneOf p)
sequentially ps = S.StdProcessor Sequentially `S.withArgs` ps
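-- A hedged usage sketch of the combinators above (hypothetical, kept in comments;
-- p1 and p2 stand for arbitrary processor instances):
--
--   strategy = empty `before` (p1 `orFaster` p2)
--
-- i.e. first check whether the strict component is already empty, and otherwise run
-- p1 and p2 in parallel, keeping whichever answers first.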
| repo_name: mzini/TcT | path: source/Tct/Method/Combinator.hs | license: gpl-3.0 | size: 22,312 | n_ast_errors: 0 | ast_max_depth: 20 | n_whitespaces: 7,267 | n_ast_nodes: 5,863 | n_ast_terminals: 3,033 | n_ast_nonterminals: 2,830 | loc: 393 | cycloplexity: 1 |
{-| Module : PhaseParser
License : GPL
Maintainer : helium@cs.uu.nl
Stability : experimental
Portability : portable
-}
module Helium.Main.PhaseParser(phaseParser) where
import Helium.Main.CompileUtils
import Helium.Parser.LexerToken(Token)
import Helium.Parser.Parser (module_)
import Helium.Parser.ParseLibrary(runHParser)
import Text.ParserCombinators.Parsec.Error (ParseError)
import Helium.Syntax.UHA_Syntax(Name(..), MaybeName(..))
import Helium.Syntax.UHA_Range(noRange)
phaseParser ::
String -> [Token] -> [Option] ->
Phase ParseError Module
phaseParser fullName tokens options = do
enterNewPhase "Parsing" options
let (_, baseName, _) = splitFilePath fullName
case runHParser module_ fullName tokens True of
Left parseError ->
return (Left [parseError])
Right m ->
do let fixedm = fixModuleName m baseName
return (Right fixedm)
-- | Make sure the module has a name. If there is no name (module without
-- header) insert the base name of the file name as name.
fixModuleName :: Module -> String -> Module
fixModuleName original@(Module_Module r name es b) baseName =
case name of
MaybeName_Nothing ->
Module_Module r (MaybeName_Just (Name_Identifier noRange [] [] baseName)) es b -- !!!Name
_ -> original
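-- For example (added illustration, not part of the original file): parsing a file
-- "Queue.hs" whose source has no module header yields a module with
-- MaybeName_Nothing, which fixModuleName rewrites to the equivalent of
-- "module Queue where ...", using the base name of the file as the module name.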
| repo_name: Helium4Haskell/helium | path: src/Helium/Main/PhaseParser.hs | license: gpl-3.0 | size: 1,359 | n_ast_errors: 0 | ast_max_depth: 15 | n_whitespaces: 300 | n_ast_nodes: 327 | n_ast_terminals: 177 | n_ast_nonterminals: 150 | loc: 26 | cycloplexity: 2 |
-- import Hack.Contrib.Middleware.Debug
-- import Hack.Contrib.Request (inputs)
-- import Hack.Contrib.Middleware.Inspect
import Hack.Contrib.Middleware.SimpleAccessLogger
-- import Hack.Contrib.Middleware.Lambda
-- import Hack.Contrib.Middleware.Lucky
-- import Hack.Contrib.Middleware.ShowStatus
import Bamboo
import Bamboo.Theme.MiniHTML5
import Hack
import Hack.Contrib.Middleware.BounceFavicon
import Hack.Contrib.Middleware.ContentLength
import Hack.Contrib.Middleware.ContentType
-- import Hack.Contrib.Middleware.ETag
import Hack.Contrib.Middleware.ShowExceptions
import Hack.Contrib.Middleware.Static
import Hack.Contrib.Middleware.URLMap
import Hack.Contrib.Middleware.UTF8Body
-- import Hack.Contrib.Middleware.TokyoCabinetCache
-- import Hack.Middleware.Gzip
import qualified Data.ByteString.Char8 as B
import Hack.Contrib.Request (url)
import Hack.Contrib.Utils
import Hack.Handler.Happstack
import MPS.Env hiding (concat)
import Prelude ()
-- import qualified Hack.Contrib.Middleware.Head as H
import Bamboo.Plugin.Photo
import Bamboo.Plugin.Highlight
import Data.Default
import Data.ByteString.Lazy.Char8 (pack, concat)
import Control.Concurrent.MVar
default_content_type :: String
default_content_type = "text/plain; charset=UTF-8"
stack :: MVar a -> [Middleware]
stack mv =
[ dummy_middleware
-- filter
, bounce_favicon
-- setup
--, parse_multipart
-- log
, simple_access_logger Nothing
-- debug
-- , inspect
-- , debug (\e r -> e.print)
-- completeness
-- debuging
-- , show_status
-- optimization
-- for fun
-- , lucky
-- , lambda
, show_exceptions Nothing
, content_length
, content_type default_content_type
-- , etag
, url_map [("", bamboo)]
]
where
bamboo = use
[ bamboo_serve
-- , tokyocabinet_cache mv "cache.tcb" (url > unescape_uri > B.pack > Just > return)
, no_rss
, bamboo_with_theme theme
] dummy_app
bamboo_serve = static (Just "db/public")
["/theme", "/images", "/plugin", "/favicon.ico", "/media"]
no_rss app' = \env -> do
if env.path_info.ends_with "rss.xml"
then app' env
else use [html5, utf8_body, photo, highlight] app' env
html5 :: Middleware
html5 app = \env -> do
r <- app env
let b = r.body
return r {body = concat [doc_type, b]}
where
doc_type = pack "<!doctype html>\n\n"
-- test_app = \env -> return $ def .set_body (env.inputs.show)
webapp :: MVar a -> Application
webapp mv = use (stack mv) dummy_app
main :: IO ()
main = do
mv <- newMVar ()
runWithConfig def {port = 3000} (webapp mv)
-- run 3000 (webapp mv)
| repo_name: nfjinjing/bamboo | path: src/Main.hs | license: gpl-3.0 | size: 2,647 | n_ast_errors: 0 | ast_max_depth: 13 | n_whitespaces: 486 | n_ast_nodes: 519 | n_ast_terminals: 310 | n_ast_nonterminals: 209 | loc: 56 | cycloplexity: 2 |
-- UUAGC 0.9.52.1 (Helium/StaticAnalysis/Directives/TS_Apply.ag)
module Helium.StaticAnalysis.Directives.TS_Apply where
import Top.Types
import Helium.Syntax.UHA_Syntax
import Helium.StaticAnalysis.Miscellaneous.TypeConstraints
import Helium.StaticAnalysis.Miscellaneous.ConstraintInfo
import Data.List
import Helium.Utils.Utils (internalError)
import Helium.StaticAnalysis.Messages.Messages
import Helium.StaticAnalysis.Messages.TypeErrors
import Helium.ModuleSystem.ImportEnvironment
import Helium.StaticAnalysis.Inferencers.BindingGroupAnalysis (Assumptions, combine, noAssumptions)
import Helium.Parser.OperatorTable (OperatorTable)
import Helium.Parser.Parser (exp_)
import Helium.Parser.Lexer (strategiesLexer)
import Helium.Parser.ParseLibrary (runHParser)
import qualified Helium.Parser.ResolveOperators as ResolveOperators
import qualified Data.Map as M
import Helium.StaticAnalysis.Directives.TS_Attributes
import Helium.StaticAnalysis.Directives.TS_CoreSyntax
import Top.Ordering.Tree
type Core_TypingStrategies = [Core_TypingStrategy]
applyTypingStrategy :: Core_TypingStrategy -> MetaVariableInfo -> MetaVariableTable -> Int
-> (Assumptions, ConstraintSet, IO (), Int)
applyTypingStrategy strategy infoTuple metaVar unique =
let res = wrap_Core_TypingStrategy (sem_Core_TypingStrategy strategy)
Inh_Core_TypingStrategy {
infoTuple_Inh_Core_TypingStrategy = infoTuple,
metaVariableTable_Inh_Core_TypingStrategy = metaVar,
unique_Inh_Core_TypingStrategy = unique }
in (assumptions_Syn_Core_TypingStrategy res, constraintSet_Syn_Core_TypingStrategy res, debugIO_Syn_Core_TypingStrategy res, unique_Syn_Core_TypingStrategy res)
matchInformation :: ImportEnvironment -> Core_TypingStrategy -> [(Expression, [String])]
matchInformation importEnvironment typingStrategy =
case typingStrategy of
TypingStrategy _ (TypeRule premises conclusion) _ ->
let Judgement exprstring _ = conclusion
expression = expressionParser (operatorTable importEnvironment) exprstring
metas = [ s | Judgement s _ <- premises ]
in [(expression, metas)]
_ -> []
expressionParser :: OperatorTable -> String -> Expression
expressionParser theOperatorTable string =
case strategiesLexer [] "TS_Apply" string of
Left _ -> intErr
Right (tokens, _) ->
case runHParser exp_ "TS_Apply" tokens True {- wait for EOF -} of
Left _ -> intErr
Right expression ->
ResolveOperators.expression theOperatorTable expression
where
intErr = internalError "TS_Apply.ag" "n/a" ("unparsable expression: "++show string)
exactlyOnce :: Eq a => [a] -> [a]
exactlyOnce [] = []
exactlyOnce (x:xs) | x `elem` xs = exactlyOnce . filter (/= x) $ xs
| otherwise = x : exactlyOnce xs
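-- For example (added illustration): exactlyOnce [1,2,2,3] == [1,3]; elements that
-- occur more than once are dropped entirely.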
-- Core_Judgement ----------------------------------------------
-- cata
sem_Core_Judgement :: Core_Judgement ->
T_Core_Judgement
sem_Core_Judgement (Judgement _expression _type) =
(sem_Core_Judgement_Judgement _expression _type)
-- semantic domain
type T_Core_Judgement = MetaVariableInfo ->
MetaVariableTable ->
MapSubstitution ->
( ([Int]),([(String, Tp)]))
data Inh_Core_Judgement = Inh_Core_Judgement {infoTuple_Inh_Core_Judgement :: MetaVariableInfo,metaVariableTable_Inh_Core_Judgement :: MetaVariableTable,substitution_Inh_Core_Judgement :: MapSubstitution}
data Syn_Core_Judgement = Syn_Core_Judgement {ftv_Syn_Core_Judgement :: ([Int]),judgements_Syn_Core_Judgement :: ([(String, Tp)])}
wrap_Core_Judgement :: T_Core_Judgement ->
Inh_Core_Judgement ->
Syn_Core_Judgement
wrap_Core_Judgement sem (Inh_Core_Judgement _lhsIinfoTuple _lhsImetaVariableTable _lhsIsubstitution) =
(let ( _lhsOftv,_lhsOjudgements) = sem _lhsIinfoTuple _lhsImetaVariableTable _lhsIsubstitution
in (Syn_Core_Judgement _lhsOftv _lhsOjudgements))
sem_Core_Judgement_Judgement :: String ->
Tp ->
T_Core_Judgement
sem_Core_Judgement_Judgement expression_ type_ =
(\ _lhsIinfoTuple
_lhsImetaVariableTable
_lhsIsubstitution ->
(let _lhsOftv :: ([Int])
_lhsOjudgements :: ([(String, Tp)])
_lhsOftv =
ftv type_
_lhsOjudgements =
[(expression_, type_)]
in ( _lhsOftv,_lhsOjudgements)))
-- Core_Judgements ---------------------------------------------
-- cata
sem_Core_Judgements :: Core_Judgements ->
T_Core_Judgements
sem_Core_Judgements list =
(Prelude.foldr sem_Core_Judgements_Cons sem_Core_Judgements_Nil (Prelude.map sem_Core_Judgement list))
-- semantic domain
type T_Core_Judgements = MetaVariableInfo ->
MetaVariableTable ->
MapSubstitution ->
( ([Int]),([(String, Tp)]))
data Inh_Core_Judgements = Inh_Core_Judgements {infoTuple_Inh_Core_Judgements :: MetaVariableInfo,metaVariableTable_Inh_Core_Judgements :: MetaVariableTable,substitution_Inh_Core_Judgements :: MapSubstitution}
data Syn_Core_Judgements = Syn_Core_Judgements {ftv_Syn_Core_Judgements :: ([Int]),judgements_Syn_Core_Judgements :: ([(String, Tp)])}
wrap_Core_Judgements :: T_Core_Judgements ->
Inh_Core_Judgements ->
Syn_Core_Judgements
wrap_Core_Judgements sem (Inh_Core_Judgements _lhsIinfoTuple _lhsImetaVariableTable _lhsIsubstitution) =
(let ( _lhsOftv,_lhsOjudgements) = sem _lhsIinfoTuple _lhsImetaVariableTable _lhsIsubstitution
in (Syn_Core_Judgements _lhsOftv _lhsOjudgements))
sem_Core_Judgements_Cons :: T_Core_Judgement ->
T_Core_Judgements ->
T_Core_Judgements
sem_Core_Judgements_Cons hd_ tl_ =
(\ _lhsIinfoTuple
_lhsImetaVariableTable
_lhsIsubstitution ->
(let _lhsOftv :: ([Int])
_lhsOjudgements :: ([(String, Tp)])
_hdOinfoTuple :: MetaVariableInfo
_hdOmetaVariableTable :: MetaVariableTable
_hdOsubstitution :: MapSubstitution
_tlOinfoTuple :: MetaVariableInfo
_tlOmetaVariableTable :: MetaVariableTable
_tlOsubstitution :: MapSubstitution
_hdIftv :: ([Int])
_hdIjudgements :: ([(String, Tp)])
_tlIftv :: ([Int])
_tlIjudgements :: ([(String, Tp)])
_lhsOftv =
_hdIftv `union` _tlIftv
_lhsOjudgements =
_hdIjudgements ++ _tlIjudgements
_hdOinfoTuple =
_lhsIinfoTuple
_hdOmetaVariableTable =
_lhsImetaVariableTable
_hdOsubstitution =
_lhsIsubstitution
_tlOinfoTuple =
_lhsIinfoTuple
_tlOmetaVariableTable =
_lhsImetaVariableTable
_tlOsubstitution =
_lhsIsubstitution
( _hdIftv,_hdIjudgements) =
hd_ _hdOinfoTuple _hdOmetaVariableTable _hdOsubstitution
( _tlIftv,_tlIjudgements) =
tl_ _tlOinfoTuple _tlOmetaVariableTable _tlOsubstitution
in ( _lhsOftv,_lhsOjudgements)))
sem_Core_Judgements_Nil :: T_Core_Judgements
sem_Core_Judgements_Nil =
(\ _lhsIinfoTuple
_lhsImetaVariableTable
_lhsIsubstitution ->
(let _lhsOftv :: ([Int])
_lhsOjudgements :: ([(String, Tp)])
_lhsOftv =
[]
_lhsOjudgements =
[]
in ( _lhsOftv,_lhsOjudgements)))
-- Core_TypeRule -----------------------------------------------
-- cata
sem_Core_TypeRule :: Core_TypeRule ->
T_Core_TypeRule
sem_Core_TypeRule (TypeRule _premises _conclusion) =
(sem_Core_TypeRule_TypeRule (sem_Core_Judgements _premises) (sem_Core_Judgement _conclusion))
-- semantic domain
type T_Core_TypeRule = MetaVariableInfo ->
MetaVariableTable ->
MapSubstitution ->
( (TypeConstraints ConstraintInfo),([Int]),([(String, Tp)]))
data Inh_Core_TypeRule = Inh_Core_TypeRule {infoTuple_Inh_Core_TypeRule :: MetaVariableInfo,metaVariableTable_Inh_Core_TypeRule :: MetaVariableTable,substitution_Inh_Core_TypeRule :: MapSubstitution}
data Syn_Core_TypeRule = Syn_Core_TypeRule {constraints_Syn_Core_TypeRule :: (TypeConstraints ConstraintInfo),ftv_Syn_Core_TypeRule :: ([Int]),judgements_Syn_Core_TypeRule :: ([(String, Tp)])}
wrap_Core_TypeRule :: T_Core_TypeRule ->
Inh_Core_TypeRule ->
Syn_Core_TypeRule
wrap_Core_TypeRule sem (Inh_Core_TypeRule _lhsIinfoTuple _lhsImetaVariableTable _lhsIsubstitution) =
(let ( _lhsOconstraints,_lhsOftv,_lhsOjudgements) = sem _lhsIinfoTuple _lhsImetaVariableTable _lhsIsubstitution
in (Syn_Core_TypeRule _lhsOconstraints _lhsOftv _lhsOjudgements))
sem_Core_TypeRule_TypeRule :: T_Core_Judgements ->
T_Core_Judgement ->
T_Core_TypeRule
sem_Core_TypeRule_TypeRule premises_ conclusion_ =
(\ _lhsIinfoTuple
_lhsImetaVariableTable
_lhsIsubstitution ->
(let _lhsOconstraints :: (TypeConstraints ConstraintInfo)
_lhsOftv :: ([Int])
_lhsOjudgements :: ([(String, Tp)])
_premisesOinfoTuple :: MetaVariableInfo
_premisesOmetaVariableTable :: MetaVariableTable
_premisesOsubstitution :: MapSubstitution
_conclusionOinfoTuple :: MetaVariableInfo
_conclusionOmetaVariableTable :: MetaVariableTable
_conclusionOsubstitution :: MapSubstitution
_premisesIftv :: ([Int])
_premisesIjudgements :: ([(String, Tp)])
_conclusionIftv :: ([Int])
_conclusionIjudgements :: ([(String, Tp)])
_lhsOconstraints =
let conclusionSource = self (getLocalInfo _lhsIinfoTuple)
conclusionType = getType _lhsIinfoTuple
in [ (stp1 .==. conclusionType)
(addProperty FolkloreConstraint $ defaultConstraintInfo (conclusionSource, Nothing))
| (_, tp1) <- _conclusionIjudgements
, let stp1 = _lhsIsubstitution |-> tp1
, stp1 /= conclusionType
] ++
[ (getType mvinfo .==. stp1)
(defaultConstraintInfo (conclusionSource, Just (self (getLocalInfo mvinfo))))
| (s1, tp1) <- _premisesIjudgements
, (s2, mvinfo) <- _lhsImetaVariableTable
, s1 == s2
, let stp1 = _lhsIsubstitution |-> tp1
, getType mvinfo /= stp1
]
_lhsOftv =
_premisesIftv `union` _conclusionIftv
_lhsOjudgements =
_premisesIjudgements ++ _conclusionIjudgements
_premisesOinfoTuple =
_lhsIinfoTuple
_premisesOmetaVariableTable =
_lhsImetaVariableTable
_premisesOsubstitution =
_lhsIsubstitution
_conclusionOinfoTuple =
_lhsIinfoTuple
_conclusionOmetaVariableTable =
_lhsImetaVariableTable
_conclusionOsubstitution =
_lhsIsubstitution
( _premisesIftv,_premisesIjudgements) =
premises_ _premisesOinfoTuple _premisesOmetaVariableTable _premisesOsubstitution
( _conclusionIftv,_conclusionIjudgements) =
conclusion_ _conclusionOinfoTuple _conclusionOmetaVariableTable _conclusionOsubstitution
in ( _lhsOconstraints,_lhsOftv,_lhsOjudgements)))
-- Core_TypingStrategy -----------------------------------------
-- cata
sem_Core_TypingStrategy :: Core_TypingStrategy ->
T_Core_TypingStrategy
sem_Core_TypingStrategy (Siblings _functions) =
(sem_Core_TypingStrategy_Siblings _functions)
sem_Core_TypingStrategy (TypingStrategy _typeEnv _typerule _statements) =
(sem_Core_TypingStrategy_TypingStrategy _typeEnv (sem_Core_TypeRule _typerule) (sem_Core_UserStatements _statements))
-- semantic domain
type T_Core_TypingStrategy = MetaVariableInfo ->
MetaVariableTable ->
Int ->
( Assumptions,ConstraintSet,(IO ()),Int)
data Inh_Core_TypingStrategy = Inh_Core_TypingStrategy {infoTuple_Inh_Core_TypingStrategy :: MetaVariableInfo,metaVariableTable_Inh_Core_TypingStrategy :: MetaVariableTable,unique_Inh_Core_TypingStrategy :: Int}
data Syn_Core_TypingStrategy = Syn_Core_TypingStrategy {assumptions_Syn_Core_TypingStrategy :: Assumptions,constraintSet_Syn_Core_TypingStrategy :: ConstraintSet,debugIO_Syn_Core_TypingStrategy :: (IO ()),unique_Syn_Core_TypingStrategy :: Int}
wrap_Core_TypingStrategy :: T_Core_TypingStrategy ->
Inh_Core_TypingStrategy ->
Syn_Core_TypingStrategy
wrap_Core_TypingStrategy sem (Inh_Core_TypingStrategy _lhsIinfoTuple _lhsImetaVariableTable _lhsIunique) =
(let ( _lhsOassumptions,_lhsOconstraintSet,_lhsOdebugIO,_lhsOunique) = sem _lhsIinfoTuple _lhsImetaVariableTable _lhsIunique
in (Syn_Core_TypingStrategy _lhsOassumptions _lhsOconstraintSet _lhsOdebugIO _lhsOunique))
sem_Core_TypingStrategy_Siblings :: ([String]) ->
T_Core_TypingStrategy
sem_Core_TypingStrategy_Siblings functions_ =
(\ _lhsIinfoTuple
_lhsImetaVariableTable
_lhsIunique ->
(let _lhsOassumptions :: Assumptions
_lhsOconstraintSet :: ConstraintSet
_lhsOdebugIO :: (IO ())
_lhsOunique :: Int
_lhsOassumptions =
noAssumptions
_lhsOconstraintSet =
emptyTree
_lhsOdebugIO =
return ()
_lhsOunique =
_lhsIunique
in ( _lhsOassumptions,_lhsOconstraintSet,_lhsOdebugIO,_lhsOunique)))
sem_Core_TypingStrategy_TypingStrategy :: ([(String, Tp)]) ->
T_Core_TypeRule ->
T_Core_UserStatements ->
T_Core_TypingStrategy
sem_Core_TypingStrategy_TypingStrategy typeEnv_ typerule_ statements_ =
(\ _lhsIinfoTuple
_lhsImetaVariableTable
_lhsIunique ->
(let _lhsOassumptions :: Assumptions
_lhsOconstraintSet :: ConstraintSet
_lhsOunique :: Int
_lhsOdebugIO :: (IO ())
_statementsOcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_statementsOcurrentPhase :: (Maybe Int)
_statementsOcurrentPosition :: ((Int, Int))
_statementsOmetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_statementsOfromAttribute :: (Attribute -> MessageBlock)
_typeruleOinfoTuple :: MetaVariableInfo
_typeruleOmetaVariableTable :: MetaVariableTable
_typeruleOsubstitution :: MapSubstitution
_statementsOinfoTuple :: MetaVariableInfo
_statementsOmetaVariableTable :: MetaVariableTable
_statementsOsubstitution :: MapSubstitution
_typeruleIconstraints :: (TypeConstraints ConstraintInfo)
_typeruleIftv :: ([Int])
_typeruleIjudgements :: ([(String, Tp)])
_statementsIcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_statementsIcurrentPhase :: (Maybe Int)
_statementsIcurrentPosition :: ((Int, Int))
_statementsIftv :: ([Int])
_statementsImetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_lhsOassumptions =
foldr combine noAssumptions (map (getAssumptions . snd) _lhsImetaVariableTable)
_lhsOconstraintSet =
Node _allConstraintTrees
_lhsOunique =
length _normalTV + _lhsIunique
_lhsOdebugIO =
putStrLn "applying typing strategy"
_substitution =
listToSubstitution (_standardSubst ++ _specialSubst)
_allTV =
_typeruleIftv `union` _statementsIftv `union` ftv (map snd typeEnv_)
_specialTV =
concat . exactlyOnce . map ftv . filter isTVar . map snd $ _typeruleIjudgements
_normalTV =
_allTV \\ _specialTV
_standardSubst =
zip _normalTV (map TVar [_lhsIunique..])
_specialSubst =
let conclusionVar = case snd (last _typeruleIjudgements) of
TVar i -> Just i
_ -> Nothing
find' i | Just i == conclusionVar = [ (i, getType _lhsIinfoTuple) ]
| otherwise = [ (i, getType infoTuple)
| (s1, TVar j) <- _typeruleIjudgements
, i == j
, (s2,infoTuple) <- _lhsImetaVariableTable
, s1 == s2
]
in concatMap find' _specialTV
_allConstraintTrees =
listTree (reverse _typeruleIconstraints) :
Phase 999 _patchConstraints :
(map snd _statementsImetavarConstraints) ++
(reverse _statementsIcollectConstraints)
_patchConstraints =
let parent = concat (M.elems (getAssumptions _lhsIinfoTuple))
children = concat (concatMap (M.elems . getAssumptions . snd) _lhsImetaVariableTable)
(ns, tps1) = unzip (parent \\ children)
(ss, tps2) = unzip typeEnv_
zipF t1 t2 = (t1 .==. _substitution |-> t2) infoF
infoF = emptyConstraintInfo
{ location = "Typing Strategy (patch)" }
err = internalError "TS_Apply.ag" "n/a" "the type environments do not match"
in if (map show ns /= ss) then err else
zipWith zipF tps1 tps2
_statementsOcollectConstraints =
[]
_statementsOcurrentPhase =
Nothing
_statementsOcurrentPosition =
(_lhsIunique, 0)
_statementsOmetavarConstraints =
[ (s, getConstraintSet info) | (s, info) <- _lhsImetaVariableTable ]
_statementsOfromAttribute =
let locals = map f (dom _substitution)
f i = (show i, MessageType (toTpScheme (lookupInt i _substitution)))
in toMessageBlock locals _lhsIinfoTuple _lhsImetaVariableTable
_typeruleOinfoTuple =
_lhsIinfoTuple
_typeruleOmetaVariableTable =
_lhsImetaVariableTable
_typeruleOsubstitution =
_substitution
_statementsOinfoTuple =
_lhsIinfoTuple
_statementsOmetaVariableTable =
_lhsImetaVariableTable
_statementsOsubstitution =
_substitution
( _typeruleIconstraints,_typeruleIftv,_typeruleIjudgements) =
typerule_ _typeruleOinfoTuple _typeruleOmetaVariableTable _typeruleOsubstitution
( _statementsIcollectConstraints,_statementsIcurrentPhase,_statementsIcurrentPosition,_statementsIftv,_statementsImetavarConstraints) =
statements_ _statementsOcollectConstraints _statementsOcurrentPhase _statementsOcurrentPosition _statementsOfromAttribute _statementsOinfoTuple _statementsOmetaVariableTable _statementsOmetavarConstraints _statementsOsubstitution
in ( _lhsOassumptions,_lhsOconstraintSet,_lhsOdebugIO,_lhsOunique)))
-- Core_UserStatement ------------------------------------------
-- cata
sem_Core_UserStatement :: Core_UserStatement ->
T_Core_UserStatement
sem_Core_UserStatement (Equal _leftType _rightType _message) =
(sem_Core_UserStatement_Equal _leftType _rightType _message)
sem_Core_UserStatement (Pred _predClass _predType _message) =
(sem_Core_UserStatement_Pred _predClass _predType _message)
sem_Core_UserStatement (MetaVariableConstraints _name) =
(sem_Core_UserStatement_MetaVariableConstraints _name)
sem_Core_UserStatement (CorePhase _phase) =
(sem_Core_UserStatement_CorePhase _phase)
-- semantic domain
type T_Core_UserStatement = (Trees (TypeConstraint ConstraintInfo)) ->
(Maybe Int) ->
((Int, Int)) ->
(Attribute -> MessageBlock) ->
MetaVariableInfo ->
MetaVariableTable ->
([(String,Tree (TypeConstraint ConstraintInfo))]) ->
MapSubstitution ->
( (Trees (TypeConstraint ConstraintInfo)),(Maybe Int),((Int, Int)),([Int]),([(String,Tree (TypeConstraint ConstraintInfo))]))
data Inh_Core_UserStatement = Inh_Core_UserStatement {collectConstraints_Inh_Core_UserStatement :: (Trees (TypeConstraint ConstraintInfo)),currentPhase_Inh_Core_UserStatement :: (Maybe Int),currentPosition_Inh_Core_UserStatement :: ((Int, Int)),fromAttribute_Inh_Core_UserStatement :: (Attribute -> MessageBlock),infoTuple_Inh_Core_UserStatement :: MetaVariableInfo,metaVariableTable_Inh_Core_UserStatement :: MetaVariableTable,metavarConstraints_Inh_Core_UserStatement :: ([(String,Tree (TypeConstraint ConstraintInfo))]),substitution_Inh_Core_UserStatement :: MapSubstitution}
data Syn_Core_UserStatement = Syn_Core_UserStatement {collectConstraints_Syn_Core_UserStatement :: (Trees (TypeConstraint ConstraintInfo)),currentPhase_Syn_Core_UserStatement :: (Maybe Int),currentPosition_Syn_Core_UserStatement :: ((Int, Int)),ftv_Syn_Core_UserStatement :: ([Int]),metavarConstraints_Syn_Core_UserStatement :: ([(String,Tree (TypeConstraint ConstraintInfo))])}
wrap_Core_UserStatement :: T_Core_UserStatement ->
Inh_Core_UserStatement ->
Syn_Core_UserStatement
wrap_Core_UserStatement sem (Inh_Core_UserStatement _lhsIcollectConstraints _lhsIcurrentPhase _lhsIcurrentPosition _lhsIfromAttribute _lhsIinfoTuple _lhsImetaVariableTable _lhsImetavarConstraints _lhsIsubstitution) =
(let ( _lhsOcollectConstraints,_lhsOcurrentPhase,_lhsOcurrentPosition,_lhsOftv,_lhsOmetavarConstraints) = sem _lhsIcollectConstraints _lhsIcurrentPhase _lhsIcurrentPosition _lhsIfromAttribute _lhsIinfoTuple _lhsImetaVariableTable _lhsImetavarConstraints _lhsIsubstitution
in (Syn_Core_UserStatement _lhsOcollectConstraints _lhsOcurrentPhase _lhsOcurrentPosition _lhsOftv _lhsOmetavarConstraints))
sem_Core_UserStatement_Equal :: Tp ->
Tp ->
String ->
T_Core_UserStatement
sem_Core_UserStatement_Equal leftType_ rightType_ message_ =
(\ _lhsIcollectConstraints
_lhsIcurrentPhase
_lhsIcurrentPosition
_lhsIfromAttribute
_lhsIinfoTuple
_lhsImetaVariableTable
_lhsImetavarConstraints
_lhsIsubstitution ->
(let _lhsOftv :: ([Int])
_lhsOcurrentPosition :: ((Int, Int))
_lhsOcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_lhsOcurrentPhase :: (Maybe Int)
_lhsOmetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_lhsOftv =
ftv [leftType_, rightType_]
_lhsOcurrentPosition =
(\(x, y) -> (x, y+1)) _lhsIcurrentPosition
_lhsOcollectConstraints =
case _lhsIcurrentPhase of
Just phase | phase /= 5
-> Phase phase [ _newConstraint ] : _lhsIcollectConstraints
_ -> unitTree _newConstraint : _lhsIcollectConstraints
_newConstraint =
let cinfo = setTypeError (TypeError [] message [] [])
$ addProperty (uncurry IsUserConstraint _lhsIcurrentPosition)
$ inPhase emptyConstraintInfo
inPhase = case _lhsIcurrentPhase of
Just phase | phase /= 5
-> addProperty (ConstraintPhaseNumber phase)
_ -> id
message = let f = MessageOneLiner . substituteAttributes _lhsIfromAttribute
in map f (lines message_)
in (_lhsIsubstitution |-> leftType_ .==. _lhsIsubstitution |-> rightType_) cinfo
_lhsOcurrentPhase =
_lhsIcurrentPhase
_lhsOmetavarConstraints =
_lhsImetavarConstraints
in ( _lhsOcollectConstraints,_lhsOcurrentPhase,_lhsOcurrentPosition,_lhsOftv,_lhsOmetavarConstraints)))
sem_Core_UserStatement_Pred :: String ->
Tp ->
String ->
T_Core_UserStatement
sem_Core_UserStatement_Pred predClass_ predType_ message_ =
(\ _lhsIcollectConstraints
_lhsIcurrentPhase
_lhsIcurrentPosition
_lhsIfromAttribute
_lhsIinfoTuple
_lhsImetaVariableTable
_lhsImetavarConstraints
_lhsIsubstitution ->
(let _lhsOcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_lhsOftv :: ([Int])
_lhsOcurrentPhase :: (Maybe Int)
_lhsOcurrentPosition :: ((Int, Int))
_lhsOmetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_lhsOcollectConstraints =
unitTree _newConstraint : _lhsIcollectConstraints
_newConstraint =
let cinfo = setTypeError (TypeError [] message [] [])
$ addProperty (ReductionErrorInfo thePred)
$ emptyConstraintInfo
thePred = Predicate predClass_ (_lhsIsubstitution |-> predType_)
message = let f = MessageOneLiner . substituteAttributes _lhsIfromAttribute
in map f (lines message_)
in predicate thePred cinfo
_lhsOftv =
[]
_lhsOcurrentPhase =
_lhsIcurrentPhase
_lhsOcurrentPosition =
_lhsIcurrentPosition
_lhsOmetavarConstraints =
_lhsImetavarConstraints
in ( _lhsOcollectConstraints,_lhsOcurrentPhase,_lhsOcurrentPosition,_lhsOftv,_lhsOmetavarConstraints)))
sem_Core_UserStatement_MetaVariableConstraints :: String ->
T_Core_UserStatement
sem_Core_UserStatement_MetaVariableConstraints name_ =
(\ _lhsIcollectConstraints
_lhsIcurrentPhase
_lhsIcurrentPosition
_lhsIfromAttribute
_lhsIinfoTuple
_lhsImetaVariableTable
_lhsImetavarConstraints
_lhsIsubstitution ->
(let _lhsOmetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_lhsOcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_lhsOftv :: ([Int])
_lhsOcurrentPhase :: (Maybe Int)
_lhsOcurrentPosition :: ((Int, Int))
_lhsOmetavarConstraints =
filter ((name_ /=) . fst) _lhsImetavarConstraints
_lhsOcollectConstraints =
case lookup name_ _lhsImetavarConstraints of
Just tree -> tree : _lhsIcollectConstraints
Nothing -> internalError "TS_Apply.ag" "n/a" "unknown constraint set"
_lhsOftv =
[]
_lhsOcurrentPhase =
_lhsIcurrentPhase
_lhsOcurrentPosition =
_lhsIcurrentPosition
in ( _lhsOcollectConstraints,_lhsOcurrentPhase,_lhsOcurrentPosition,_lhsOftv,_lhsOmetavarConstraints)))
sem_Core_UserStatement_CorePhase :: Int ->
T_Core_UserStatement
sem_Core_UserStatement_CorePhase phase_ =
(\ _lhsIcollectConstraints
_lhsIcurrentPhase
_lhsIcurrentPosition
_lhsIfromAttribute
_lhsIinfoTuple
_lhsImetaVariableTable
_lhsImetavarConstraints
_lhsIsubstitution ->
(let _lhsOcurrentPhase :: (Maybe Int)
_lhsOftv :: ([Int])
_lhsOcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_lhsOcurrentPosition :: ((Int, Int))
_lhsOmetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_lhsOcurrentPhase =
Just phase_
_lhsOftv =
[]
_lhsOcollectConstraints =
_lhsIcollectConstraints
_lhsOcurrentPosition =
_lhsIcurrentPosition
_lhsOmetavarConstraints =
_lhsImetavarConstraints
in ( _lhsOcollectConstraints,_lhsOcurrentPhase,_lhsOcurrentPosition,_lhsOftv,_lhsOmetavarConstraints)))
-- Core_UserStatements -----------------------------------------
-- cata
sem_Core_UserStatements :: Core_UserStatements ->
T_Core_UserStatements
sem_Core_UserStatements list =
(Prelude.foldr sem_Core_UserStatements_Cons sem_Core_UserStatements_Nil (Prelude.map sem_Core_UserStatement list))
-- semantic domain
type T_Core_UserStatements = (Trees (TypeConstraint ConstraintInfo)) ->
(Maybe Int) ->
((Int, Int)) ->
(Attribute -> MessageBlock) ->
MetaVariableInfo ->
MetaVariableTable ->
([(String,Tree (TypeConstraint ConstraintInfo))]) ->
MapSubstitution ->
( (Trees (TypeConstraint ConstraintInfo)),(Maybe Int),((Int, Int)),([Int]),([(String,Tree (TypeConstraint ConstraintInfo))]))
data Inh_Core_UserStatements = Inh_Core_UserStatements {collectConstraints_Inh_Core_UserStatements :: (Trees (TypeConstraint ConstraintInfo)),currentPhase_Inh_Core_UserStatements :: (Maybe Int),currentPosition_Inh_Core_UserStatements :: ((Int, Int)),fromAttribute_Inh_Core_UserStatements :: (Attribute -> MessageBlock),infoTuple_Inh_Core_UserStatements :: MetaVariableInfo,metaVariableTable_Inh_Core_UserStatements :: MetaVariableTable,metavarConstraints_Inh_Core_UserStatements :: ([(String,Tree (TypeConstraint ConstraintInfo))]),substitution_Inh_Core_UserStatements :: MapSubstitution}
data Syn_Core_UserStatements = Syn_Core_UserStatements {collectConstraints_Syn_Core_UserStatements :: (Trees (TypeConstraint ConstraintInfo)),currentPhase_Syn_Core_UserStatements :: (Maybe Int),currentPosition_Syn_Core_UserStatements :: ((Int, Int)),ftv_Syn_Core_UserStatements :: ([Int]),metavarConstraints_Syn_Core_UserStatements :: ([(String,Tree (TypeConstraint ConstraintInfo))])}
wrap_Core_UserStatements :: T_Core_UserStatements ->
Inh_Core_UserStatements ->
Syn_Core_UserStatements
wrap_Core_UserStatements sem (Inh_Core_UserStatements _lhsIcollectConstraints _lhsIcurrentPhase _lhsIcurrentPosition _lhsIfromAttribute _lhsIinfoTuple _lhsImetaVariableTable _lhsImetavarConstraints _lhsIsubstitution) =
(let ( _lhsOcollectConstraints,_lhsOcurrentPhase,_lhsOcurrentPosition,_lhsOftv,_lhsOmetavarConstraints) = sem _lhsIcollectConstraints _lhsIcurrentPhase _lhsIcurrentPosition _lhsIfromAttribute _lhsIinfoTuple _lhsImetaVariableTable _lhsImetavarConstraints _lhsIsubstitution
in (Syn_Core_UserStatements _lhsOcollectConstraints _lhsOcurrentPhase _lhsOcurrentPosition _lhsOftv _lhsOmetavarConstraints))
sem_Core_UserStatements_Cons :: T_Core_UserStatement ->
T_Core_UserStatements ->
T_Core_UserStatements
sem_Core_UserStatements_Cons hd_ tl_ =
(\ _lhsIcollectConstraints
_lhsIcurrentPhase
_lhsIcurrentPosition
_lhsIfromAttribute
_lhsIinfoTuple
_lhsImetaVariableTable
_lhsImetavarConstraints
_lhsIsubstitution ->
(let _lhsOftv :: ([Int])
_lhsOcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_lhsOcurrentPhase :: (Maybe Int)
_lhsOcurrentPosition :: ((Int, Int))
_lhsOmetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_hdOcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_hdOcurrentPhase :: (Maybe Int)
_hdOcurrentPosition :: ((Int, Int))
_hdOfromAttribute :: (Attribute -> MessageBlock)
_hdOinfoTuple :: MetaVariableInfo
_hdOmetaVariableTable :: MetaVariableTable
_hdOmetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_hdOsubstitution :: MapSubstitution
_tlOcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_tlOcurrentPhase :: (Maybe Int)
_tlOcurrentPosition :: ((Int, Int))
_tlOfromAttribute :: (Attribute -> MessageBlock)
_tlOinfoTuple :: MetaVariableInfo
_tlOmetaVariableTable :: MetaVariableTable
_tlOmetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_tlOsubstitution :: MapSubstitution
_hdIcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_hdIcurrentPhase :: (Maybe Int)
_hdIcurrentPosition :: ((Int, Int))
_hdIftv :: ([Int])
_hdImetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_tlIcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_tlIcurrentPhase :: (Maybe Int)
_tlIcurrentPosition :: ((Int, Int))
_tlIftv :: ([Int])
_tlImetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_lhsOftv =
_hdIftv `union` _tlIftv
_lhsOcollectConstraints =
_tlIcollectConstraints
_lhsOcurrentPhase =
_tlIcurrentPhase
_lhsOcurrentPosition =
_tlIcurrentPosition
_lhsOmetavarConstraints =
_tlImetavarConstraints
_hdOcollectConstraints =
_lhsIcollectConstraints
_hdOcurrentPhase =
_lhsIcurrentPhase
_hdOcurrentPosition =
_lhsIcurrentPosition
_hdOfromAttribute =
_lhsIfromAttribute
_hdOinfoTuple =
_lhsIinfoTuple
_hdOmetaVariableTable =
_lhsImetaVariableTable
_hdOmetavarConstraints =
_lhsImetavarConstraints
_hdOsubstitution =
_lhsIsubstitution
_tlOcollectConstraints =
_hdIcollectConstraints
_tlOcurrentPhase =
_hdIcurrentPhase
_tlOcurrentPosition =
_hdIcurrentPosition
_tlOfromAttribute =
_lhsIfromAttribute
_tlOinfoTuple =
_lhsIinfoTuple
_tlOmetaVariableTable =
_lhsImetaVariableTable
_tlOmetavarConstraints =
_hdImetavarConstraints
_tlOsubstitution =
_lhsIsubstitution
( _hdIcollectConstraints,_hdIcurrentPhase,_hdIcurrentPosition,_hdIftv,_hdImetavarConstraints) =
hd_ _hdOcollectConstraints _hdOcurrentPhase _hdOcurrentPosition _hdOfromAttribute _hdOinfoTuple _hdOmetaVariableTable _hdOmetavarConstraints _hdOsubstitution
( _tlIcollectConstraints,_tlIcurrentPhase,_tlIcurrentPosition,_tlIftv,_tlImetavarConstraints) =
tl_ _tlOcollectConstraints _tlOcurrentPhase _tlOcurrentPosition _tlOfromAttribute _tlOinfoTuple _tlOmetaVariableTable _tlOmetavarConstraints _tlOsubstitution
in ( _lhsOcollectConstraints,_lhsOcurrentPhase,_lhsOcurrentPosition,_lhsOftv,_lhsOmetavarConstraints)))
sem_Core_UserStatements_Nil :: T_Core_UserStatements
sem_Core_UserStatements_Nil =
(\ _lhsIcollectConstraints
_lhsIcurrentPhase
_lhsIcurrentPosition
_lhsIfromAttribute
_lhsIinfoTuple
_lhsImetaVariableTable
_lhsImetavarConstraints
_lhsIsubstitution ->
(let _lhsOftv :: ([Int])
_lhsOcollectConstraints :: (Trees (TypeConstraint ConstraintInfo))
_lhsOcurrentPhase :: (Maybe Int)
_lhsOcurrentPosition :: ((Int, Int))
_lhsOmetavarConstraints :: ([(String,Tree (TypeConstraint ConstraintInfo))])
_lhsOftv =
[]
_lhsOcollectConstraints =
_lhsIcollectConstraints
_lhsOcurrentPhase =
_lhsIcurrentPhase
_lhsOcurrentPosition =
_lhsIcurrentPosition
_lhsOmetavarConstraints =
_lhsImetavarConstraints
in ( _lhsOcollectConstraints,_lhsOcurrentPhase,_lhsOcurrentPosition,_lhsOftv,_lhsOmetavarConstraints)))
|
roberth/uu-helium
|
src/Helium/StaticAnalysis/Directives/TS_Apply.hs
|
gpl-3.0
| 38,680
| 0
| 24
| 11,795
| 6,718
| 3,762
| 2,956
| 666
| 3
|
{-# LANGUAGE TypeOperators, LambdaCase #-}
module Typedrat.Auth (Authenticated, IsAdmin, userFromSession, authHook, adminHook, redirectView, callbackView, logoutView) where
import Control.Lens
import Control.Monad.IO.Class
import Data.Aeson
import Data.Aeson.Lens
import qualified Data.ByteString.Char8 as BS
import Data.HVect (HVect(..), ListContains)
import Data.Maybe
import qualified Data.Map as M
import qualified Data.Text as T
import Data.Text.Encoding
import Network.HTTP.Types.Status
import Network.Wai (requestHeaderReferer)
import Network.Wreq as Wreq
import Opaleye
import System.Environment
import Web.Spock hiding (params, post, header)
import Web.Spock.Action (request)
import Typedrat.DB
import Typedrat.Types
data Authenticated = Authenticated
data IsAdmin = IsAdmin
-- Has the side effect of logging out invalid users
userFromSession :: RatActionCtx ctx st (Maybe (User Hask))
userFromSession = readSession >>= \(RatSession mToken _) ->
case mToken of
Just token -> userWithToken token
Nothing -> return Nothing
verifyUser :: User Hask -> RatActionCtx ctx st Bool
verifyUser User{ _userAuthToken = Just token } = do
clientId <- liftIO $ getEnv "GITHUB_CLIENT_ID"
clientSecret <- liftIO $ getEnv "GITHUB_CLIENT_SECRET"
let url = concat ["https://api.github.com/applications/", clientId, "/tokens/", T.unpack token]
let opts = defaults & auth ?~ basicAuth (BS.pack clientId) (BS.pack clientSecret)
r <- liftIO $ getWith opts url
if r & views responseStatus (== notFound404)
then clearAuthToken token >> return False
else return True
verifyUser User{ _userAuthToken = Nothing } = return False
authHook :: RatActionCtx (HVect xs) st (HVect (Authenticated ': xs))
authHook = do
oldCtx <- getContext
mUser <- userFromSession
if isJust mUser -- && valid
then return (Authenticated :&: oldCtx)
else do
modifySession (\rs -> rs { _githubAuth = Nothing })
setStatus unauthorized401
bytes "Authentication required."
adminHook :: ListContains n Authenticated xs => RatActionCtx (HVect xs) st (HVect (IsAdmin ': xs))
adminHook = do
oldCtx <- getContext
Just User { _userName = name } <- userFromSession
if name == "typedrat"
then return (IsAdmin :&: oldCtx)
else do
setStatus unauthorized401
bytes "Administrator priviliges required."
redirectView :: RatActionCtx ctx st ()
redirectView = do
clientId <- liftIO $ getEnv "GITHUB_CLIENT_ID"
ref <- fmap decodeUtf8 . requestHeaderReferer <$> request
modifySession (\s -> s { _authRedirect = ref })
redirect $ "https://github.com/login/oauth/authorize?client_id=" `T.append` (T.pack clientId)
callbackView :: RatActionCtx ctx st ()
callbackView = do
clientId <- liftIO $ getEnv "GITHUB_CLIENT_ID"
clientSecret <- liftIO $ getEnv "GITHUB_CLIENT_SECRET"
code <- param' "code"
let url = "https://github.com/login/oauth/access_token"
let opts = defaults
& header "Accept" .~ ["application/json"]
r <- liftIO . postWith opts url . toJSON . M.fromList $
([ ("client_id", clientId)
, ("client_secret", clientSecret)
, ("code", T.unpack code)
] :: [(String, String)])
let tok = r ^. responseBody . key "access_token" . _String
sess <- modifyReadSession (\s -> s { _githubAuth = Just tok })
let url' = "https://api.github.com/user"
let opts' = opts & auth ?~ oauth2Token (encodeUtf8 tok)
user <- liftIO $ getWith opts' url'
let Just id' = user ^? responseBody . key "id" . _Integral
let name = user ^. responseBody . key "login" . _String
let profile = user ^. responseBody . key "html_url" . _String
let avatar = user ^. responseBody . key "avatar_url" . _String
let dbUser = pgUser id' name (Just tok) profile (avatar `T.append` "&s=48")
old <- userWithId $ UserId id'
runPostgres $ \conn -> if isJust old
then runUpdate conn userTable (const dbUser) (\User {_userId = oldId } -> oldId .=== UserId (pgInt4 id'))
else runInsertMany conn userTable [dbUser]
redirect . fromMaybe "/" . _authRedirect $ sess
logoutView :: (ListContains n Authenticated xs) => RatActionCtx (HVect xs) st ()
logoutView = do
modifySession (\rs -> rs { _githubAuth = Nothing })
redirect "/"
|
typedrat/typedrat-site
|
app/Typedrat/Auth.hs
|
gpl-3.0
| 4,382
| 0
| 16
| 940
| 1,374
| 705
| 669
| -1
| -1
|
{-# LANGUAGE TemplateHaskell, FlexibleInstances#-}
module Tar where
import Test.QuickCheck
import DeriveArbitrary
import Codec.Archive.Tar
import System.Posix.Types
import qualified Data.ByteString.Lazy as L
import Data.DeriveTH
import Data.Word(Word32)
import Vector
import ByteString
$(devArbitrary ''Entry)
mencode :: [Entry] -> L.ByteString
mencode = write
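-- A usage sketch (assumption; not part of the original module): the derived
-- Arbitrary instance for 'Entry' can feed 'mencode' to produce random tar
-- archives for fuzzing, e.g.
--
-- > do entries <- generate (arbitrary :: Gen [Entry])
-- >    L.writeFile "fuzz.tar" (mencode entries)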
|
fcostantini/QuickFuzz
|
src/Tar.hs
|
gpl-3.0
| 370
| 0
| 8
| 47
| 86
| 53
| 33
| 14
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.FirebaseHosting.Sites.Versions.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified version.
--
-- /See:/ <https://firebase.google.com/docs/hosting/ Firebase Hosting API Reference> for @firebasehosting.sites.versions.delete@.
module Network.Google.Resource.FirebaseHosting.Sites.Versions.Delete
(
-- * REST Resource
SitesVersionsDeleteResource
-- * Creating a Request
, sitesVersionsDelete
, SitesVersionsDelete
-- * Request Lenses
, svdXgafv
, svdUploadProtocol
, svdAccessToken
, svdUploadType
, svdName
, svdCallback
) where
import Network.Google.FirebaseHosting.Types
import Network.Google.Prelude
-- | A resource alias for @firebasehosting.sites.versions.delete@ method which the
-- 'SitesVersionsDelete' request conforms to.
type SitesVersionsDeleteResource =
"v1beta1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Deletes the specified version.
--
-- /See:/ 'sitesVersionsDelete' smart constructor.
data SitesVersionsDelete =
SitesVersionsDelete'
{ _svdXgafv :: !(Maybe Xgafv)
, _svdUploadProtocol :: !(Maybe Text)
, _svdAccessToken :: !(Maybe Text)
, _svdUploadType :: !(Maybe Text)
, _svdName :: !Text
, _svdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SitesVersionsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'svdXgafv'
--
-- * 'svdUploadProtocol'
--
-- * 'svdAccessToken'
--
-- * 'svdUploadType'
--
-- * 'svdName'
--
-- * 'svdCallback'
sitesVersionsDelete
:: Text -- ^ 'svdName'
-> SitesVersionsDelete
sitesVersionsDelete pSvdName_ =
SitesVersionsDelete'
{ _svdXgafv = Nothing
, _svdUploadProtocol = Nothing
, _svdAccessToken = Nothing
, _svdUploadType = Nothing
, _svdName = pSvdName_
, _svdCallback = Nothing
}
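-- A hedged usage sketch (not part of the generated module): with the usual
-- gogol machinery, deleting a version would look roughly like
--
-- > env <- newEnv <&> (envScopes .~ cloudPlatformScope)
-- > runResourceT . runGoogle env $
-- >     send (sitesVersionsDelete "sites/my-site/versions/my-version")
--
-- where the site and version names above are placeholders.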
-- | V1 error format.
svdXgafv :: Lens' SitesVersionsDelete (Maybe Xgafv)
svdXgafv = lens _svdXgafv (\ s a -> s{_svdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
svdUploadProtocol :: Lens' SitesVersionsDelete (Maybe Text)
svdUploadProtocol
= lens _svdUploadProtocol
(\ s a -> s{_svdUploadProtocol = a})
-- | OAuth access token.
svdAccessToken :: Lens' SitesVersionsDelete (Maybe Text)
svdAccessToken
= lens _svdAccessToken
(\ s a -> s{_svdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
svdUploadType :: Lens' SitesVersionsDelete (Maybe Text)
svdUploadType
= lens _svdUploadType
(\ s a -> s{_svdUploadType = a})
-- | Required. The name of the version to be deleted, in the format:
-- 'sites\/site-name\/versions\/versionID'
svdName :: Lens' SitesVersionsDelete Text
svdName = lens _svdName (\ s a -> s{_svdName = a})
-- | JSONP
svdCallback :: Lens' SitesVersionsDelete (Maybe Text)
svdCallback
= lens _svdCallback (\ s a -> s{_svdCallback = a})
instance GoogleRequest SitesVersionsDelete where
type Rs SitesVersionsDelete = Empty
type Scopes SitesVersionsDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/firebase"]
requestClient SitesVersionsDelete'{..}
= go _svdName _svdXgafv _svdUploadProtocol
_svdAccessToken
_svdUploadType
_svdCallback
(Just AltJSON)
firebaseHostingService
where go
= buildClient
(Proxy :: Proxy SitesVersionsDeleteResource)
mempty
|
brendanhay/gogol
|
gogol-firebasehosting/gen/Network/Google/Resource/FirebaseHosting/Sites/Versions/Delete.hs
|
mpl-2.0
| 4,685
| 0
| 15
| 1,096
| 699
| 409
| 290
| 101
| 1
|
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE PatternSynonyms #-}
module Dyno.DirectCollocation.Formulate
( CollProblem(..), CollProblem'
, DirCollOptions(..)
, MapStrategy(..)
, makeCollProblem
, mkTaus
, makeGuess
, makeGuessSim
, ocpPhaseBx
, ocpPhaseBg
, Default(..) -- for default options
) where
import GHC.Generics ( Generic, Generic1 )
import Control.Monad.State ( StateT(..), runStateT )
import Data.Default.Class ( Default(..) )
import qualified Data.Map as M
import Data.Maybe ( fromMaybe )
import Data.Proxy ( Proxy(..) )
import Data.Singletons.TypeLits (KnownNat, natVal, withKnownNat, pattern SNat)
import Data.Singletons.Prelude.Num (PNum((+)), (%+))
import Data.Vector ( Vector )
import qualified Data.Foldable as F
import qualified Data.Traversable as T
import qualified Numeric.LinearAlgebra as Mat
import Linear hiding ( dot )
import Casadi.GenericType ( GType(..) )
import Casadi.Matrix ( CMatrix )
import Casadi.MX ( MX )
import Casadi.SX ( SX )
import Dyno.Integrate ( InitialTime(..), TimeStep(..), rk45 )
import Dyno.View.View
( View(..), JTuple(..), J, S, JV
, splitJV, catJV, jfill )
import Dyno.View.M ( M, vcat, vsplit )
import qualified Dyno.View.M as M
import Dyno.View.HList ( (:*:)(..) )
import Dyno.View.Fun ( Fun, callSym, callV, expandFun, toFun )
import Dyno.View.MapFun
import Dyno.View.JVec( JVec(..), jreplicate )
import Dyno.View.Scheme ( Scheme )
import Dyno.Vectorize ( Vectorize(..), Id(..), vlength, unId )
import Dyno.TypeVecs ( Vec )
import qualified Dyno.TypeVecs as TV
import Dyno.LagrangePolynomials ( lagrangeDerivCoeffs )
import Dyno.Nlp ( Nlp(..), NlpIn(..), Bounds )
import Dyno.Ocp
import Dyno.DirectCollocation.Types
import Dyno.DirectCollocation.Dynamic ( MetaProxy(..), DynPlotPoints, dynPlotPoints )
import Dyno.DirectCollocation.Quadratures ( QuadratureRoots(..), mkTaus, interpolate )
data CollProblem x z u p r o c h q qo po fp n deg =
CollProblem
{ cpNlp :: Nlp (CollTraj x z u p n deg)
(JV fp)
(CollOcpConstraints x p r c h n deg) MX
, cpOcp :: OcpPhase x z u p r o c h q qo po fp
, cpPlotPoints :: J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO (DynPlotPoints Double)
, cpHellaOutputs :: J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO ( DynPlotPoints Double
, Vec n (StageOutputs x o h q qo po deg Double)
, Quadratures q qo Double
)
, cpConstraints :: J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO (J (CollOcpConstraints x p r c h n deg) (Vector Double))
, cpOutputs :: J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO (Vec n (StageOutputs x o h q qo po deg Double))
, cpTaus :: Vec deg Double
, cpDirCollOpts :: DirCollOptions
, cpEvalQuadratures :: Vec n (Vec deg Double) -> Double -> IO Double
, cpMetaProxy :: MetaProxy x z u p o q qo po h
-- , cpJacSparsitySpy :: String
-- , cpHessSparsitySpy :: String
}
type CollProblem' ocp =
CollProblem
(X ocp)
(Z ocp)
(U ocp)
(P ocp)
(R ocp)
(O ocp)
(C ocp)
(H ocp)
(Q ocp)
(QO ocp)
(PO ocp)
(FP ocp)
data DirCollOptions =
DirCollOptions
{ collocationRoots :: QuadratureRoots -- ^ which collocation roots to use
, mapStrategy :: MapStrategy
, mapOptions :: M.Map String GType
, unrollMapInHaskell :: Bool -- TODO(greg): remove this sooner or later
} deriving Show
instance Default DirCollOptions where
def =
DirCollOptions
{ mapStrategy = Unroll
, mapOptions = M.empty
, collocationRoots = Radau
, unrollMapInHaskell = False
}
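-- A small configuration sketch (illustrative only): options are normally tweaked
-- by record update on 'def', e.g. switching to Legendre collocation points:
--
-- > myOpts :: DirCollOptions
-- > myOpts = def { collocationRoots = Legendre }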
data QuadraturePlottingIn x z u p o q qo fp a =
-- x0 xF x z u p fp o q qo t T
QuadraturePlottingIn (J x a) (J x a) (J x a) (J z a) (J u a) (J p a) (J o a) (J q a) (J qo a) (J fp a)
(S a) (S a)
deriving (Generic, Generic1)
data QuadratureIn x z u p fp a =
-- x' x z u p fp t T
QuadratureIn (J x a) (J x a) (J z a) (J u a) (J p a) (J fp a)
(S a) (S a)
deriving (Generic, Generic1)
data QuadratureStageIn x z u p fp deg a =
-- k xzusptf fp
QuadratureStageIn (S a) (J (CollStage x z u p deg) a) (J fp a)
deriving (Generic, Generic1)
data QuadratureStageOut q deg a =
-- qdots qs qNext
QuadratureStageOut (J (JVec deg q) a) (J (JVec deg q) a) (J q a)
deriving (Generic, Generic1)
data PathCIn x z u p fp a =
-- x' x z u p t
PathCIn (J x a) (J x a) (J z a) (J u a) (J p a) (J fp a) (S a)
deriving (Generic, Generic1)
data PathCStageIn x z u p fp deg a =
-- xzusp fp ts
PathCStageIn (J (CollStage x z u p deg) a) (J fp a) (J (JV (Vec deg)) a)
deriving (Generic, Generic1)
data DaeIn x z u p fp a =
-- t p fp x' (CollPoint x z u)
DaeIn (S a) (J p a) (J fp a) (J x a) (J (CollPoint x z u) a)
deriving (Generic, Generic1)
data DaeOut r o a =
-- r o
DaeOut (J r a) (J o a)
deriving (Generic, Generic1)
instance (View x, View z, View u, View p, View o, View q, View qo, View fp)
=> Scheme (QuadraturePlottingIn x z u p o q qo fp)
instance (View x, View z, View u, View p, View fp) => Scheme (QuadratureIn x z u p fp)
instance (View x, View z, View u, View p, View fp, KnownNat deg) => Scheme (QuadratureStageIn x z u p fp deg)
instance (View q, KnownNat deg) => Scheme (QuadratureStageOut q deg)
instance (View x, View z, View u, View p, View fp) => Scheme (PathCIn x z u p fp)
instance (View x, View z, View u, View p, View fp, KnownNat deg) => Scheme (PathCStageIn x z u p fp deg)
instance (View x, View z, View u, View p, View fp) => Scheme (DaeIn x z u p fp)
instance (View r, View o) => Scheme (DaeOut r o)
makeCollProblem ::
forall x z u p r o c h q qo po fp deg n .
( KnownNat deg, KnownNat n
, Vectorize x, Vectorize p, Vectorize u, Vectorize z
, Vectorize r, Vectorize o, Vectorize h, Vectorize c, Vectorize q
, Vectorize po, Vectorize fp, Vectorize qo
)
=> DirCollOptions
-> OcpPhase x z u p r o c h q qo po fp
-> OcpPhaseInputs x z u p c h fp
-> J (CollTraj x z u p n deg) (Vector Double)
-> IO (CollProblem x z u p r o c h q qo po fp n deg)
makeCollProblem dirCollOpts ocp ocpInputs guess = do
let -- the collocation points
roots = collocationRoots dirCollOpts
taus :: Vec deg Double
taus = mkTaus roots
n = fromIntegral (natVal (Proxy :: Proxy n))
-- coefficients for getting xdot by lagrange interpolating polynomials
cijs :: Vec (deg + 1) (Vec (deg + 1) Double)
cijs =
withKnownNat (SNat @deg %+ SNat @1) $
lagrangeDerivCoeffs (0 TV.<| taus)
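          -- How these coefficients are used (explanatory note): with collocation
          -- points 0, tau_1 .. tau_deg on a stage of length h, the state derivative
          -- at tau_j is approximated as
          --   x'(tau_j) ~ (1/h) * sum_i cijs!j!i * x_i
          -- which is what 'interpolateXDots' (defined near the end of this module)
          -- evaluates, with the (1/h) scaling applied at its call sites.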
interpolate' :: View f => (J f :*: J (JVec deg f)) MX -> J f MX
interpolate' (x0 :*: xs) = case roots of
Legendre -> interpolate taus x0 (unJVec (split xs))
Radau -> TV.tvlast $ unJVec $ split xs
dynamicsFunction :: DaeIn (JV x) (JV z) (JV u) (JV p) (JV fp) SX -> DaeOut (JV r) (JV o) SX
dynamicsFunction (DaeIn t parm fixedParm x' collPoint) = DaeOut (vcat r) (vcat o)
where
CollPoint x z u = split collPoint
(r,o) = ocpDae ocp
(vsplit x') (vsplit x) (vsplit z) (vsplit u)
(vsplit parm) (vsplit fixedParm) (unId (vsplit t))
interpolateFun <- toFun "interpolate_JV_x" interpolate' mempty >>= expandFun
interpolateQFun <- toFun "interpolate_JV_q" interpolate' mempty >>= expandFun
interpolateQoFun <- toFun "interpolate_JV_qo" interpolate' mempty >>= expandFun
interpolateScalarFun <- toFun "interpolate_JV_Id" interpolate' mempty >>= expandFun
let callInterpolateScalar :: S MX -> Vec deg (S MX) -> S MX
callInterpolateScalar x0 xs = callSym interpolateScalarFun (x0 :*: cat (JVec xs))
callInterpolate :: J (JV x) MX -> Vec deg (J (JV x) MX) -> J (JV x) MX
callInterpolate x0 xs = callSym interpolateFun (x0 :*: cat (JVec xs))
callInterpolateQ :: J (JV q) MX -> Vec deg (J (JV q) MX) -> J (JV q) MX
callInterpolateQ q0 qs = callSym interpolateQFun (q0 :*: cat (JVec qs))
callInterpolateQo :: J (JV qo) MX -> Vec deg (J (JV qo) MX) -> J (JV qo) MX
callInterpolateQo q0 qs = callSym interpolateQoFun (q0 :*: cat (JVec qs))
let quadFun :: QuadratureIn (JV x) (JV z) (JV u) (JV p) (JV fp) SX -> J (JV q) SX
quadFun (QuadratureIn x' x z u p fp t tf) = quad
where
daeIn = DaeIn t p fp x' (cat (CollPoint x z u))
DaeOut _ o = dynamicsFunction daeIn
quad :: J (JV q) SX
quad = vcat $ ocpQuadratures ocp
(vsplit x) (vsplit z) (vsplit u) (vsplit p) (vsplit fp) (vsplit o)
(unId (vsplit t)) (unId (vsplit tf))
let quadOutFun :: QuadratureIn (JV x) (JV z) (JV u) (JV p) (JV fp) SX -> J (JV qo) SX
quadOutFun (QuadratureIn x' x z u p fp t tf) = quad
where
daeIn = DaeIn t p fp x' (cat (CollPoint x z u))
DaeOut _ o = dynamicsFunction daeIn
quad :: J (JV qo) SX
quad = vcat $ ocpQuadratureOutputs ocp
(vsplit x) (vsplit z) (vsplit u) (vsplit p) (vsplit fp) (vsplit o)
(unId (vsplit t)) (unId (vsplit tf))
let lagFun :: QuadratureIn (JV x) (JV z) (JV u) (JV p) (JV fp) SX -> S SX
lagFun (QuadratureIn x' x z u p fp t tf) = lag
where
daeIn = DaeIn t p fp x' (cat (CollPoint x z u))
DaeOut _ o = dynamicsFunction daeIn
lag :: S SX
lag = vcat $ Id $ ocpLagrange ocp
(vsplit x) (vsplit z) (vsplit u) (vsplit p) (vsplit fp) (vsplit o)
(unId (vsplit t)) (unId (vsplit tf))
let pathCFun :: PathCIn (JV x) (JV z) (JV u) (JV p) (JV fp) SX -> J (JV h) SX
pathCFun (PathCIn x' x z u p fp t) = h
where
daeIn = DaeIn t p fp x' (cat (CollPoint x z u))
DaeOut _ o = dynamicsFunction daeIn
h :: J (JV h) SX
h = vcat $ ocpPathC ocp
(vsplit x) (vsplit z) (vsplit u) (vsplit p) (vsplit fp) (vsplit o)
(unId (vsplit t))
quadFunSX <- toFun "quadFun" quadFun mempty
quadOutFunSX <- toFun "quadOutFun" quadOutFun mempty
lagFunSX <- toFun "lagFun" lagFun mempty
pathCFunSX <- toFun "pathCFun" pathCFun mempty
let quadraturePlottingFun ::
QuadraturePlottingIn (JV x) (JV z) (JV u) (JV p) (JV o) (JV q) (JV qo) (JV fp) SX
-> J (JV po) SX
quadraturePlottingFun (QuadraturePlottingIn x0 xF x z u p o q qo fp t tf) =
vcat $ ocpPlotOutputs ocp (vsplit x0, vsplit xF)
(vsplit x) (vsplit z) (vsplit u) (vsplit p)
(vsplit o) (vsplit q) (vsplit qo) (vsplit fp)
(unId (vsplit t)) (unId (vsplit tf))
quadPlotFunSX <- toFun "quadPlotFun" quadraturePlottingFun mempty
  let -- later we could use the intermediate points as outputs, or in path constraints
lagrangeStageFun :: QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg MX
-> QuadratureStageOut (JV Id) deg MX
lagrangeStageFun qIn = QuadratureStageOut (cat (JVec qdots)) (cat (JVec qs)) qNext
where
(qdots,qs,qNext) = toQuadratureFun n taus cijs callInterpolateScalar (callSym lagFunSX) qIn
quadratureStageFun :: QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg MX
-> QuadratureStageOut (JV q) deg MX
quadratureStageFun qIn = QuadratureStageOut (cat (JVec qdots)) (cat (JVec qs)) qNext
where
(qdots,qs,qNext) = toQuadratureFun n taus cijs callInterpolateQ (callSym quadFunSX) qIn
quadratureOutStageFun :: QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg MX
-> QuadratureStageOut (JV qo) deg MX
quadratureOutStageFun qIn = QuadratureStageOut (cat (JVec qdots)) (cat (JVec qs)) qNext
where
(qdots,qs,qNext) = toQuadratureFun n taus cijs callInterpolateQo (callSym quadOutFunSX) qIn
pathCStageFun pcIn = cat (JVec hs)
where
hs = toPathCFun n cijs (callSym pathCFunSX) pcIn
lagrangeStageFunMX <- toFun "lagrangeStageFun"
((\(QuadratureStageOut _ _ q) -> q) . lagrangeStageFun) mempty
quadratureStageFunMX <- toFun "quadratureStageFun"
((\(QuadratureStageOut _ _ q) -> q) . quadratureStageFun) mempty
pathCStageFunMX <- toFun "pathCStageFun" pathCStageFun mempty
bcFun <- toFun "bc" (\(x0:*:x1:*:x2:*:x3:*:x4:*:x5) -> vcat $ ocpBc ocp (vsplit x0) (vsplit x1) (vsplit x2) (vsplit x3) (vsplit x4) (unId (vsplit x5))) mempty
mayerFun <- toFun "mayer"
(\(x0:*:x1:*:x2:*:x3:*:x4:*:x5) ->
vcat $ Id $ ocpMayer ocp (unId (vsplit x0)) (vsplit x1) (vsplit x2) (vsplit x3) (vsplit x4) (vsplit x5)) mempty
dynFun <- toFun "dynamics" dynamicsFunction mempty
dynamicsStageFun <-
toFun "dynamicsStageFunction" (toDynamicsStage callInterpolate cijs dynFun) mempty
>>= expandFun
:: IO (Fun
(J (JV x)
:*: J (JVec deg (JTuple (JV x) (JV z)))
:*: J (JVec deg (JV u))
:*: S
:*: J (JV p)
:*: J (JV fp)
:*: J (JV (Vec deg))
)
(J (JVec deg (JV r))
:*: J (JV x)
)
)
-- let callDynamicsStageFun = callSym dynamicsStageFun
-- fixedParm has to be repeated
-- that is why it is a row matrix
let stageFun :: ( S
:*: M (JV Id) (CollStage (JV x) (JV z) (JV u) (JV p) deg)
:*: M (JV Id) (JV fp)
) MX ->
( M (JV Id) (JVec deg (JV r))
:*: M (JV Id) (JVec deg (JV h))
:*: M (JV Id) (JV x)
) MX
stageFun (k :*: collStageRow :*: fixedParm') =
(M.trans dc :*: M.trans stageHs :*: M.trans interpolatedX')
where
fixedParm = M.trans fixedParm'
dt = tf / fromIntegral n
stageTimes :: J (JV (Vec deg)) MX
stageTimes = vcat stageTimes'
where
stageT0 = k * dt
stageTimes' = pure stageT0 ^+^ dt *^ fmap realToFrac taus
collStage = M.trans collStageRow
CollStage x0 xzus parm tf = split collStage
dc :*: interpolatedX' =
callSym dynamicsStageFun
(x0 :*: xzs :*: us :*: dt :*: parm :*: fixedParm :*: stageTimes)
pathCStageIn = PathCStageIn collStage fixedParm stageTimes
stageHs = pathCStageFun pathCStageIn
xzs = cat (JVec xzs') :: J (JVec deg (JTuple (JV x) (JV z))) MX
us = cat (JVec us') :: J (JVec deg (JV u)) MX
(xzs', us') = TV.tvunzip $ fmap toTuple $ unJVec (split xzus)
toTuple xzu = (cat (JTuple x z), u)
where
CollPoint x z u = split xzu
stageFunMX <- toFun "stageFun" stageFun mempty
mapStageFunMX <- mapFun' (Proxy :: Proxy n) stageFunMX "mapDynamicsStageFun"
(mapStrategy dirCollOpts) (mapOptions dirCollOpts)
-- use repeated outputs for now
:: IO (Fun
( M (JV Id) (JV (Vec n))
:*: M (JV Id) (JVec n (CollStage (JV x) (JV z) (JV u) (JV p) deg))
:*: M (JV Id) (JVec n (JV fp))
)
( M (JV Id) (JVec n (JVec deg (JV r)))
:*: M (JV Id) (JVec n (JVec deg (JV h)))
:*: M (JV Id) (JVec n (JV x))
)
)
  ---- non-repeated outputs don't work yet, and we need them for the exact Hessian
-- :: IO (Fun
-- (S
-- :*: M (JV Id) (JVec n (CollStage (JV x) (JV z) (JV u) deg))
-- :*: M (JV Id) (JVec n (JV (Vec deg)))
-- :*: M (JV Id) (JV p)
-- :*: M (JV Id) (JV fp)
-- )
-- (M (JV Id) (JVec n (JVec deg (JV r)))
-- :*: M (JV Id) (JVec n (JVec deg (JV h)))
-- :*: M (JV Id) (JVec n (JV x))
-- )
-- )
let mapStageFun ::
( Vec n (S MX)
, J (JVec n (CollStage (JV x) (JV z) (JV u) (JV p) deg)) MX
, J (JV fp) MX
)
-> ( J (JVec n (JVec deg (JV r))) MX
, J (JVec n (JVec deg (JV h))) MX
, J (JVec n (JV x)) MX
)
mapStageFun (ks, stages, fixedParm')
| unrollMapInHaskell dirCollOpts =
let fixedParm = M.trans fixedParm'
(dcs, hs, xnexts) =
TV.tvunzip3 $ f <$> ks <*> unJVec (split stages)
f k stage = (M.trans dc, M.trans h, M.trans xnext)
where
dc :*: h :*: xnext =
callSym stageFunMX
(k :*: M.trans stage :*: fixedParm)
-- dc :*: h :*: xnext =
-- stageFun
-- (dt :*: (M.trans stage) :*: (M.trans stageTimes) :*: fixedParm)
in (cat (JVec dcs), cat (JVec hs), cat (JVec xnexts))
| otherwise =
let fixedParm = jreplicate fixedParm' :: J (JVec n (JV fp)) MX
dcs :*: hs :*: xnexts =
callSym mapStageFunMX $
M.trans (vcat ks) :*: M.trans stages :*: M.trans fixedParm
in (M.trans dcs, M.trans hs, M.trans xnexts)
let nlp :: Nlp (CollTraj x z u p n deg) (JV fp) (CollOcpConstraints x p r c h n deg) MX
nlp = Nlp {
nlpFG =
getFg
(bcFun :: Fun ( J (JV x)
:*: J (JV x)
:*: J (JV q)
:*: J (JV p)
:*: J (JV fp)
:*: S
)
(J (JV c))
)
(mayerFun :: Fun ( S
:*: J (JV x)
:*: J (JV x)
:*: J (JV q)
:*: J (JV p)
:*: J (JV fp)
)
S
)
(callSym lagrangeStageFunMX)
(callSym quadratureStageFunMX)
mapStageFun
, nlpIn =
NlpIn
{ nlpBX = cat (ocpPhaseBx ocpInputs)
, nlpBG = cat (ocpPhaseBg ocpInputs)
, nlpX0 = guess :: J (CollTraj x z u p n deg) (Vector Double)
, nlpP = catJV (ocpFixedP ocpInputs)
, nlpLamX0 = Nothing
, nlpLamG0 = Nothing
}
, nlpScaleF = ocpObjScale ocp
, nlpScaleX = Just $ cat $ fillCollTraj
(fromMaybe (pure 1) (ocpXScale ocp))
(fromMaybe (pure 1) (ocpZScale ocp))
(fromMaybe (pure 1) (ocpUScale ocp))
(fromMaybe (pure 1) (ocpPScale ocp))
(fromMaybe 1 (ocpTScale ocp))
, nlpScaleG = Just $ cat $ fillCollConstraints
(fromMaybe (pure 1) (ocpXScale ocp))
(fromMaybe (pure 1) (ocpPScale ocp))
(fromMaybe (pure 1) (ocpResidualScale ocp))
(fromMaybe (pure 1) (ocpBcScale ocp))
(fromMaybe (pure 1) (ocpPathCScale ocp))
(fromMaybe 1 (ocpTScale ocp))
}
-- callbacks and quadrature outputs
lagrangeStageFunFullMX <- toFun "lagrangeStageFunFull" lagrangeStageFun mempty
quadratureStageFunFullMX <- toFun "quadratureStageFunFull" quadratureStageFun mempty
quadratureOutStageFunFullMX <- toFun "quadratureOutStageFunFull" quadratureOutStageFun mempty
outputFun <- toFun "stageOutputs" (outputFunction n callInterpolate cijs taus dynFun) mempty
genericQuadraturesFun <- toFun "generic_quadratures"
(genericQuadraturesFunction callInterpolateScalar cijs n)
mempty
let (getHellaOutputs, getPlotPoints, getOutputs) = toCallbacks n roots taus outputFun pathCStageFunMX lagrangeStageFunFullMX quadratureStageFunFullMX quadratureOutStageFunFullMX quadPlotFunSX
evalQuadratures :: Vec n (Vec deg Double) -> Double -> IO Double
evalQuadratures qs' tf = do
let qs :: Vec n (J (JV (Vec deg)) (Vector Double))
qs = fmap catJV qs'
evalq :: J (JV (Vec deg)) (Vector Double) -> IO Double
evalq q = (unId . splitJV) <$> callV genericQuadraturesFun (q :*: catJV (Id tf))
stageIntegrals <- T.mapM evalq qs :: IO (Vec n Double)
return (F.sum stageIntegrals)
nlpConstraints <- toFun "nlp_constraints" (\(x:*:p) -> snd (nlpFG nlp x p)) mempty
let evalConstraints x p = callV nlpConstraints (x :*: p)
return $ CollProblem { cpNlp = nlp
, cpOcp = ocp
, cpPlotPoints = getPlotPoints
, cpHellaOutputs = getHellaOutputs
, cpConstraints = evalConstraints
, cpOutputs = getOutputs
, cpTaus = taus
, cpDirCollOpts = dirCollOpts
, cpEvalQuadratures = evalQuadratures
, cpMetaProxy = MetaProxy
}
toCallbacks ::
forall x z u p fp r o h q qo po n deg
. ( Vectorize x, Vectorize z, Vectorize u, Vectorize p
, Vectorize o, Vectorize h, Vectorize r, Vectorize q
, Vectorize po, Vectorize qo
, Vectorize fp
, KnownNat n, KnownNat deg
)
=> Int
-> QuadratureRoots
-> Vec deg Double
-> Fun ( J (CollStage (JV x) (JV z) (JV u) (JV p) deg)
:*: J (JV fp)
:*: S
)
( J (JVec deg (JV r))
:*: J (JVec deg (JV x))
:*: J (JVec deg (JV o))
:*: J (JV x)
)
-> Fun (PathCStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg) (J (JVec deg (JV h)))
-> Fun (QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg) (QuadratureStageOut (JV Id) deg)
-> Fun (QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg) (QuadratureStageOut (JV q) deg)
-> Fun (QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg) (QuadratureStageOut (JV qo) deg)
-> Fun (QuadraturePlottingIn (JV x) (JV z) (JV u) (JV p) (JV o) (JV q) (JV qo) (JV fp)) (J (JV po))
-> ( J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO ( DynPlotPoints Double
, Vec n (StageOutputs x o h q qo po deg Double)
, Quadratures q qo Double
)
, J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO (DynPlotPoints Double)
, J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO (Vec n (StageOutputs x o h q qo po deg Double))
)
toCallbacks n roots taus outputFun pathStageConFun lagQuadFun quadFun quadOutFun quadPlotFun =
(getHellaOutputs, getPlotPoints, getOutputs)
where
-- prepare callbacks
f :: Double
-> J (JV o) (Vector Double) -> J (JV x) (Vector Double)
-> J (JV h) (Vector Double) -> J (JV po) (Vector Double)
-> Quadratures q qo Double -> Quadratures q qo Double
-> StageOutputsPoint x o h q qo po Double
f t o' x' h' po' q q' =
StageOutputsPoint
{ sopT = t
, sopO = o'
, sopXDot = x'
, sopH = h'
, sopPo = po'
, sopQs = q
, sopQDots = q'
}
callOutputFun :: (J (JV x) (Vector Double), J (JV x) (Vector Double))
-> J (JV fp) (Vector Double)
-> Quadratures q qo Double
-> ( J (CollStage (JV x) (JV z) (JV u) (JV p) deg) (Vector Double)
, Double
)
-> IO ( StageOutputs x o h q qo po deg Double
, Quadratures q qo Double
)
callOutputFun (x0, xF) fp previousQuadratures (stage, k) = do
let CollStage stageX0 xzus p tf' = split stage
Id tf = splitJV tf'
h = tf / fromIntegral n
(_ :*: xdot :*: out :*: xnext) <- callV outputFun $ stage :*: fp :*: (catJV (Id k))
let stageTimes :: Vec deg Double
stageTimes = fmap (\tau -> stageT0 + tau * h) taus
stageT0 = h * k
stageTimes' :: J (JV (Vec deg)) (Vector Double)
stageTimes' = catJV stageTimes
pathCStageIn = PathCStageIn stage fp stageTimes'
quadratureStageIn = QuadratureStageIn (catJV (Id k)) stage fp
hs <- callV pathStageConFun pathCStageIn
QuadratureStageOut lagrQdots lagrQs lagrQNext <- callV lagQuadFun quadratureStageIn
QuadratureStageOut userQdots userQs userQNext <- callV quadFun quadratureStageIn
QuadratureStageOut outQdots outQs outQNext <- callV quadOutFun quadratureStageIn
let outs0 = unJVec (split out) :: Vec deg (J (JV o) (Vector Double))
xdots0 = unJVec (split xdot) :: Vec deg (J (JV x) (Vector Double))
hs0 = unJVec (split hs) :: Vec deg (J (JV h) (Vector Double))
lagrQs0 = fmap (unId . splitJV) $ unJVec (split lagrQs) :: Vec deg Double
userQs0 = fmap splitJV $ unJVec (split userQs) :: Vec deg (q Double)
outQs0 = fmap splitJV $ unJVec (split outQs) :: Vec deg (qo Double)
lagrQdots0 = fmap (unId . splitJV) $ unJVec (split lagrQdots) :: Vec deg Double
userQdots0 = fmap splitJV $ unJVec (split userQdots) :: Vec deg (q Double)
outQdots0 = fmap splitJV $ unJVec (split outQdots) :: Vec deg (qo Double)
qdots = TV.tvzipWith3 Quadratures lagrQdots0 userQdots0 outQdots0
qs = fmap (previousQuadratures ^+^) $ TV.tvzipWith3 Quadratures lagrQs0 userQs0 outQs0
nextQuadratures =
Quadratures
{ qLagrange = unId (splitJV lagrQNext)
, qUser = splitJV userQNext
, qOutputs = splitJV outQNext
} ^+^ previousQuadratures
let quadPlotInputs ::
Vec deg
(QuadraturePlottingIn (JV x) (JV z) (JV u) (JV p) (JV o) (JV q) (JV qo) (JV fp) (Vector Double))
quadPlotInputs =
toQuadPlotIn <$> xs <*> zs <*> us <*> outs0 <*> qUsers <*> qOuts <*> stageTimes
qUsers = fmap (catJV . qUser) qs
qOuts = fmap (catJV . qOutputs) qs
(xs,zs,us) = TV.tvunzip3 $ fmap (toXzu . split) (unJVec (split xzus))
where
toXzu (CollPoint x z u) = (x, z, u)
toQuadPlotIn x z u o q qo t = QuadraturePlottingIn x0 xF x z u p o q qo fp (catJV (Id t)) tf'
pos <- T.mapM (callV quadPlotFun) quadPlotInputs
let stageOutputs =
StageOutputs
{ soX0 = stageX0
, soT0 = stageT0
, soVec = f <$> stageTimes <*> outs0 <*> xdots0 <*> hs0 <*> pos <*> qs <*> qdots
, soXNext = xnext
, soQNext = nextQuadratures
}
return (stageOutputs, nextQuadratures)
mapOutputFun :: J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO ( Vec n (StageOutputs x o h q qo po deg Double)
, Quadratures q qo Double
)
mapOutputFun ct fp = do
let CollTraj _ _ stages xF = split ct
vstages = unJVec (split stages)
:: Vec n (J (CollStage (JV x) (JV z) (JV u) (JV p) deg) (Vector Double))
ks :: Vec n Double
ks = TV.mkVec' $ map realToFrac (take n [(0::Int)..])
CollStage x0 _ _ _ = split (TV.tvhead vstages)
quadratures0 :: Quadratures q qo Double
quadratures0 = pure 0
mapAccumM (callOutputFun (x0, xF) fp) quadratures0 (TV.tvzip vstages ks)
getHellaOutputs ::
J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO ( DynPlotPoints Double
, Vec n (StageOutputs x o h q qo po deg Double)
, Quadratures q qo Double
)
getHellaOutputs traj fp = do
(outputs, quadratures) <- mapOutputFun traj fp
return (dynPlotPoints roots (split traj) outputs, outputs, quadratures)
getPlotPoints :: J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO (DynPlotPoints Double)
getPlotPoints traj fp = do
(dpp, _, _) <- getHellaOutputs traj fp
return dpp
getOutputs :: J (CollTraj x z u p n deg) (Vector Double)
-> J (JV fp) (Vector Double)
-> IO (Vec n (StageOutputs x o h q qo po deg Double))
getOutputs traj fp = do
(outputs, _) <- mapOutputFun traj fp
return outputs
getFg ::
forall x z u p r c h q fp n deg .
( KnownNat deg, KnownNat n
, Vectorize x, Vectorize z, Vectorize u, Vectorize p
, Vectorize r, Vectorize c, Vectorize h, Vectorize q, Vectorize fp
)
-- bcFun
=> Fun ( J (JV x)
:*: J (JV x)
:*: J (JV q)
:*: J (JV p)
:*: J (JV fp)
:*: S
)
(J (JV c))
-- mayerFun
-> Fun ( S
:*: J (JV x)
:*: J (JV x)
:*: J (JV q)
:*: J (JV p)
:*: J (JV fp)
)
S
-- lagQuadFun
-> (QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg MX -> S MX)
-- quadFun
-> (QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg MX -> J (JV q) MX)
-- stageFun
-> ( ( Vec n (S MX)
, J (JVec n (CollStage (JV x) (JV z) (JV u) (JV p) deg)) MX
, J (JV fp) MX
)
-> ( J (JVec n (JVec deg (JV r))) MX
, J (JVec n (JVec deg (JV h))) MX
, J (JVec n (JV x)) MX
)
)
-- collTraj
-> J (CollTraj x z u p n deg) MX
-- parameter
-> J (JV fp) MX
-- (objective, constraints)
-> (S MX, J (CollOcpConstraints x p r c h n deg) MX)
getFg bcFun mayerFun lagQuadFun quadFun
mapStageFun collTraj fixedParm = (obj, cat g)
where
-- split up the design vars
CollTraj masterTf masterParm stages' xf = split collTraj
stages = unJVec (split stages') :: Vec n (J (CollStage (JV x) (JV z) (JV u) (JV p) deg) MX)
spstages = fmap split stages :: Vec n (CollStage (JV x) (JV z) (JV u) (JV p) deg MX)
obj = objLagrange + objMayer
objMayer = callSym mayerFun (masterTf :*: x0 :*: xf :*: finalQuadratures :*: masterParm :*: fixedParm)
objLagrange :: S MX
objLagrange = F.sum $ oneQuadStage lagQuadFun <$> ks <*> stages
finalQuadratures :: J (JV q) MX
finalQuadratures = F.sum $ oneQuadStage quadFun <$> ks <*> stages
oneQuadStage ::
(QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg MX -> J qOrSomething MX)
-> S MX
-> J (CollStage (JV x) (JV z) (JV u) (JV p) deg) MX
-> J qOrSomething MX
oneQuadStage qfun k collStage = qfun qInputs
where
qInputs :: QuadratureStageIn (JV x) (JV z) (JV u) (JV p) (JV fp) deg MX
qInputs = QuadratureStageIn k collStage fixedParm
ks :: Vec n (S MX)
ks = fmap realToFrac $ TV.mkVec' $ take n [(0::Int)..]
where
n = fromIntegral (natVal (Proxy :: Proxy n))
-- initial point at each stage
x0s :: Vec n (J (JV x) MX)
-- parameter at each stage
ps :: Vec n (J (JV p) MX)
-- trajectory end time from each stage
tfs :: Vec n (S MX)
(x0s, ps, tfs) = TV.tvunzip3 $ fmap (\(CollStage x0' _ p' tf') -> (x0', p', tf')) spstages
-- final point at each stage (for matching constraint)
xfs :: Vec n (J (JV x) MX)
xfs = TV.tvshiftl x0s xf
x0 = (\(CollStage x0' _ _ _) -> x0') (TV.tvhead spstages)
g = CollOcpConstraints
{ coCollPoints = dcs
, coContinuity = integratorMatchingConstraints
, coPathC = hs
, coBc = callSym bcFun (x0 :*: xf :*: finalQuadratures :*: masterParm :*: fixedParm :*: masterTf)
, coParams = cat $ JVec $ fmap (masterParm -) ps
, coTfs = cat $ JVec $ fmap (masterTf -) tfs
}
integratorMatchingConstraints :: J (JVec n (JV x)) MX -- THIS SHOULD BE A NONLINEAR FUNCTION
integratorMatchingConstraints = interpolatedXs - (cat (JVec xfs))
dcs :: J (JVec n (JVec deg (JV r))) MX
hs :: J (JVec n (JVec deg (JV h))) MX
interpolatedXs :: J (JVec n (JV x)) MX
(dcs, hs, interpolatedXs) = mapStageFun (ks, stages', fixedParm)
ocpPhaseBx :: forall x z u p c h fp n deg .
( KnownNat n, KnownNat deg
, Vectorize x, Vectorize z, Vectorize u, Vectorize p
)
=> OcpPhaseInputs x z u p c h fp
-> CollTraj x z u p n deg (Vector Bounds)
ocpPhaseBx ocpInputs =
CollTraj
{ ctTf = catJV (Id (ocpTbnd ocpInputs))
, ctP = catJV (ocpPbnd ocpInputs)
, ctStages = jreplicate (cat stageBounds)
, ctXf = jfill (Nothing, Nothing)
}
where
stageBounds :: CollStage (JV x) (JV z) (JV u) (JV p) deg (Vector Bounds)
stageBounds = CollStage
(jfill (Nothing, Nothing))
(jreplicate (cat pointBounds))
(jfill (Nothing, Nothing))
(jfill (Nothing, Nothing))
pointBounds :: CollPoint (JV x) (JV z) (JV u) (Vector Bounds)
pointBounds = CollPoint
(catJV (ocpXbnd ocpInputs))
(catJV (ocpZbnd ocpInputs))
(catJV (ocpUbnd ocpInputs))
ocpPhaseBg :: forall x z u p r c h fp n deg .
( KnownNat n, KnownNat deg
, Vectorize x, Vectorize p, Vectorize r, Vectorize c, Vectorize h
)
=> OcpPhaseInputs x z u p c h fp
-> CollOcpConstraints x p r c h n deg (Vector Bounds)
ocpPhaseBg ocpInputs =
CollOcpConstraints
{ coCollPoints = jreplicate (jfill (Just 0, Just 0)) -- dae residual constraint
, coContinuity = jreplicate (jfill (Just 0, Just 0)) -- continuity constraint
, coPathC = jreplicate (jreplicate hbnds)
, coBc = catJV (ocpBcBnds ocpInputs)
, coParams = jreplicate (jfill (Just 0, Just 0))
, coTfs = jreplicate (jfill (Just 0, Just 0))
}
where
hbnds :: J (JV h) (Vector Bounds)
hbnds = catJV (ocpPathCBnds ocpInputs)
toQuadratureFun ::
forall x z u p fp q deg
. ( View q, View x, View z, View u, View p, KnownNat deg
)
=> Int
-> Vec deg Double
-> Vec (deg + 1) (Vec (deg + 1) Double)
-> (J q MX -> Vec deg (J q MX) -> J q MX)
-> (QuadratureIn x z u p fp MX -> J q MX)
-> QuadratureStageIn x z u p fp deg MX
-> (Vec deg (J q MX), Vec deg (J q MX), J q MX)
toQuadratureFun n taus cijs interpolate' evalQuadDeriv (QuadratureStageIn k collStage fp) =
(qdots, qs, qnext)
where
CollStage x0 xzus' p tf = split collStage
xzus = fmap split (unJVec (split xzus')) :: Vec deg (CollPoint x z u MX)
h = tf / fromIntegral n
xs :: Vec deg (J x MX)
xs = fmap (\(CollPoint x _ _) -> x) xzus
-- state derivatives, maybe these could be useful as outputs
xdots :: Vec deg (J x MX)
xdots = fmap (`M.ms` (1/h)) $ interpolateXDots cijs (x0 TV.<| xs)
quadratureIns :: Vec deg (QuadratureIn x z u p fp MX)
quadratureIns = TV.tvzipWith3 (\x' (CollPoint x z u) t -> QuadratureIn x' x z u p fp t tf)
xdots xzus stageTimes
qdots :: Vec deg (J q MX)
qdots = fmap evalQuadDeriv quadratureIns
stageTimes :: Vec deg (S MX)
stageTimes = pure stageT0 ^+^ h *^ fmap realToFrac taus
where
stageT0 = k * h
qnext :: J q MX
qnext = interpolate' (0 :: J q MX) qs
qs = fmap timesH qsOverH
where
timesH q = M.ms q h
qsOverH :: Vec deg (J q MX)
qsOverH = cijInvFr !* qdots
cijs' :: Vec deg (Vec deg Double)
cijs' = TV.tvtail $ fmap TV.tvtail cijs
cijMat :: Mat.Matrix Double
cijMat = Mat.fromLists $ F.toList $ fmap F.toList cijs'
cijInv' :: Mat.Matrix Double
cijInv' = Mat.inv cijMat
cijInv :: Vec deg (Vec deg Double)
cijInv = TV.mkVec' (map TV.mkVec' (Mat.toLists cijInv'))
cijInvFr :: Vec deg (Vec deg (J q MX))
cijInvFr = fmap (fmap realToFrac) cijInv
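    -- A note on the quadrature recovery above (explanatory only): at the interior
    -- collocation nodes the quadrature state q (with q = 0 at the stage start)
    -- satisfies C * (q / h) = qdot, where C is the interior block of the Lagrange
    -- derivative coefficients, so
    --   q = h * C^{-1} * qdot.
    -- 'qnext' then interpolates these nodal values to the end of the stage
    -- (or simply takes the last node for Radau points).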
toPathCFun ::
forall x z u p fp h deg
. ( View x, View z, View u, View p, KnownNat deg
)
=> Int
-> Vec (deg + 1) (Vec (deg + 1) Double)
-> (PathCIn x z u p fp MX -> J h MX)
-> PathCStageIn x z u p fp deg MX
-> Vec deg (J h MX)
toPathCFun n cijs evalPathC (PathCStageIn collStage fp stageTimes') = hs
where
CollStage x0 xzus' p tf = split collStage
h = tf / fromIntegral n
xzus = fmap split (unJVec (split xzus')) :: Vec deg (CollPoint x z u MX)
xs :: Vec deg (J x MX)
xs = fmap (\(CollPoint x _ _) -> x) xzus
-- state derivatives, maybe these could be useful as outputs
xdots :: Vec deg (J x MX)
xdots = fmap (`M.ms` (1/h)) $ interpolateXDots cijs (x0 TV.<| xs)
pathCIns :: Vec deg (PathCIn x z u p fp MX)
pathCIns = TV.tvzipWith3 (\x' (CollPoint x z u) t -> PathCIn x' x z u p fp t)
xdots xzus stageTimes
hs :: Vec deg (J h MX)
hs = fmap evalPathC pathCIns
stageTimes :: Vec deg (S MX)
stageTimes = vsplit stageTimes'
-- todo: merging this with evaluateQuadraturesFunction would reduce duplication,
-- but could be inefficient
genericQuadraturesFunction ::
forall deg
. KnownNat deg
=> (S MX -> Vec deg (S MX) -> S MX)
-> Vec (deg + 1) (Vec (deg + 1) Double)
-> Int
-> (J (JV (Vec deg)) :*: S) MX
-> S MX
genericQuadraturesFunction interpolate' cijs' n (qdots' :*: tf) =
dt * qnext
where
dt = tf / fromIntegral n
qdots :: Vec deg (S MX)
qdots = vsplit qdots'
qnext :: S MX
qnext = interpolate' 0 qs
qs = cijInvFr !* qdots
cijs :: Vec deg (Vec deg Double)
cijs = TV.tvtail $ fmap TV.tvtail cijs'
cijMat :: Mat.Matrix Double
cijMat = Mat.fromLists $ F.toList $ fmap F.toList cijs
cijInv' :: Mat.Matrix Double
cijInv' = Mat.inv cijMat
cijInv :: Vec deg (Vec deg Double)
cijInv = TV.mkVec' (map TV.mkVec' (Mat.toLists cijInv'))
cijInvFr :: Vec deg (Vec deg (S MX))
cijInvFr = fmap (fmap realToFrac) cijInv
-- todo: code duplication
dot :: forall x deg a b. (View x, CMatrix a, Real b, KnownNat deg)
=> Vec deg b -> Vec deg (J x a) -> J x a
dot cks xs = F.sum $ TV.unVec elemwise
where
elemwise :: Vec deg (J x a)
elemwise = TV.tvzipWith smul cks xs
smul :: b -> J x a -> J x a
smul x y = realToFrac x * y
-- todo: code duplication
interpolateXDots' :: (Real b, View x, CMatrix a, KnownNat deg)
=> Vec deg (Vec deg b) -> Vec deg (J x a) -> Vec deg (J x a)
interpolateXDots' cjks xs = fmap (`dot` xs) cjks
interpolateXDots ::
forall b deg x a
. (Real b, KnownNat deg, View x, CMatrix a)
=> Vec (deg + 1) (Vec (deg + 1) b)
-> Vec (deg + 1) (J x a)
-> Vec deg (J x a)
interpolateXDots cjks xs =
withKnownNat (SNat @deg %+ SNat @1) $
TV.tvtail $ interpolateXDots' cjks xs
-- return dynamics constraints and interpolated state
toDynamicsStage ::
forall x z u p fp r o deg . (KnownNat deg, View x, View z, View u, View p, View fp, View r, View o)
=> (J x MX -> Vec deg (J x MX) -> J x MX)
-> Vec (deg + 1) (Vec (deg + 1) Double)
-> Fun (DaeIn x z u p fp) (DaeOut r o)
-> (J x :*: J (JVec deg (JTuple x z)) :*: J (JVec deg u) :*: S :*: J p :*: J fp :*: J (JV (Vec deg))) MX
-> (J (JVec deg r) :*: J x) MX
toDynamicsStage interpolate' cijs dynFun (x0 :*: xzs' :*: us' :*: h :*: p :*: fp :*: stageTimes') =
cat (JVec dynConstrs) :*: xnext
where
xzs = fmap split (unJVec (split xzs')) :: Vec deg (JTuple x z MX)
us = unJVec (split us') :: Vec deg (J u MX)
-- interpolated final state
xnext :: J x MX
xnext = interpolate' x0 xs
stageTimes = vsplit stageTimes'
-- dae constraints (dynamics)
dynConstrs :: Vec deg (J r MX)
(dynConstrs, _) = TV.tvunzip $ TV.tvzipWith4 applyDae xdots xzs us stageTimes
applyDae :: J x MX -> JTuple x z MX -> J u MX -> S MX -> (J r MX, J o MX)
applyDae x' (JTuple x z) u t = (r, o)
where
DaeOut r o = callSym dynFun (DaeIn t p fp x' collPoint)
collPoint = cat (CollPoint x z u)
-- state derivatives, maybe these could be useful as outputs
xdots :: Vec deg (J x MX)
xdots = fmap (`M.ms` (1/h)) $ interpolateXDots cijs (x0 TV.<| xs)
xs :: Vec deg (J x MX)
xs = fmap (\(JTuple x _) -> x) xzs
-- outputs
outputFunction ::
forall x z u p fp r o deg . (KnownNat deg, View x, View z, View u, View p, View fp, View r, View o)
=> Int
-> (J x MX -> Vec deg (J x MX) -> J x MX)
-> Vec (deg + 1) (Vec (deg + 1) Double) -> Vec deg Double
-> Fun (DaeIn x z u p fp) (DaeOut r o)
-> (J (CollStage x z u p deg) :*: J fp :*: S) MX
-> (J (JVec deg r) :*: J (JVec deg x) :*: J (JVec deg o) :*: J x) MX
outputFunction n callInterpolate cijs taus dynFun (collStage :*: fp :*: k) =
cat (JVec dynConstrs) :*: cat (JVec xdots) :*: cat (JVec outputs) :*: xnext
where
xzus = unJVec (split xzus') :: Vec deg (J (CollPoint x z u) MX)
CollStage x0 xzus' p tf = split collStage
-- times at each collocation point
stageTimes :: Vec deg (S MX)
stageTimes = fmap (\tau -> t0 + realToFrac tau * h) taus
t0 = k*h
h = tf / fromIntegral n
xnext = callInterpolate x0 xs
-- dae constraints (dynamics)
dynConstrs :: Vec deg (J r MX)
outputs :: Vec deg (J o MX)
(dynConstrs, outputs) = TV.tvunzip $ TV.tvzipWith3 applyDae xdots xzus stageTimes
applyDae :: J x MX -> J (CollPoint x z u) MX -> S MX -> (J r MX, J o MX)
applyDae x' xzu t = (r, o)
where
DaeOut r o = callSym dynFun (DaeIn t p fp x' xzu)
-- state derivatives, maybe these could be useful as outputs
xdots :: Vec deg (J x MX)
xdots = fmap (`M.ms` (1/h)) $ interpolateXDots cijs (x0 TV.<| xs)
xs :: Vec deg (J x MX)
xs = fmap ((\(CollPoint x _ _) -> x) . split) xzus
-- | make an initial guess
makeGuess ::
forall x z u p deg n .
( KnownNat n, KnownNat deg
, Vectorize x, Vectorize z, Vectorize u, Vectorize p
)
=> QuadratureRoots
-> Double -> (Double -> x Double) -> (Double -> z Double) -> (Double -> u Double)
-> p Double
-> CollTraj x z u p n deg (Vector Double)
makeGuess quadratureRoots tf guessX guessZ guessU parm =
CollTraj (catJV (Id tf)) (catJV parm) guesses (catJV (guessX tf))
where
-- timestep
dt = tf / fromIntegral n
n = vlength (Proxy :: Proxy (Vec n))
-- initial time at each collocation stage
t0s :: Vec n Double
t0s = TV.mkVec' $ take n [dt * fromIntegral k | k <- [(0::Int)..]]
-- times at each collocation point
times :: Vec n (Double, Vec deg Double)
times = fmap (\t0 -> (t0, fmap (\tau -> t0 + tau*dt) taus)) t0s
mkGuess' :: (Double, Vec deg Double) -> CollStage (JV x) (JV z) (JV u) (JV p) deg (Vector Double)
mkGuess' (t,ts) =
CollStage (catJV (guessX t))
(cat $ JVec $ fmap (\t' -> cat (CollPoint (catJV (guessX t')) (catJV (guessZ t')) (catJV (guessU t')))) ts)
(catJV parm)
(catJV (Id tf))
guesses :: J (JVec n (CollStage (JV x) (JV z) (JV u) (JV p) deg)) (Vector Double)
guesses = cat $ JVec $ fmap (cat . mkGuess') times
-- the collocation points
taus :: Vec deg Double
taus = mkTaus quadratureRoots
-- | make an initial guess
makeGuessSim ::
forall x z u p deg n .
( KnownNat n, KnownNat deg
, Vectorize x, Vectorize z, Vectorize u, Vectorize p
)
=> QuadratureRoots
-> Double
-> x Double
-> (Double -> x Double -> u Double -> x Double)
-> (Double -> x Double -> u Double)
-> p Double
-> CollTraj x z u p n deg (Vector Double)
makeGuessSim quadratureRoots tf x00 ode guessU p =
CollTraj (jfill tf) (catJV p) (cat (JVec stages)) (catJV xf)
where
-- timestep
dt = tf / fromIntegral n
n = vlength (Proxy :: Proxy (Vec n))
-- initial time at each collocation stage
t0s :: Vec n Double
t0s = TV.mkVec' $ take n [dt * fromIntegral k | k <- [(0::Int)..]]
xf :: x Double
stages :: Vec n (J (CollStage (JV x) (JV z) (JV u) (JV p) deg) (Vector Double))
(xf, stages) = T.mapAccumL stageGuess x00 t0s
stageGuess :: x Double -> Double
-> (x Double, J (CollStage (JV x) (JV z) (JV u) (JV p) deg) (Vector Double))
stageGuess x0 t0 = (fst (integrate 1), cat (CollStage (catJV x0) points (catJV p) (catJV (Id tf))))
where
points = cat $ JVec $ fmap (toCollPoint . integrate) taus
f :: Double -> x Double -> x Double
f t x = ode t x u
where
u = guessU t x
toCollPoint (x,u) = cat $ CollPoint (catJV x) (catJV (pure 0 :: z Double)) (catJV u)
integrate localTau = (x, u)
where
t = localTau * dt
x = rk45 f (InitialTime t0) (TimeStep t) x0
u = guessU t x
-- the collocation points
taus :: Vec deg Double
taus = mkTaus quadratureRoots
-- http://stackoverflow.com/questions/11652809/how-to-implement-mapaccumm
-- thanks rconner
mapAccumM :: (Monad m, T.Traversable t) => (a -> b -> m (c, a)) -> a -> t b -> m (t c, a)
mapAccumM f = flip (runStateT . (T.traverse (StateT . (flip f))))
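-- A small illustrative sketch (not from the original source): mapAccumM threads an
-- accumulator through a Traversable inside a monad, e.g. running sums in Maybe.
_mapAccumMExample :: Maybe ([Int], Int)
_mapAccumMExample = mapAccumM (\acc x -> Just (acc + x, acc + x)) 0 [1, 2, 3]
-- == Just ([1,3,6],6): each step emits the running sum and carries it forward.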
|
ghorn/dynobud
|
dynobud/src/Dyno/DirectCollocation/Formulate.hs
|
lgpl-3.0
| 45,228
| 0
| 23
| 14,428
| 18,773
| 9,601
| 9,172
| 877
| 2
|
module CLITest (tests) where
import Test.Tasty
import Test.Tasty.HUnit
import DMSS.CLI.Internal
import DMSS.CLI ( process
, Cli (Cli)
, FlagSilent (SilentOff)
)
import DMSS.Daemon.Common ( cliPort )
import DMSS.CLI.Command ( Command (Id)
, IdCommand (IdCreate, IdList))
import System.Directory (doesPathExist, doesFileExist)
import Common
tests :: [TestTree]
tests =
[ testCase "create_user_prompt" createUserTest
, testCase "remove_user_prompt" removeUserTest
, testCase "data directory created if needed" datadirCreated
]
tempDir :: FilePath
tempDir = "cliTest"
createUserTest :: Assertion
createUserTest = withTemporaryTestDirectory tempDir ( \_ -> do
-- Simulate a create user command.
-- For example: dmss-cli id create -n "joe blow"
_ <- processIdCreate "joe blow" "password"
-- Simply check that the header and one entry is returned
l <- processIdList
length (lines l) @?= 2
)
removeUserTest :: Assertion
removeUserTest = withTemporaryTestDirectory tempDir ( \_ -> do
-- Simulate a create user and then removing a user.
-- For example:
-- $ dmss-cli id create -n "donald_trump" -p "password"
-- $ dmss-cli id list
-- NAME
-- donald_trum...
-- $ dmss-cli id remove donald_trump
_ <- processIdCreate "donald_trump" "topSecret!"
-- Get the fingerprint from result of `processIdList`
l <- processIdList
length (lines l) @?= 2
-- Remove the created user ID
_ <- processIdRemove "donald_trump"
-- Simply check that no results are returned
l' <- processIdList
length (lines l') @?= 1
)
datadirCreated :: Assertion
datadirCreated = withTemporaryTestDirectory tempDir
( \homedir -> do
let newDatadir = homedir ++ "/test"
-- Simulate a list user command which will trigger data directory creation.
process (Cli (Just newDatadir) cliPort SilentOff (Id IdList))
-- Verify data directory was created
pathExists <- doesPathExist newDatadir
assertBool (newDatadir ++ " directory has been created.") pathExists
-- Simulate a create user command which will trigger database creation.
process (Cli (Just newDatadir) cliPort SilentOff (Id (IdCreate (Just "new user") (Just "Password"))))
-- Verify database was created
let sqlFile = newDatadir ++ "/.local/share/dmss/dmss.sqlite"
dataExists <- doesFileExist sqlFile
assertBool (sqlFile ++ " database has been created.") dataExists
)
|
dmp1ce/DMSS
|
src-test/CLITest.hs
|
unlicense
| 2,523
| 0
| 18
| 572
| 493
| 265
| 228
| 43
| 1
|
module Util (
module Data.List,
module Console,
module Util,
module Data.Char
)
where
import Database.HDBC
import Database.HDBC.PostgreSQL
import Console
import Data.List (intercalate)
import Data.Char (isSpace)
gs :: SqlValue -> String
gs y@(SqlByteString x) = fromSql y
gs SqlNull = ""
gb :: SqlValue -> Bool
gb y@(SqlBool x) = fromSql y
gi :: SqlValue -> Int
gi y@(SqlInt32 x) = fromSql y
data Comparison a = Equal a | LeftOnly a | RightOnly a | Unequal a a
sok = concat [setColor dullGreen, [charCheck], " "]
nok = concat [setColor dullRed, setAttr bold, [charNotEquals], " "]
trim [] = []
trim x@(a:y) = if (isSpace a) then trim y else x
compareIgnoringWhiteSpace :: String -> String -> Bool
compareIgnoringWhiteSpace x y = ciws (trim x) (trim y)
where ciws x@(a:p) y@(b:q) =
if (isSpace a && isSpace b) then ciws (trim p) (trim q) else
if (a == b) then ciws p q else False
ciws x [] = null (trim x)
ciws [] y = null (trim y)
count x a = foldl (flip ((+) . fromEnum . x)) 0 a
dcount x y = foldl (\(a,b) z -> if (x z) then (a+1,b) else (a,b+1)) (0,0) y
iseq x = case x of { Equal _ -> True; _ -> False }
class Ord a => Comparable a where
-- doDbCompare :: [a] -> [a] -> [Comparison a]
dbCompare :: [a] -> [a] -> [Comparison a]
dbCompare x@(a:r) [] = map LeftOnly x
dbCompare [] y@(a:r) = map RightOnly y
dbCompare [] [] = []
dbCompare x@(a:r) y@(b:s) = case compare a b of
EQ -> objCmp a b : dbCompare r s
LT -> LeftOnly a : dbCompare r y
GT -> RightOnly b : dbCompare x s
objCmp :: a -> a -> Comparison a
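-- A minimal sketch, not part of the original file: a hypothetical instance for Int
-- in which equal keys always compare as Equal, purely to illustrate the interface.
-- With it, dbCompare [1,2,4] [2,3,4] gives [LeftOnly 1, Equal 2, RightOnly 3, Equal 4].
instance Comparable Int where
  objCmp a b = if a == b then Equal a else Unequal a b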
|
sourcewave/pg-schema-diff
|
Util.hs
|
unlicense
| 1,622
| 0
| 11
| 403
| 815
| 435
| 380
| 42
| 5
|
module RandomSpec where
import Control.Applicative
import qualified Data.Vector as V
import SpecUtils
import WaTor.Random
import Test.Hspec
import Test.Hspec.Expectations.Contrib
spec :: Spec
spec = do
describe "shuffle" $
it "should not return items in the same order." $ randomly $ \g -> do
let v = V.fromList [1..100]
shuffle v g `shouldNotReturn` v
let v = V.fromList [1..100]
describe "randomSwap" $ do
it "should do nothing for item 0." $ randomly $ \g -> do
v' <- V.thaw v
randomSwap g v' 0 `shouldReturn` 1
V.freeze v' `shouldReturn` v
it "should swap the first two for item 1." $ randomly $ \g -> do
v' <- V.thaw v
randomSwap g v' 1 `shouldReturn` 2
(take 5 . V.toList <$> V.freeze v') `shouldReturn` [2, 1, 3, 4, 5]
it "should randomly swap the given item with another." $ randomly $ \g -> do
v' <- V.thaw v
randomSwap g v' 2 `shouldReturnSatisfy` \y -> y == 1 || y == 2
(take 5 . V.toList <$> V.freeze v') `shouldReturnSatisfy` \list ->
list == [3, 2, 1, 4, 5] || list == [1, 3, 2, 4, 5]
|
erochest/wa-tor
|
specs/RandomSpec.hs
|
apache-2.0
| 1,278
| 0
| 19
| 463
| 429
| 223
| 206
| 28
| 1
|
module HW7.Buffer where
-- Type class for data structures that can represent the text buffer
-- of an editor.
class Buffer b where
-- | Convert a buffer to a String.
toString :: b -> String
-- | Create a buffer from a String.
fromString :: String -> b
-- | Extract the nth line (0-indexed) from a buffer. Return Nothing
-- for out-of-bounds indices.
line :: Int -> b -> Maybe String
-- | @replaceLine n ln buf@ returns a modified version of @buf@,
-- with the @n@th line replaced by @ln@. If the index is
-- out-of-bounds, the buffer should be returned unmodified.
replaceLine :: Int -> String -> b -> b
-- | Compute the number of lines in the buffer.
numLines :: b -> Int
-- | Compute the value of the buffer, i.e. the amount someone would
-- be paid for publishing the contents of the buffer.
value :: b -> Int
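-- A minimal sketch, not part of the original exercise file: a hypothetical
-- instance over a plain list of lines, with "value" defined as the total word
-- count, purely to illustrate how the interface fits together.
newtype LineList = LineList [String]

instance Buffer LineList where
  toString (LineList ls) = unlines ls
  fromString = LineList . lines
  line n (LineList ls)
    | n >= 0 && n < length ls = Just (ls !! n)
    | otherwise = Nothing
  replaceLine n ln buf@(LineList ls)
    | n >= 0 && n < length ls = LineList (take n ls ++ ln : drop (n + 1) ls)
    | otherwise = buf
  numLines (LineList ls) = length ls
  value (LineList ls) = length (concatMap words ls)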
|
cgag/cis-194-solutions
|
src/HW7/Buffer.hs
|
bsd-3-clause
| 861
| 0
| 9
| 205
| 96
| 57
| 39
| 8
| 0
|
import qualified Pt.StateMachineTest as SM
import Test.HUnit
import Control.Monad (void,mapM_)
main :: IO ()
main = void $ mapM_ runTestTT [ SM.tests ]
|
mrak/ptui
|
test/Spec.hs
|
bsd-3-clause
| 154
| 0
| 8
| 25
| 56
| 32
| 24
| 5
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Render.OpenGL.GLFWbGLDriver ( glfwbGLDriver
, gldInit
, gldSetMode
, gldShutdown
, gldBeginFrame
, gldEndFrame
, gldAppActivate
, gldEnableLogging
, gldLogNewFrame
, gldGetModeList
, gldUpdateScreen
, gldSetSwapInterval
) where
import Control.Lens (use, (.=), (^.), zoom, _1, _2)
import Control.Monad (when)
import Data.Maybe (isNothing, isJust, fromJust)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.Vector as V
import qualified Graphics.UI.GLFW as GLFW
import Types
import QuakeState
import QCommon.XCommandT
import Render.OpenGL.GLDriver
import Render.VideoMode
import qualified Constants
import qualified Client.VID as VID
import qualified Render.RenderAPIConstants as RenderAPIConstants
glfwbGLDriver :: GLDriver
glfwbGLDriver =
GLDriver { _gldInit = glDriverInit
, _gldSetMode = glDriverSetMode
, _gldShutdown = glDriverShutdown
, _gldBeginFrame = glDriverBeginFrame
, _gldEndFrame = glDriverEndFrame
, _gldAppActivate = glDriverAppActivate
, _gldEnableLogging = glDriverEnableLogging
, _gldLogNewFrame = glDriverLogNewFrame
, _gldGetModeList = glDriverGetModeList
, _gldUpdateScreen = glDriverUpdateScreen
, _gldSetSwapInterval = glDriverSetSwapInterval
}
glDriverSetSwapInterval :: Int -> Quake ()
glDriverSetSwapInterval v = io $ GLFW.swapInterval v
glDriverGetModeList :: Quake (V.Vector VideoMode)
glDriverGetModeList = do
Just oldMode <- use $ glfwbGlobals.glfwbOldDisplayMode
Just monitor <- io $ GLFW.getPrimaryMonitor
vm <- io $ GLFW.getVideoModes monitor
case vm of
Nothing -> return $ V.fromList (fmap GLFWbVideoMode [oldMode])
Just modes -> do
-- TODO: implement this stuff:
{-
int j = 0;
DisplayMode ml = null;
for (j = 0; j < l.size(); j++) {
ml = (DisplayMode)l.get(j);
if (ml.getWidth() > m.getWidth()) break;
if (ml.getWidth() == m.getWidth() && ml.getHeight() >= m.getHeight()) break;
}
if (j == l.size()) {
l.addLast(m);
} else if (ml.getWidth() > m.getWidth() || ml.getHeight() > m.getHeight()) {
l.add(j, m);
} else if (m.getRefreshRate() > ml.getRefreshRate()){
l.remove(j);
l.add(j, m);
}
-}
return $ V.fromList $ fmap GLFWbVideoMode $ filter (validDisplayMode oldMode) modes
where validDisplayMode :: GLFW.VideoMode -> GLFW.VideoMode -> Bool
validDisplayMode oldMode newMode =
GLFW.videoModeRedBits oldMode == GLFW.videoModeRedBits newMode &&
GLFW.videoModeGreenBits oldMode == GLFW.videoModeGreenBits newMode &&
GLFW.videoModeBlueBits oldMode == GLFW.videoModeBlueBits newMode &&
GLFW.videoModeRefreshRate oldMode >= GLFW.videoModeRefreshRate newMode &&
GLFW.videoModeWidth newMode >= 320 &&
GLFW.videoModeHeight newMode >= 240
-- first param is dim but it is not used anywhere for some reason
glDriverSetMode :: (Int, Int) -> Int -> Bool -> Quake Int
glDriverSetMode _ mode fullscreen = do
VID.printf Constants.printAll "Initializing OpenGL display\n"
VID.printf Constants.printAll $ "...setting mode " `B.append` BC.pack (show mode) `B.append` ":" -- IMPROVE?
(use $ glfwbGlobals.glfwbOldDisplayMode) >>= \oldMode ->
when (isNothing oldMode) $ do
Just monitor <- io $ GLFW.getPrimaryMonitor
videoMode <- io $ GLFW.getVideoMode monitor
glfwbGlobals.glfwbOldDisplayMode .= videoMode
ok <- VID.getModeInfo mode
case ok of
Nothing -> do
VID.printf Constants.printAll " invalid mode\n"
return RenderAPIConstants.rsErrInvalidMode
Just newDim -> do
VID.printf Constants.printAll $ BC.pack (show newDim)
-- destroy the existing window
glDriverShutdown
-- TODO: handle errors (Nothing)
Just monitor <- io $ GLFW.getPrimaryMonitor
if fullscreen
then do
-- TODO: handle errors (Nothing)
Just window <- io $ GLFW.createWindow (newDim^._1) (newDim^._2) "Hake2 (GLFWb)" (Just monitor) Nothing
glfwbGlobals.glfwbWindow .= Just window
io $ GLFW.makeContextCurrent (Just window)
else do
-- TODO: handle errors (Nothing)
Just window <- io $ GLFW.createWindow (newDim^._1) (newDim^._2) "Hake2 (GLFWb)" Nothing Nothing
glfwbGlobals.glfwbWindow .= Just window
io $ GLFW.makeContextCurrent (Just window)
Just currentMode <- io $ GLFW.getVideoMode monitor
when fullscreen $
VID.printf Constants.printAll ("...setting fullscreen " `B.append` getModeString currentMode `B.append` "\n")
zoom (fastRenderAPIGlobals.frVid) $ do
vdNewWidth .= newDim^._1
vdNewHeight .= newDim^._2
-- let the sound and input subsystems know about the new window
VID.newWindow (newDim^._1) (newDim^._2)
return RenderAPIConstants.rsErrOk
getModeString :: GLFW.VideoMode -> B.ByteString
getModeString vm =
BC.pack (show $ GLFW.videoModeWidth vm) `B.append`
"x" `B.append`
BC.pack (show $ GLFW.videoModeHeight vm) `B.append`
"x" `B.append`
BC.pack (show $ GLFW.videoModeRedBits vm) `B.append` -- TODO: ensure this is what we think it is
"@" `B.append`
BC.pack (show $ GLFW.videoModeRefreshRate vm) `B.append`
"Hz"
glDriverShutdown :: Quake ()
glDriverShutdown =
(use $ glfwbGlobals.glfwbWindow) >>= \w ->
when (isJust w) $ do
io (GLFW.destroyWindow (fromJust w))
glfwbGlobals.glfwbWindow .= Nothing
glDriverInit :: Int -> Int -> Quake Bool
glDriverInit xpos ypos = do
-- do nothing
glfwbGlobals.glfwbWindowXPos .= xpos
glfwbGlobals.glfwbWindowYPos .= ypos
return True
glDriverBeginFrame :: Float -> Quake ()
glDriverBeginFrame _ = return () -- do nothing
glDriverEndFrame :: Quake ()
glDriverEndFrame = do
Just window <- use $ glfwbGlobals.glfwbWindow
io $ GLFW.swapBuffers window
glDriverAppActivate :: Bool -> Quake ()
glDriverAppActivate _ = return () -- do nothing
glDriverEnableLogging :: Bool -> Quake ()
glDriverEnableLogging _ = return () -- do nothing
glDriverLogNewFrame :: Quake ()
glDriverLogNewFrame = return () -- do nothing
glDriverUpdateScreen :: XCommandT -> Quake ()
glDriverUpdateScreen callback = callback^.xcCmd
|
ksaveljev/hake-2
|
src/Render/OpenGL/GLFWbGLDriver.hs
|
bsd-3-clause
| 7,051
| 0
| 19
| 2,060
| 1,558
| 805
| 753
| -1
| -1
|
{-# LANGUAGE ParallelListComp #-}
-- | 'Utils' is a module of assorted utilities.
module Utils where
import Data.Char (isSpace)
import Data.List (dropWhileEnd)
-- | 'filterLines' @lns@ filters empty lines and lines containing only
-- space characters from @lns@.
--
-- > filterLines ["hello", " \t ", "goodbye"] == ["hello","goodbye"]
filterLines :: [String] -> [String]
filterLines lns = [ ln | ln <- lns, trim ln /= "" ]
-- | 'replace' @c str@ replaces newlines in @str@ with @c@
--
-- > replace ',' "hello\ngoodbye" == "hello,goodbye"
replace :: Char -> String -> String
replace c =
map loop
where loop '\n' = c
loop c = c
-- | 'indentation' @ln@ is the number of space characters at the
-- beginning of @ln@.
--
-- > indentation "\t hello" == 2
indentation :: String -> Int
indentation ln = length $ takeWhile isSpace ln
-- | 'join' @str1 str2@ appends @str1@ and @str2@, ensuring that there is
-- only a single newline character between them.
--
-- > join "hello \n" "\t goodbye" == "hello\ngoodbye"
join :: String -> String -> String
join str1 str2 = dropWhileEnd isSpace str1 ++ "\n" ++ dropWhile isSpace str2
-- | 'prefix' @pre str@ prepends all lines in 'str' with 'pre'.
--
-- > prefix "->" "hello\ngoodbye" == "->hello\n->goodbye"
prefix :: String -> String -> String
prefix pre str =
pre ++ prefix' str
where prefix' =
concatMap (\c -> case c of
'\n' -> '\n':pre
_ -> [c])
-- | 'push' @x xs@ adds @x@ to @xs@ only if the first element in @xs@
-- is different from @x@.
--
-- > push 1 [2,3] == [1,2,3]
-- > push 1 [1,3] == [1,3]
push :: Eq a => a -> [a] -> [a]
push x (y:ys) | x /= y = x:y:ys
push _ xs = xs
-- | 'trim' @str@ removes leading and trailing space characters from
-- @str@.
--
-- > trim "\t hello \n" == "hello"
trim :: String -> String
trim = dropWhileEnd isSpace . dropWhile isSpace
|
jabolopes/fmark
|
src/Utils.hs
|
bsd-3-clause
| 1,923
| 0
| 13
| 459
| 373
| 207
| 166
| 27
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Web.RTBBidder.Types.Request.Device (Device(..)) where
import qualified Data.Aeson as AESON
import Data.Aeson ((.=), (.:), (.:?), (.!=))
import qualified Data.Text as TX
import Web.RTBBidder.Types.Request.Geo (Geo(..))
data Device = Device
{ devUa :: Maybe TX.Text
, devGeo :: Maybe Geo
, devDnt :: Maybe Int
, devLmt :: Maybe Int
, devIp :: Maybe TX.Text
, devIpv6 :: Maybe TX.Text
, devDevicetype :: Maybe Int
, devMake :: Maybe TX.Text
, devModel :: Maybe TX.Text
, devOs :: Maybe TX.Text
, devOsv :: Maybe TX.Text
, devHwv :: Maybe TX.Text
, devH :: Maybe Int
, devW :: Maybe Int
, devPpi :: Maybe Int
, devPxratio :: Maybe Double
, devJs :: Maybe Int
, devGeofetch :: Maybe Int
, devFlashver :: Maybe TX.Text
, devLanguage :: Maybe TX.Text
, devCarrier :: Maybe TX.Text
, devMcccmnc :: Maybe TX.Text
, devConnectiontype :: Maybe Int
, devIfa :: Maybe TX.Text
, devDidsha1 :: Maybe TX.Text
, devDidmd5 :: Maybe TX.Text
, devDpidsha1 :: Maybe TX.Text
, devDpidmd5 :: Maybe TX.Text
, devMacsha1 :: Maybe TX.Text
, devMacmd5 :: Maybe TX.Text
, devExt :: Maybe AESON.Value
} deriving (Show, Eq)
instance AESON.FromJSON Device where
parseJSON = AESON.withObject "device" $ \o -> do
devUa <- o .:? "ua"
devGeo <- o .:? "geo"
devDnt <- o .:? "dnt"
devLmt <- o .:? "lmt"
devIp <- o .:? "ip"
devIpv6 <- o .:? "ipv6"
devDevicetype <- o .:? "devicetype"
devMake <- o .:? "make"
devModel <- o .:? "model"
devOs <- o .:? "os"
devOsv <- o .:? "osv"
devHwv <- o .:? "hwv"
devH <- o .:? "h"
devW <- o .:? "w"
devPpi <- o .:? "ppi"
devPxratio <- o .:? "pxratio"
devJs <- o .:? "js"
devGeofetch <- o .:? "geofetch"
devFlashver <- o .:? "flashver"
devLanguage <- o .:? "language"
devCarrier <- o .:? "carrier"
devMcccmnc <- o .:? "mccmnc"
devConnectiontype <- o .:? "connectiontype"
devIfa <- o .:? "ifa"
devDidsha1 <- o .:? "didsha1"
devDidmd5 <- o .:? "didmd5"
devDpidsha1 <- o .:? "dpidsha1"
devDpidmd5 <- o .:? "dpidmd5"
devMacsha1 <- o .:? "macsha1"
devMacmd5 <- o .:? "macmd5"
devExt <- o .:? "ext"
return Device{..}
instance AESON.ToJSON Device where
toJSON Device{..} = AESON.object
[ "ua" .= devUa
, "geo" .= devGeo
, "dnt" .= devDnt
, "lmt" .= devLmt
, "ip" .= devIp
, "ipv6" .= devIpv6
, "devicetype" .= devDevicetype
, "make" .= devMake
, "model" .= devModel
, "os" .= devOs
, "osv" .= devOsv
, "hwv" .= devHwv
, "h" .= devH
, "w" .= devW
, "ppi" .= devPpi
, "pxratio" .= devPxratio
, "js" .= devJs
, "geofetch" .= devGeofetch
, "flashver" .= devFlashver
, "language" .= devLanguage
, "carrier" .= devCarrier
, "mccmnc" .= devMcccmnc
, "connectiontype" .= devConnectiontype
, "ifa" .= devIfa
, "didsha1" .= devDidsha1
, "didmd5" .= devDidmd5
, "dpidsha1" .= devDpidsha1
, "dpidmd5" .= devDpidmd5
, "macsha1" .= devMacsha1
, "macmd5" .= devMacmd5
, "ext" .= devExt
]
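-- A minimal sketch, not part of the original module: exercising the FromJSON
-- instance above on a small, hypothetical bid-request fragment. Fields absent
-- from the JSON parse as Nothing because every field uses (.:?).
exampleDevice :: Maybe Device
exampleDevice = AESON.decode "{\"ua\":\"Mozilla/5.0\",\"w\":320,\"h\":480}"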
|
hiratara/hs-rtb-bidder
|
src/Web/RTBBidder/Types/Request/Device.hs
|
bsd-3-clause
| 3,167
| 0
| 11
| 826
| 1,046
| 558
| 488
| 107
| 0
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE NamedFieldPuns, RecordWildCards, BangPatterns,
StandaloneDeriving, GeneralizedNewtypeDeriving #-}
module Distribution.Server.Features.EditCabalFiles (
initEditCabalFilesFeature
, diffCabalRevisions
, Change(..)
) where
import Distribution.Server.Framework
import Distribution.Server.Framework.Templating
import Distribution.Server.Features.Users
import Distribution.Server.Features.Core
import Distribution.Server.Packages.Types
import Distribution.Server.Features.Upload
import Distribution.Package
import Distribution.Text (display)
import Distribution.Version (intersectVersionRanges)
import Distribution.PackageDescription
import Distribution.PackageDescription.Parse
(parsePackageDescription, sourceRepoFieldDescrs)
import Distribution.PackageDescription.Check
import Distribution.ParseUtils
( ParseResult(..), locatedErrorMsg, showPWarning )
import Distribution.Server.Util.Parse (unpackUTF8)
import Distribution.ParseUtils (FieldDescr(..))
import Distribution.Text (Text(..))
import Text.PrettyPrint as Doc
(nest, empty, isEmpty, (<+>), colon, (<>), text, vcat, ($+$), Doc)
import Text.StringTemplate (ToSElem(..))
import Data.List
import qualified Data.Char as Char
import Data.ByteString.Lazy (ByteString)
import qualified Data.Map as Map
import Control.Monad.Error (ErrorT, runErrorT)
import Control.Monad.Writer (MonadWriter(..), Writer, runWriter)
import Control.Applicative
import Data.Time (getCurrentTime)
import qualified Data.ByteString.Lazy.Char8 as BS -- TODO: Verify that we don't need to worry about UTF8
-- | A feature to allow editing cabal files without uploading new tarballs.
--
initEditCabalFilesFeature :: ServerEnv
-> IO (UserFeature
-> CoreFeature
-> UploadFeature
-> IO HackageFeature)
initEditCabalFilesFeature env@ServerEnv{ serverTemplatesDir,
serverTemplatesMode } = do
-- Page templates
templates <- loadTemplates serverTemplatesMode
[serverTemplatesDir, serverTemplatesDir </> "EditCabalFile"]
["cabalFileEditPage.html", "cabalFilePublished.html"]
return $ \user core upload -> do
let feature = editCabalFilesFeature env templates user core upload
return feature
editCabalFilesFeature :: ServerEnv -> Templates
-> UserFeature -> CoreFeature -> UploadFeature
-> HackageFeature
editCabalFilesFeature _env templates
UserFeature{guardAuthorised}
CoreFeature{..}
UploadFeature{maintainersGroup, trusteesGroup} =
(emptyHackageFeature "edit-cabal-files") {
featureResources =
[ editCabalFileResource
]
, featureState = []
, featureReloadFiles = reloadTemplates templates
}
where
CoreResource{..} = coreResource
editCabalFileResource =
(resourceAt "/package/:package/:cabal.cabal/edit") {
resourceDesc = [(GET, "Page to edit package metadata")
,(POST, "Modify the package metadata")],
resourceGet = [("html", serveEditCabalFileGet)],
resourcePost = [("html", serveEditCabalFilePost)]
}
serveEditCabalFileGet :: DynamicPath -> ServerPartE Response
serveEditCabalFileGet dpath = do
template <- getTemplate templates "cabalFileEditPage.html"
pkg <- packageInPath dpath >>= lookupPackageId
let pkgname = packageName pkg
pkgid = packageId pkg
-- check that the cabal name matches the package
guard (lookup "cabal" dpath == Just (display pkgname))
ok $ toResponse $ template
[ "pkgid" $= pkgid
, "cabalfile" $= insertRevisionField (pkgNumRevisions pkg)
(cabalFileByteString (pkgLatestCabalFileText pkg))
]
serveEditCabalFilePost :: DynamicPath -> ServerPartE Response
serveEditCabalFilePost dpath = do
template <- getTemplate templates "cabalFileEditPage.html"
pkg <- packageInPath dpath >>= lookupPackageId
let pkgname = packageName pkg
pkgid = packageId pkg
-- check that the cabal name matches the package
guard (lookup "cabal" dpath == Just (display pkgname))
uid <- guardAuthorised [ InGroup (maintainersGroup pkgname)
, InGroup trusteesGroup ]
let oldVersion = cabalFileByteString (pkgLatestCabalFileText pkg)
newRevision <- getCabalFile
shouldPublish <- getPublish
case diffCabalRevisions pkgid oldVersion newRevision of
Left errs ->
responseTemplate template pkgid newRevision
shouldPublish [errs] []
Right changes
| shouldPublish && not (null changes) -> do
template' <- getTemplate templates "cabalFilePublished.html"
time <- liftIO getCurrentTime
updateAddPackageRevision pkgid (CabalFileText newRevision)
(time, uid)
ok $ toResponse $ template'
[ "pkgid" $= pkgid
, "cabalfile" $= newRevision
, "changes" $= changes
]
| otherwise ->
responseTemplate template pkgid newRevision
shouldPublish [] changes
where
getCabalFile = body (lookBS "cabalfile")
getPublish = body $ (look "review" >> return False) `mplus`
(look "publish" >> return True)
responseTemplate :: ([TemplateAttr] -> Template) -> PackageId
-> ByteString -> Bool -> [String] -> [Change]
-> ServerPartE Response
responseTemplate template pkgid cabalFile publish errors changes =
ok $ toResponse $ template
[ "pkgid" $= pkgid
, "cabalfile" $= cabalFile
, "publish" $= publish
, "errors" $= errors
, "changes" $= changes
]
instance ToSElem Change where
toSElem (Change change from to) =
toSElem (Map.fromList [("what", change)
,("from", from)
,("to", to)])
newtype CheckM a = CheckM { unCheckM :: ErrorT String (Writer [Change]) a } deriving (Functor, Applicative)
runCheck :: CheckM () -> Either String [Change]
runCheck c = case runWriter . runErrorT . unCheckM $ c of
(Left err, _ ) -> Left err
(Right (), changes) -> Right changes
instance Monad CheckM where
return = CheckM . return
CheckM m >>= f = CheckM (m >>= unCheckM . f)
fail = CheckM . throwError
data Change = Change String String String -- what, from, to
deriving Show
logChange :: Change -> CheckM ()
logChange change = CheckM (tell [change])
type Check a = a -> a -> CheckM ()
diffCabalRevisions :: PackageId -> ByteString -> ByteString
-> Either String [Change]
diffCabalRevisions pkgid oldVersion newRevision =
runCheck $ checkCabalFileRevision pkgid oldVersion newRevision
checkCabalFileRevision :: PackageId -> Check ByteString
checkCabalFileRevision pkgid old new = do
(pkg, warns) <- parseCabalFile old
(pkg', warns') <- parseCabalFile new
checkGenericPackageDescription pkg pkg'
checkParserWarnings warns warns'
checkPackageChecks pkg pkg'
where
filename = display pkgid ++ ".cabal"
parseCabalFile fileContent =
case parsePackageDescription . unpackUTF8 $ fileContent of
ParseFailed err -> fail (formatErrorMsg (locatedErrorMsg err))
ParseOk warnings pkg -> return (pkg, warnings)
formatErrorMsg (Nothing, msg) = msg
formatErrorMsg (Just n, msg) = "Line " ++ show n ++ ": " ++ msg
checkParserWarnings warns warns' =
case warns' \\ warns of
[] -> return ()
newwarns -> fail $ "New parse warning: "
++ unlines (map (showPWarning filename) newwarns)
checkPackageChecks pkg pkg' =
let checks = checkPackage pkg Nothing
checks' = checkPackage pkg' Nothing
in case checks' \\ checks of
[] -> return ()
newchecks -> fail $ unlines (map explanation newchecks)
checkGenericPackageDescription :: Check GenericPackageDescription
checkGenericPackageDescription
(GenericPackageDescription descrA flagsA libsA exesA testsA benchsA)
(GenericPackageDescription descrB flagsB libsB exesB testsB benchsB) = do
checkPackageDescriptions descrA descrB
checkSame "Sorry, cannot edit the package flags"
flagsA flagsB
checkMaybe "Cannot add or remove library sections"
(checkCondTree checkLibrary) libsA libsB
checkListAssoc "Cannot add or remove executable sections"
(checkCondTree checkExecutable) exesA exesB
checkListAssoc "Cannot add or remove test-suite sections"
(checkCondTree checkTestSuite) testsA testsB
checkListAssoc "Cannot add or remove benchmark sections"
(checkCondTree checkBenchmark) benchsA benchsB
checkPackageDescriptions :: Check PackageDescription
checkPackageDescriptions
(PackageDescription
packageIdA licenseA licenseFileA
copyrightA maintainerA authorA stabilityA testedWithA homepageA
pkgUrlA bugReportsA sourceReposA synopsisA descriptionA
categoryA customFieldsA _buildDependsA specVersionA buildTypeA
_libraryA _executablesA _testSuitesA _benchmarksA dataFilesA dataDirA
extraSrcFilesA extraTmpFilesA extraDocFilesA)
(PackageDescription
packageIdB licenseB licenseFileB
copyrightB maintainerB authorB stabilityB testedWithB homepageB
pkgUrlB bugReportsB sourceReposB synopsisB descriptionB
categoryB customFieldsB _buildDependsB specVersionB buildTypeB
_libraryB _executablesB _testSuitesB _benchmarksB dataFilesB dataDirB
extraSrcFilesB extraTmpFilesB extraDocFilesB)
= do
checkSame "Don't be silly! You can't change the package name!"
(packageName packageIdA) (packageName packageIdB)
checkSame "You can't change the package version!"
(packageVersion packageIdA) (packageVersion packageIdB)
checkSame "Cannot change the license"
(licenseA, licenseFileA) (licenseB, licenseFileB)
changesOk "copyright" id copyrightA copyrightB
changesOk "maintainer" id maintainerA maintainerB
changesOk "author" id authorA authorB
checkSame "The stability field is unused, don't bother changing it."
stabilityA stabilityB
checkSame "The tested-with field is unused, don't bother changing it."
testedWithA testedWithB
changesOk "homepage" id homepageA homepageB
checkSame "The package-url field is unused, don't bother changing it."
pkgUrlA pkgUrlB
changesOk "bug-reports" id bugReportsA bugReportsB
changesOkList changesOk "source-repository" (show . ppSourceRepo)
sourceReposA sourceReposB
changesOk "synopsis" id synopsisA synopsisB
changesOk "description" id descriptionA descriptionB
changesOk "category" id categoryA categoryB
checkSame "Cannot change the Cabal spec version"
specVersionA specVersionB
checkSame "Cannot change the build-type"
buildTypeA buildTypeB
checkSame "Cannot change the data files"
(dataFilesA, dataDirA) (dataFilesB, dataDirB)
checkSame "Changing extra-tmp-files is a bit pointless at this stage"
extraTmpFilesA extraTmpFilesB
checkSame "Changing extra-source-files would not make sense!"
extraSrcFilesA extraSrcFilesB
checkSame "You can't change the extra-doc-files."
extraDocFilesA extraDocFilesB
checkSame "Cannot change custom/extension fields"
(filter (\(f,_) -> f /= "x-revision") customFieldsA)
(filter (\(f,_) -> f /= "x-revision") customFieldsB)
checkRevision customFieldsA customFieldsB
checkRevision :: Check [(String, String)]
checkRevision customFieldsA customFieldsB =
checkSame ("The new x-revision must be " ++ show expectedRevision)
newRevision expectedRevision
where
oldRevision = getRevision customFieldsA
newRevision = getRevision customFieldsB
expectedRevision = oldRevision + 1
getRevision customFields =
case lookup "x-revision" customFields of
Just s | [(n,"")] <- reads s -> n :: Int
_ -> 0
checkCondTree :: Check a -> Check (CondTree ConfVar [Dependency] a)
checkCondTree checkElem
(CondNode dataA constraintsA componentsA)
(CondNode dataB constraintsB componentsB) = do
checkDependencies constraintsA constraintsB
checkList "Cannot add or remove 'if' conditionals"
checkComponent componentsA componentsB
checkElem dataA dataB
where
checkComponent (condA, ifPartA, thenPartA)
(condB, ifPartB, thenPartB) = do
checkSame "Cannot change the 'if' condition expressions"
condA condB
checkCondTree checkElem ifPartA ifPartB
checkMaybe "Cannot add or remove the 'else' part in conditionals"
(checkCondTree checkElem) thenPartA thenPartB
checkDependencies :: Check [Dependency]
-- Special case: there are some pretty weird broken packages out there, see
-- https://github.com/haskell/hackage-server/issues/303
checkDependencies [] [dep@(Dependency (PackageName "base") _)] =
logChange (Change ("added dependency on") (display dep) "")
checkDependencies ds1 ds2 =
fmapCheck canonicaliseDeps
(checkList "Cannot add or remove dependencies, \
\just change the version constraints"
checkDependency)
ds1 ds2
where
-- Allow a limited degree of adding and removing deps: only when they
-- are additional constraints on an existing package.
canonicaliseDeps :: [Dependency] -> [Dependency]
canonicaliseDeps =
map (\(pkgname, verrange) -> Dependency pkgname verrange)
. Map.toList
. Map.fromListWith (flip intersectVersionRanges)
. map (\(Dependency pkgname verrange) -> (pkgname, verrange))
checkDependency :: Check Dependency
checkDependency (Dependency pkgA verA) (Dependency pkgB verB)
| pkgA == pkgB = changesOk ("dependency on " ++ display pkgA) display
verA verB
| otherwise = fail "Cannot change which packages are dependencies, \
\just their version constraints."
checkLibrary :: Check Library
checkLibrary (Library modulesA reexportedA requiredSigsA exposedSigsA
exposedA buildInfoA)
(Library modulesB reexportedB requiredSigsB exposedSigsB
exposedB buildInfoB) = do
checkSame "Cannot change the exposed modules" modulesA modulesB
checkSame "Cannot change the re-exported modules" reexportedA reexportedB
checkSame "Cannot change the required signatures" requiredSigsA requiredSigsB
checkSame "Cannot change the exposed signatures" exposedSigsA exposedSigsB
checkSame "Cannot change the package exposed status" exposedA exposedB
checkBuildInfo buildInfoA buildInfoB
checkExecutable :: Check Executable
checkExecutable (Executable _nameA pathA buildInfoA)
(Executable _nameB pathB buildInfoB) = do
checkSame "Cannot change build information" pathA pathB
checkBuildInfo buildInfoA buildInfoB
checkTestSuite :: Check TestSuite
checkTestSuite (TestSuite _nameA interfaceA buildInfoA _enabledA)
(TestSuite _nameB interfaceB buildInfoB _enabledB) = do
checkSame "Cannot change test-suite type" interfaceA interfaceB
checkBuildInfo buildInfoA buildInfoB
checkBenchmark :: Check Benchmark
checkBenchmark (Benchmark _nameA interfaceA buildInfoA _enabledA)
(Benchmark _nameB interfaceB buildInfoB _enabledB) = do
checkSame "Cannot change benchmark type" interfaceA interfaceB
checkBuildInfo buildInfoA buildInfoB
checkBuildInfo :: Check BuildInfo
checkBuildInfo biA biB =
checkSame "Cannot change build information \
\(just the dependency version constraints)"
(biA { targetBuildDepends = [] })
(biB { targetBuildDepends = [] })
changesOk :: Eq a => String -> (a -> String) -> Check a
changesOk what render a b
| a == b = return ()
| otherwise = logChange (Change what (render a) (render b))
changesOkList :: (String -> (a -> String) -> Check a)
-> String -> (a -> String) -> Check [a]
changesOkList changesOkElem what render = go
where
go [] [] = return ()
go (a:_) [] = logChange (Change ("added " ++ what) (render a) "")
go [] (b:_) = logChange (Change ("removed " ++ what) "" (render b))
go (a:as) (b:bs) = changesOkElem what render a b >> go as bs
checkSame :: Eq a => String -> Check a
checkSame msg x y | x == y = return ()
| otherwise = fail msg
checkList :: String -> Check a -> Check [a]
checkList _ _ [] [] = return ()
checkList msg checkElem (x:xs) (y:ys) = checkElem x y
>> checkList msg checkElem xs ys
checkList msg _ _ _ = fail msg
checkListAssoc :: Eq b => String -> Check a -> Check [(b,a)]
checkListAssoc _ _ [] [] = return ()
checkListAssoc msg checkElem ((kx,x):xs) ((ky,y):ys)
| kx == ky = checkElem x y
>> checkListAssoc msg checkElem xs ys
| otherwise = fail msg
checkListAssoc msg _ _ _ = fail msg
checkMaybe :: String -> Check a -> Check (Maybe a)
checkMaybe _ _ Nothing Nothing = return ()
checkMaybe _ check (Just x) (Just y) = check x y
checkMaybe msg _ _ _ = fail msg
fmapCheck :: (b -> a) -> Check a -> Check b
fmapCheck f check a b =
check (f a) (f b)
--TODO: export from Cabal
ppSourceRepo :: SourceRepo -> Doc
ppSourceRepo repo =
emptyLine $ text "source-repository" <+> disp (repoKind repo) $+$
(nest 4 (ppFields sourceRepoFieldDescrs' repo))
where
sourceRepoFieldDescrs' =
filter (\fd -> fieldName fd /= "kind") sourceRepoFieldDescrs
emptyLine :: Doc -> Doc
emptyLine d = text " " $+$ d
ppFields :: [FieldDescr a] -> a -> Doc
ppFields fields x =
vcat [ ppField name (getter x)
| FieldDescr name getter _ <- fields]
ppField :: String -> Doc -> Doc
ppField name fielddoc | isEmpty fielddoc = Doc.empty
| otherwise = text name <> colon <+> fielddoc
insertRevisionField :: Int -> ByteString -> ByteString
insertRevisionField rev
| rev == 1 = BS.unlines . insertAfterVersion . BS.lines
| otherwise = BS.unlines . replaceRevision . BS.lines
where
replaceRevision [] = []
replaceRevision (ln:lns)
| isField (BS.pack "x-revision") ln
= BS.pack ("x-revision: " ++ show rev) : lns
| otherwise
= ln : replaceRevision lns
insertAfterVersion [] = []
insertAfterVersion (ln:lns)
| isField (BS.pack "version") ln
= ln : BS.pack ("x-revision: " ++ show rev) : lns
| otherwise
= ln : insertAfterVersion lns
isField nm ln
| BS.isPrefixOf nm (BS.map Char.toLower ln)
, let (_, t) = BS.span (\c -> c == ' ' || c == '\t')
(BS.drop (BS.length nm) ln)
, Just (':',_) <- BS.uncons t
= True
| otherwise = False
|
snoyberg/hackage-server
|
Distribution/Server/Features/EditCabalFiles.hs
|
bsd-3-clause
| 19,689
| 0
| 18
| 5,246
| 4,856
| 2,447
| 2,409
| 387
| 5
|
{-# OPTIONS -XDeriveDataTypeable
-XTypeSynonymInstances
-XMultiParamTypeClasses
-XExistentialQuantification
-XOverloadedStrings
-XFlexibleInstances
-XUndecidableInstances
-XFunctionalDependencies
#-}
{- |
A persistent, transactional collection with Queue interface as well as
indexed access by key.
Uses default persistence. See "Data.TCache.DefaultPersistence"
-}
{-
NOTES
TODO:
data.persistent collection
turn it into a tree
adding elements: node Node (refQueue a)
implement a query language
by key
by attribute (addAttibute)
by class
xpath
implement a btree on top of it
-}
module Data.Persistent.Collection (
RefQueue(..), getQRef,
pop,popSTM,pick, flush, flushSTM,
pickAll, pickAllSTM, push,pushSTM,
pickElem, pickElemSTM, readAll, readAllSTM,
deleteElem, deleteElemSTM,updateElem,updateElemSTM,
unreadSTM,isEmpty,isEmptySTM
) where
import Data.Typeable
import Control.Concurrent.STM(STM,atomically, retry)
import Control.Monad
import Data.TCache.DefaultPersistence
import Data.TCache
import System.IO.Unsafe
import Data.RefSerialize
import Data.ByteString.Lazy.Char8
import Debug.Trace
a !> b= trace b a
instance Indexable (Queue a) where
key (Queue k _ _) = queuePrefix ++ k
data Queue a
= Queue
{
name :: String
, imp :: [a]
, out :: [a]
}
deriving (Typeable)
instance Serialize a => Serialize (Queue a) where
showp (Queue n i o) = showp n >> showp i >> showp o
readp = return Queue `ap` readp `ap` readp `ap` readp
queuePrefix= "Queue#"
lenQPrefix= Prelude.length queuePrefix
instance Serialize a => Serializable (Queue a) where
serialize = runW . showp
deserialize = runR readp
-- | A queue reference
type RefQueue a
= DBRef (Queue a)
-- | push an element at the top of the queue
unreadSTM :: (Typeable a, Serialize a) => Persist -> RefQueue a -> a -> STM ()
unreadSTM store queue x = do
r <- readQRef store queue
writeDBRef store queue $ doit r
where
doit (Queue n imp out) = Queue n imp (x : out)
-- | Check if the queue is empty
isEmpty :: (Typeable a, Serialize a) => Persist -> RefQueue a -> IO Bool
isEmpty store = atomically . isEmptySTM store
isEmptySTM :: (Typeable a, Serialize a) => Persist -> RefQueue a -> STM Bool
isEmptySTM store queue = do
r <- readDBRef store queue
return $ case r of
Nothing -> True
Just (Queue _ [] []) -> True
_ -> False
-- | Get the reference to a new or existing queue through its name
getQRef :: (Typeable a, Serialize a) => String -> RefQueue a
getQRef n = getDBRef . key $ Queue n undefined undefined
-- | Empty the queue (in fact, it is deleted)
flush :: (Typeable a, Serialize a) => Persist -> RefQueue a -> IO ()
flush store = atomically . flushSTM store
-- | Version in the STM monad
flushSTM :: (Typeable a, Serialize a) => Persist -> RefQueue a -> STM ()
flushSTM store tv = delDBRef store tv
-- | Read the first element in the queue and delete it (pop)
pop :: (Typeable a, Serialize a) => Persist -> RefQueue a -- ^ Queue name
-> IO a -- ^ the returned elems
pop store tv = atomically $ popSTM store tv
readQRef :: (Typeable a, Serialize a) => Persist -> RefQueue a -> STM (Queue a)
readQRef store tv = do
mdx <- readDBRef store tv
case mdx of
Nothing -> do
let q= Queue ( Prelude.drop lenQPrefix $ keyObjDBRef tv) [] []
writeDBRef store tv q
return q
Just dx ->
return dx
-- | Version in the STM monad
popSTM :: (Typeable a, Serialize a)
=> Persist -> RefQueue a -> STM a
popSTM store tv = do
dx <- readQRef store tv
doit dx
where
doit (Queue n [x] []) = do
writeDBRef store tv $ Queue n [] []
return x
doit (Queue _ [] []) = retry
doit (Queue n imp []) = doit $ Queue n [] $ Prelude.reverse imp
doit (Queue n imp list) = do
writeDBRef store tv $ Queue n imp $ Prelude.tail list
return $ Prelude.head list
-- | Read the first element in the queue without deleting it
pick :: (Typeable a, Serialize a) => Persist -> RefQueue a -- ^ Queue name
-> IO a -- ^ the returned elems
pick store tv = atomically $ do
dx <- readQRef store tv
doit dx
where
doit (Queue _ [x] []) = return x
doit (Queue _ [] []) = retry
doit (Queue n imp []) = doit $ Queue n [] $ Prelude.reverse imp
doit (Queue n imp list) = return $ Prelude.head list
-- | Push an element in the queue
push :: (Typeable a, Serialize a) => Persist -> RefQueue a -> a -> IO ()
push store tv v = atomically $ pushSTM store tv v
-- | Version in the STM monad
pushSTM :: (Typeable a, Serialize a) => Persist -> RefQueue a -> a -> STM ()
pushSTM store tv v =
readQRef store tv >>= \(Queue n imp out) -> writeDBRef store tv $ Queue n (v : imp) out
-- | Return the list of all elements in the queue. The queue remains unchanged
pickAll :: (Typeable a, Serialize a) => Persist -> RefQueue a -> IO [a]
pickAll store = atomically . pickAllSTM store
-- | Version in the STM monad
pickAllSTM :: (Typeable a, Serialize a)
=> Persist -> RefQueue a -> STM [a]
pickAllSTM store tv = do
Queue name imp out <- readQRef store tv
return $ out ++ Prelude.reverse imp
-- | Return the first element in the queue that has the given key
pickElem ::(Indexable a,Typeable a, Serialize a)
=> Persist -> RefQueue a -> String -> IO(Maybe a)
pickElem store tv key = atomically $ pickElemSTM store tv key
-- | Version in the STM monad
pickElemSTM :: (Indexable a,Typeable a, Serialize a)
=> Persist -> RefQueue a -> String -> STM (Maybe a)
pickElemSTM store tv key1 = do
Queue name imp out <- readQRef store tv
let xs= out ++ Prelude.reverse imp
when (not $ Prelude.null imp) $ writeDBRef store tv $ Queue name [] xs
case Prelude.filter (\x -> key x == key1) xs of
[] -> return $ Nothing
x : _ -> return $ Just x
-- | Update the element(s) of the queue that have the same key as the given element
updateElem :: (Indexable a,Typeable a, Serialize a)
=> Persist -> RefQueue a -> a -> IO ()
updateElem store tv x = atomically $ updateElemSTM store tv x
-- | Version in the STM monad
updateElemSTM :: (Indexable a,Typeable a, Serialize a)
=> Persist -> RefQueue a -> a -> STM ()
updateElemSTM store tv v = do
Queue name imp out <- readQRef store tv
let xs = out ++ Prelude.reverse imp
let xs' = Prelude.map (\x -> if key x == n then v else x) xs
writeDBRef store tv $ Queue name [] xs'
where
n = key v
-- | Return the list of all elements in the queue and empty it
readAll :: (Typeable a, Serialize a)
=> Persist -> RefQueue a -> IO [a]
readAll store = atomically . readAllSTM store
-- | A version in the STM monad
readAllSTM :: (Typeable a, Serialize a)
=> Persist -> RefQueue a -> STM [a]
readAllSTM store tv = do
Queue name imp out <- readQRef store tv
writeDBRef store tv $ Queue name [] []
return $ out ++ Prelude.reverse imp
-- | Delete all the elements of the queue that have the same key as the given element
deleteElem :: (Indexable a,Typeable a, Serialize a)
=> Persist -> RefQueue a -> a -> IO ()
deleteElem store tv x = atomically $ deleteElemSTM store tv x
-- | Version in the STM monad
deleteElemSTM :: (Typeable a, Serialize a,Indexable a)
=> Persist -> RefQueue a-> a -> STM ()
deleteElemSTM store tv x = do
Queue name imp out <- readQRef store tv
let xs= out ++ Prelude.reverse imp
writeDBRef store tv $ Queue name [] $ Prelude.filter (\x-> key x /= k) xs
where
k = key x
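-- A minimal usage sketch, not part of the original module. It assumes a store
-- value (store :: Persist) obtained elsewhere, and that a Serialize instance
-- for String is available from Data.RefSerialize; both are assumptions here.
exampleRoundTrip :: Persist -> IO String
exampleRoundTrip store = do
  let q = getQRef "exampleQueue" :: RefQueue String
  push store q "hello" -- enqueue one element
  pop store q -- dequeue it again; retries if the queue is empty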
|
ariep/TCache
|
src/Data/Persistent/Collection.hs
|
bsd-3-clause
| 7,627
| 0
| 19
| 1,835
| 2,620
| 1,309
| 1,311
| 146
| 4
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.Version11
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.Version11 (
-- * Types
GLbitfield,
GLboolean,
GLbyte,
GLclampd,
GLclampf,
GLdouble,
GLenum,
GLfloat,
GLint,
GLshort,
GLsizei,
GLubyte,
GLuint,
GLushort,
GLvoid,
-- * Enums
gl_2D,
gl_2_BYTES,
gl_3D,
gl_3D_COLOR,
gl_3D_COLOR_TEXTURE,
gl_3_BYTES,
gl_4D_COLOR_TEXTURE,
gl_4_BYTES,
gl_ACCUM,
gl_ACCUM_ALPHA_BITS,
gl_ACCUM_BLUE_BITS,
gl_ACCUM_BUFFER_BIT,
gl_ACCUM_CLEAR_VALUE,
gl_ACCUM_GREEN_BITS,
gl_ACCUM_RED_BITS,
gl_ADD,
gl_ALL_ATTRIB_BITS,
gl_ALPHA,
gl_ALPHA12,
gl_ALPHA16,
gl_ALPHA4,
gl_ALPHA8,
gl_ALPHA_BIAS,
gl_ALPHA_BITS,
gl_ALPHA_SCALE,
gl_ALPHA_TEST,
gl_ALPHA_TEST_FUNC,
gl_ALPHA_TEST_REF,
gl_ALWAYS,
gl_AMBIENT,
gl_AMBIENT_AND_DIFFUSE,
gl_AND,
gl_AND_INVERTED,
gl_AND_REVERSE,
gl_ATTRIB_STACK_DEPTH,
gl_AUTO_NORMAL,
gl_AUX0,
gl_AUX1,
gl_AUX2,
gl_AUX3,
gl_AUX_BUFFERS,
gl_BACK,
gl_BACK_LEFT,
gl_BACK_RIGHT,
gl_BITMAP,
gl_BITMAP_TOKEN,
gl_BLEND,
gl_BLEND_DST,
gl_BLEND_SRC,
gl_BLUE,
gl_BLUE_BIAS,
gl_BLUE_BITS,
gl_BLUE_SCALE,
gl_BYTE,
gl_C3F_V3F,
gl_C4F_N3F_V3F,
gl_C4UB_V2F,
gl_C4UB_V3F,
gl_CCW,
gl_CLAMP,
gl_CLEAR,
gl_CLIENT_ALL_ATTRIB_BITS,
gl_CLIENT_ATTRIB_STACK_DEPTH,
gl_CLIENT_PIXEL_STORE_BIT,
gl_CLIENT_VERTEX_ARRAY_BIT,
gl_CLIP_PLANE0,
gl_CLIP_PLANE1,
gl_CLIP_PLANE2,
gl_CLIP_PLANE3,
gl_CLIP_PLANE4,
gl_CLIP_PLANE5,
gl_COEFF,
gl_COLOR,
gl_COLOR_ARRAY,
gl_COLOR_ARRAY_POINTER,
gl_COLOR_ARRAY_SIZE,
gl_COLOR_ARRAY_STRIDE,
gl_COLOR_ARRAY_TYPE,
gl_COLOR_BUFFER_BIT,
gl_COLOR_CLEAR_VALUE,
gl_COLOR_INDEX,
gl_COLOR_INDEXES,
gl_COLOR_LOGIC_OP,
gl_COLOR_MATERIAL,
gl_COLOR_MATERIAL_FACE,
gl_COLOR_MATERIAL_PARAMETER,
gl_COLOR_WRITEMASK,
gl_COMPILE,
gl_COMPILE_AND_EXECUTE,
gl_CONSTANT_ATTENUATION,
gl_COPY,
gl_COPY_INVERTED,
gl_COPY_PIXEL_TOKEN,
gl_CULL_FACE,
gl_CULL_FACE_MODE,
gl_CURRENT_BIT,
gl_CURRENT_COLOR,
gl_CURRENT_INDEX,
gl_CURRENT_NORMAL,
gl_CURRENT_RASTER_COLOR,
gl_CURRENT_RASTER_DISTANCE,
gl_CURRENT_RASTER_INDEX,
gl_CURRENT_RASTER_POSITION,
gl_CURRENT_RASTER_POSITION_VALID,
gl_CURRENT_RASTER_TEXTURE_COORDS,
gl_CURRENT_TEXTURE_COORDS,
gl_CW,
gl_DECAL,
gl_DECR,
gl_DEPTH,
gl_DEPTH_BIAS,
gl_DEPTH_BITS,
gl_DEPTH_BUFFER_BIT,
gl_DEPTH_CLEAR_VALUE,
gl_DEPTH_COMPONENT,
gl_DEPTH_FUNC,
gl_DEPTH_RANGE,
gl_DEPTH_SCALE,
gl_DEPTH_TEST,
gl_DEPTH_WRITEMASK,
gl_DIFFUSE,
gl_DITHER,
gl_DOMAIN,
gl_DONT_CARE,
gl_DOUBLE,
gl_DOUBLEBUFFER,
gl_DRAW_BUFFER,
gl_DRAW_PIXEL_TOKEN,
gl_DST_ALPHA,
gl_DST_COLOR,
gl_EDGE_FLAG,
gl_EDGE_FLAG_ARRAY,
gl_EDGE_FLAG_ARRAY_POINTER,
gl_EDGE_FLAG_ARRAY_STRIDE,
gl_EMISSION,
gl_ENABLE_BIT,
gl_EQUAL,
gl_EQUIV,
gl_EVAL_BIT,
gl_EXP,
gl_EXP2,
gl_EXTENSIONS,
gl_EYE_LINEAR,
gl_EYE_PLANE,
gl_FALSE,
gl_FASTEST,
gl_FEEDBACK,
gl_FEEDBACK_BUFFER_POINTER,
gl_FEEDBACK_BUFFER_SIZE,
gl_FEEDBACK_BUFFER_TYPE,
gl_FILL,
gl_FLAT,
gl_FLOAT,
gl_FOG,
gl_FOG_BIT,
gl_FOG_COLOR,
gl_FOG_DENSITY,
gl_FOG_END,
gl_FOG_HINT,
gl_FOG_INDEX,
gl_FOG_MODE,
gl_FOG_START,
gl_FRONT,
gl_FRONT_AND_BACK,
gl_FRONT_FACE,
gl_FRONT_LEFT,
gl_FRONT_RIGHT,
gl_GEQUAL,
gl_GREATER,
gl_GREEN,
gl_GREEN_BIAS,
gl_GREEN_BITS,
gl_GREEN_SCALE,
gl_HINT_BIT,
gl_INCR,
gl_INDEX_ARRAY,
gl_INDEX_ARRAY_POINTER,
gl_INDEX_ARRAY_STRIDE,
gl_INDEX_ARRAY_TYPE,
gl_INDEX_BITS,
gl_INDEX_CLEAR_VALUE,
gl_INDEX_LOGIC_OP,
gl_INDEX_MODE,
gl_INDEX_OFFSET,
gl_INDEX_SHIFT,
gl_INDEX_WRITEMASK,
gl_INT,
gl_INTENSITY,
gl_INTENSITY12,
gl_INTENSITY16,
gl_INTENSITY4,
gl_INTENSITY8,
gl_INVALID_ENUM,
gl_INVALID_OPERATION,
gl_INVALID_VALUE,
gl_INVERT,
gl_KEEP,
gl_LEFT,
gl_LEQUAL,
gl_LESS,
gl_LIGHT0,
gl_LIGHT1,
gl_LIGHT2,
gl_LIGHT3,
gl_LIGHT4,
gl_LIGHT5,
gl_LIGHT6,
gl_LIGHT7,
gl_LIGHTING,
gl_LIGHTING_BIT,
gl_LIGHT_MODEL_AMBIENT,
gl_LIGHT_MODEL_LOCAL_VIEWER,
gl_LIGHT_MODEL_TWO_SIDE,
gl_LINE,
gl_LINEAR,
gl_LINEAR_ATTENUATION,
gl_LINEAR_MIPMAP_LINEAR,
gl_LINEAR_MIPMAP_NEAREST,
gl_LINES,
gl_LINE_BIT,
gl_LINE_LOOP,
gl_LINE_RESET_TOKEN,
gl_LINE_SMOOTH,
gl_LINE_SMOOTH_HINT,
gl_LINE_STIPPLE,
gl_LINE_STIPPLE_PATTERN,
gl_LINE_STIPPLE_REPEAT,
gl_LINE_STRIP,
gl_LINE_TOKEN,
gl_LINE_WIDTH,
gl_LINE_WIDTH_GRANULARITY,
gl_LINE_WIDTH_RANGE,
gl_LIST_BASE,
gl_LIST_BIT,
gl_LIST_INDEX,
gl_LIST_MODE,
gl_LOAD,
gl_LOGIC_OP,
gl_LOGIC_OP_MODE,
gl_LUMINANCE,
gl_LUMINANCE12,
gl_LUMINANCE12_ALPHA12,
gl_LUMINANCE12_ALPHA4,
gl_LUMINANCE16,
gl_LUMINANCE16_ALPHA16,
gl_LUMINANCE4,
gl_LUMINANCE4_ALPHA4,
gl_LUMINANCE6_ALPHA2,
gl_LUMINANCE8,
gl_LUMINANCE8_ALPHA8,
gl_LUMINANCE_ALPHA,
gl_MAP1_COLOR_4,
gl_MAP1_GRID_DOMAIN,
gl_MAP1_GRID_SEGMENTS,
gl_MAP1_INDEX,
gl_MAP1_NORMAL,
gl_MAP1_TEXTURE_COORD_1,
gl_MAP1_TEXTURE_COORD_2,
gl_MAP1_TEXTURE_COORD_3,
gl_MAP1_TEXTURE_COORD_4,
gl_MAP1_VERTEX_3,
gl_MAP1_VERTEX_4,
gl_MAP2_COLOR_4,
gl_MAP2_GRID_DOMAIN,
gl_MAP2_GRID_SEGMENTS,
gl_MAP2_INDEX,
gl_MAP2_NORMAL,
gl_MAP2_TEXTURE_COORD_1,
gl_MAP2_TEXTURE_COORD_2,
gl_MAP2_TEXTURE_COORD_3,
gl_MAP2_TEXTURE_COORD_4,
gl_MAP2_VERTEX_3,
gl_MAP2_VERTEX_4,
gl_MAP_COLOR,
gl_MAP_STENCIL,
gl_MATRIX_MODE,
gl_MAX_ATTRIB_STACK_DEPTH,
gl_MAX_CLIENT_ATTRIB_STACK_DEPTH,
gl_MAX_CLIP_PLANES,
gl_MAX_EVAL_ORDER,
gl_MAX_LIGHTS,
gl_MAX_LIST_NESTING,
gl_MAX_MODELVIEW_STACK_DEPTH,
gl_MAX_NAME_STACK_DEPTH,
gl_MAX_PIXEL_MAP_TABLE,
gl_MAX_PROJECTION_STACK_DEPTH,
gl_MAX_TEXTURE_SIZE,
gl_MAX_TEXTURE_STACK_DEPTH,
gl_MAX_VIEWPORT_DIMS,
gl_MODELVIEW,
gl_MODELVIEW_MATRIX,
gl_MODELVIEW_STACK_DEPTH,
gl_MODULATE,
gl_MULT,
gl_N3F_V3F,
gl_NAME_STACK_DEPTH,
gl_NAND,
gl_NEAREST,
gl_NEAREST_MIPMAP_LINEAR,
gl_NEAREST_MIPMAP_NEAREST,
gl_NEVER,
gl_NICEST,
gl_NONE,
gl_NOOP,
gl_NOR,
gl_NORMALIZE,
gl_NORMAL_ARRAY,
gl_NORMAL_ARRAY_POINTER,
gl_NORMAL_ARRAY_STRIDE,
gl_NORMAL_ARRAY_TYPE,
gl_NOTEQUAL,
gl_NO_ERROR,
gl_OBJECT_LINEAR,
gl_OBJECT_PLANE,
gl_ONE,
gl_ONE_MINUS_DST_ALPHA,
gl_ONE_MINUS_DST_COLOR,
gl_ONE_MINUS_SRC_ALPHA,
gl_ONE_MINUS_SRC_COLOR,
gl_OR,
gl_ORDER,
gl_OR_INVERTED,
gl_OR_REVERSE,
gl_OUT_OF_MEMORY,
gl_PACK_ALIGNMENT,
gl_PACK_LSB_FIRST,
gl_PACK_ROW_LENGTH,
gl_PACK_SKIP_PIXELS,
gl_PACK_SKIP_ROWS,
gl_PACK_SWAP_BYTES,
gl_PASS_THROUGH_TOKEN,
gl_PERSPECTIVE_CORRECTION_HINT,
gl_PIXEL_MAP_A_TO_A,
gl_PIXEL_MAP_A_TO_A_SIZE,
gl_PIXEL_MAP_B_TO_B,
gl_PIXEL_MAP_B_TO_B_SIZE,
gl_PIXEL_MAP_G_TO_G,
gl_PIXEL_MAP_G_TO_G_SIZE,
gl_PIXEL_MAP_I_TO_A,
gl_PIXEL_MAP_I_TO_A_SIZE,
gl_PIXEL_MAP_I_TO_B,
gl_PIXEL_MAP_I_TO_B_SIZE,
gl_PIXEL_MAP_I_TO_G,
gl_PIXEL_MAP_I_TO_G_SIZE,
gl_PIXEL_MAP_I_TO_I,
gl_PIXEL_MAP_I_TO_I_SIZE,
gl_PIXEL_MAP_I_TO_R,
gl_PIXEL_MAP_I_TO_R_SIZE,
gl_PIXEL_MAP_R_TO_R,
gl_PIXEL_MAP_R_TO_R_SIZE,
gl_PIXEL_MAP_S_TO_S,
gl_PIXEL_MAP_S_TO_S_SIZE,
gl_PIXEL_MODE_BIT,
gl_POINT,
gl_POINTS,
gl_POINT_BIT,
gl_POINT_SIZE,
gl_POINT_SIZE_GRANULARITY,
gl_POINT_SIZE_RANGE,
gl_POINT_SMOOTH,
gl_POINT_SMOOTH_HINT,
gl_POINT_TOKEN,
gl_POLYGON,
gl_POLYGON_BIT,
gl_POLYGON_MODE,
gl_POLYGON_OFFSET_FACTOR,
gl_POLYGON_OFFSET_FILL,
gl_POLYGON_OFFSET_LINE,
gl_POLYGON_OFFSET_POINT,
gl_POLYGON_OFFSET_UNITS,
gl_POLYGON_SMOOTH,
gl_POLYGON_SMOOTH_HINT,
gl_POLYGON_STIPPLE,
gl_POLYGON_STIPPLE_BIT,
gl_POLYGON_TOKEN,
gl_POSITION,
gl_PROJECTION,
gl_PROJECTION_MATRIX,
gl_PROJECTION_STACK_DEPTH,
gl_PROXY_TEXTURE_1D,
gl_PROXY_TEXTURE_2D,
gl_Q,
gl_QUADRATIC_ATTENUATION,
gl_QUADS,
gl_QUAD_STRIP,
gl_R,
gl_R3_G3_B2,
gl_READ_BUFFER,
gl_RED,
gl_RED_BIAS,
gl_RED_BITS,
gl_RED_SCALE,
gl_RENDER,
gl_RENDERER,
gl_RENDER_MODE,
gl_REPEAT,
gl_REPLACE,
gl_RETURN,
gl_RGB,
gl_RGB10,
gl_RGB10_A2,
gl_RGB12,
gl_RGB16,
gl_RGB4,
gl_RGB5,
gl_RGB5_A1,
gl_RGB8,
gl_RGBA,
gl_RGBA12,
gl_RGBA16,
gl_RGBA2,
gl_RGBA4,
gl_RGBA8,
gl_RGBA_MODE,
gl_RIGHT,
gl_S,
gl_SCISSOR_BIT,
gl_SCISSOR_BOX,
gl_SCISSOR_TEST,
gl_SELECT,
gl_SELECTION_BUFFER_POINTER,
gl_SELECTION_BUFFER_SIZE,
gl_SET,
gl_SHADE_MODEL,
gl_SHININESS,
gl_SHORT,
gl_SMOOTH,
gl_SPECULAR,
gl_SPHERE_MAP,
gl_SPOT_CUTOFF,
gl_SPOT_DIRECTION,
gl_SPOT_EXPONENT,
gl_SRC_ALPHA,
gl_SRC_ALPHA_SATURATE,
gl_SRC_COLOR,
gl_STACK_OVERFLOW,
gl_STACK_UNDERFLOW,
gl_STENCIL,
gl_STENCIL_BITS,
gl_STENCIL_BUFFER_BIT,
gl_STENCIL_CLEAR_VALUE,
gl_STENCIL_FAIL,
gl_STENCIL_FUNC,
gl_STENCIL_INDEX,
gl_STENCIL_PASS_DEPTH_FAIL,
gl_STENCIL_PASS_DEPTH_PASS,
gl_STENCIL_REF,
gl_STENCIL_TEST,
gl_STENCIL_VALUE_MASK,
gl_STENCIL_WRITEMASK,
gl_STEREO,
gl_SUBPIXEL_BITS,
gl_T,
gl_T2F_C3F_V3F,
gl_T2F_C4F_N3F_V3F,
gl_T2F_C4UB_V3F,
gl_T2F_N3F_V3F,
gl_T2F_V3F,
gl_T4F_C4F_N3F_V4F,
gl_T4F_V4F,
gl_TEXTURE,
gl_TEXTURE_1D,
gl_TEXTURE_2D,
gl_TEXTURE_ALPHA_SIZE,
gl_TEXTURE_BINDING_1D,
gl_TEXTURE_BINDING_2D,
gl_TEXTURE_BIT,
gl_TEXTURE_BLUE_SIZE,
gl_TEXTURE_BORDER,
gl_TEXTURE_BORDER_COLOR,
gl_TEXTURE_COMPONENTS,
gl_TEXTURE_COORD_ARRAY,
gl_TEXTURE_COORD_ARRAY_POINTER,
gl_TEXTURE_COORD_ARRAY_SIZE,
gl_TEXTURE_COORD_ARRAY_STRIDE,
gl_TEXTURE_COORD_ARRAY_TYPE,
gl_TEXTURE_ENV,
gl_TEXTURE_ENV_COLOR,
gl_TEXTURE_ENV_MODE,
gl_TEXTURE_GEN_MODE,
gl_TEXTURE_GEN_Q,
gl_TEXTURE_GEN_R,
gl_TEXTURE_GEN_S,
gl_TEXTURE_GEN_T,
gl_TEXTURE_GREEN_SIZE,
gl_TEXTURE_HEIGHT,
gl_TEXTURE_INTENSITY_SIZE,
gl_TEXTURE_INTERNAL_FORMAT,
gl_TEXTURE_LUMINANCE_SIZE,
gl_TEXTURE_MAG_FILTER,
gl_TEXTURE_MATRIX,
gl_TEXTURE_MIN_FILTER,
gl_TEXTURE_PRIORITY,
gl_TEXTURE_RED_SIZE,
gl_TEXTURE_RESIDENT,
gl_TEXTURE_STACK_DEPTH,
gl_TEXTURE_WIDTH,
gl_TEXTURE_WRAP_S,
gl_TEXTURE_WRAP_T,
gl_TRANSFORM_BIT,
gl_TRIANGLES,
gl_TRIANGLE_FAN,
gl_TRIANGLE_STRIP,
gl_TRUE,
gl_UNPACK_ALIGNMENT,
gl_UNPACK_LSB_FIRST,
gl_UNPACK_ROW_LENGTH,
gl_UNPACK_SKIP_PIXELS,
gl_UNPACK_SKIP_ROWS,
gl_UNPACK_SWAP_BYTES,
gl_UNSIGNED_BYTE,
gl_UNSIGNED_INT,
gl_UNSIGNED_SHORT,
gl_V2F,
gl_V3F,
gl_VENDOR,
gl_VERSION,
gl_VERTEX_ARRAY,
gl_VERTEX_ARRAY_POINTER,
gl_VERTEX_ARRAY_SIZE,
gl_VERTEX_ARRAY_STRIDE,
gl_VERTEX_ARRAY_TYPE,
gl_VIEWPORT,
gl_VIEWPORT_BIT,
gl_XOR,
gl_ZERO,
gl_ZOOM_X,
gl_ZOOM_Y,
-- * Functions
glAccum,
glAlphaFunc,
glAreTexturesResident,
glArrayElement,
glBegin,
glBindTexture,
glBitmap,
glBlendFunc,
glCallList,
glCallLists,
glClear,
glClearAccum,
glClearColor,
glClearDepth,
glClearIndex,
glClearStencil,
glClipPlane,
glColor3b,
glColor3bv,
glColor3d,
glColor3dv,
glColor3f,
glColor3fv,
glColor3i,
glColor3iv,
glColor3s,
glColor3sv,
glColor3ub,
glColor3ubv,
glColor3ui,
glColor3uiv,
glColor3us,
glColor3usv,
glColor4b,
glColor4bv,
glColor4d,
glColor4dv,
glColor4f,
glColor4fv,
glColor4i,
glColor4iv,
glColor4s,
glColor4sv,
glColor4ub,
glColor4ubv,
glColor4ui,
glColor4uiv,
glColor4us,
glColor4usv,
glColorMask,
glColorMaterial,
glColorPointer,
glCopyPixels,
glCopyTexImage1D,
glCopyTexImage2D,
glCopyTexSubImage1D,
glCopyTexSubImage2D,
glCullFace,
glDeleteLists,
glDeleteTextures,
glDepthFunc,
glDepthMask,
glDepthRange,
glDisable,
glDisableClientState,
glDrawArrays,
glDrawBuffer,
glDrawElements,
glDrawPixels,
glEdgeFlag,
glEdgeFlagPointer,
glEdgeFlagv,
glEnable,
glEnableClientState,
glEnd,
glEndList,
glEvalCoord1d,
glEvalCoord1dv,
glEvalCoord1f,
glEvalCoord1fv,
glEvalCoord2d,
glEvalCoord2dv,
glEvalCoord2f,
glEvalCoord2fv,
glEvalMesh1,
glEvalMesh2,
glEvalPoint1,
glEvalPoint2,
glFeedbackBuffer,
glFinish,
glFlush,
glFogf,
glFogfv,
glFogi,
glFogiv,
glFrontFace,
glFrustum,
glGenLists,
glGenTextures,
glGetBooleanv,
glGetClipPlane,
glGetDoublev,
glGetError,
glGetFloatv,
glGetIntegerv,
glGetLightfv,
glGetLightiv,
glGetMapdv,
glGetMapfv,
glGetMapiv,
glGetMaterialfv,
glGetMaterialiv,
glGetPixelMapfv,
glGetPixelMapuiv,
glGetPixelMapusv,
glGetPointerv,
glGetPolygonStipple,
glGetString,
glGetTexEnvfv,
glGetTexEnviv,
glGetTexGendv,
glGetTexGenfv,
glGetTexGeniv,
glGetTexImage,
glGetTexLevelParameterfv,
glGetTexLevelParameteriv,
glGetTexParameterfv,
glGetTexParameteriv,
glHint,
glIndexMask,
glIndexPointer,
glIndexd,
glIndexdv,
glIndexf,
glIndexfv,
glIndexi,
glIndexiv,
glIndexs,
glIndexsv,
glIndexub,
glIndexubv,
glInitNames,
glInterleavedArrays,
glIsEnabled,
glIsList,
glIsTexture,
glLightModelf,
glLightModelfv,
glLightModeli,
glLightModeliv,
glLightf,
glLightfv,
glLighti,
glLightiv,
glLineStipple,
glLineWidth,
glListBase,
glLoadIdentity,
glLoadMatrixd,
glLoadMatrixf,
glLoadName,
glLogicOp,
glMap1d,
glMap1f,
glMap2d,
glMap2f,
glMapGrid1d,
glMapGrid1f,
glMapGrid2d,
glMapGrid2f,
glMaterialf,
glMaterialfv,
glMateriali,
glMaterialiv,
glMatrixMode,
glMultMatrixd,
glMultMatrixf,
glNewList,
glNormal3b,
glNormal3bv,
glNormal3d,
glNormal3dv,
glNormal3f,
glNormal3fv,
glNormal3i,
glNormal3iv,
glNormal3s,
glNormal3sv,
glNormalPointer,
glOrtho,
glPassThrough,
glPixelMapfv,
glPixelMapuiv,
glPixelMapusv,
glPixelStoref,
glPixelStorei,
glPixelTransferf,
glPixelTransferi,
glPixelZoom,
glPointSize,
glPolygonMode,
glPolygonOffset,
glPolygonStipple,
glPopAttrib,
glPopClientAttrib,
glPopMatrix,
glPopName,
glPrioritizeTextures,
glPushAttrib,
glPushClientAttrib,
glPushMatrix,
glPushName,
glRasterPos2d,
glRasterPos2dv,
glRasterPos2f,
glRasterPos2fv,
glRasterPos2i,
glRasterPos2iv,
glRasterPos2s,
glRasterPos2sv,
glRasterPos3d,
glRasterPos3dv,
glRasterPos3f,
glRasterPos3fv,
glRasterPos3i,
glRasterPos3iv,
glRasterPos3s,
glRasterPos3sv,
glRasterPos4d,
glRasterPos4dv,
glRasterPos4f,
glRasterPos4fv,
glRasterPos4i,
glRasterPos4iv,
glRasterPos4s,
glRasterPos4sv,
glReadBuffer,
glReadPixels,
glRectd,
glRectdv,
glRectf,
glRectfv,
glRecti,
glRectiv,
glRects,
glRectsv,
glRenderMode,
glRotated,
glRotatef,
glScaled,
glScalef,
glScissor,
glSelectBuffer,
glShadeModel,
glStencilFunc,
glStencilMask,
glStencilOp,
glTexCoord1d,
glTexCoord1dv,
glTexCoord1f,
glTexCoord1fv,
glTexCoord1i,
glTexCoord1iv,
glTexCoord1s,
glTexCoord1sv,
glTexCoord2d,
glTexCoord2dv,
glTexCoord2f,
glTexCoord2fv,
glTexCoord2i,
glTexCoord2iv,
glTexCoord2s,
glTexCoord2sv,
glTexCoord3d,
glTexCoord3dv,
glTexCoord3f,
glTexCoord3fv,
glTexCoord3i,
glTexCoord3iv,
glTexCoord3s,
glTexCoord3sv,
glTexCoord4d,
glTexCoord4dv,
glTexCoord4f,
glTexCoord4fv,
glTexCoord4i,
glTexCoord4iv,
glTexCoord4s,
glTexCoord4sv,
glTexCoordPointer,
glTexEnvf,
glTexEnvfv,
glTexEnvi,
glTexEnviv,
glTexGend,
glTexGendv,
glTexGenf,
glTexGenfv,
glTexGeni,
glTexGeniv,
glTexImage1D,
glTexImage2D,
glTexParameterf,
glTexParameterfv,
glTexParameteri,
glTexParameteriv,
glTexSubImage1D,
glTexSubImage2D,
glTranslated,
glTranslatef,
glVertex2d,
glVertex2dv,
glVertex2f,
glVertex2fv,
glVertex2i,
glVertex2iv,
glVertex2s,
glVertex2sv,
glVertex3d,
glVertex3dv,
glVertex3f,
glVertex3fv,
glVertex3i,
glVertex3iv,
glVertex3s,
glVertex3sv,
glVertex4d,
glVertex4dv,
glVertex4f,
glVertex4fv,
glVertex4i,
glVertex4iv,
glVertex4s,
glVertex4sv,
glVertexPointer,
glViewport
) where
import Graphics.Rendering.OpenGL.Raw.Types
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
|
phaazon/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/Version11.hs
|
bsd-3-clause
| 16,248
| 0
| 4
| 2,720
| 2,686
| 1,798
| 888
| 883
| 0
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.AmountOfMoney.MN.Rules
( rules
) where
import Data.Maybe
import Data.String
import Prelude
import Duckling.AmountOfMoney.Helpers
import Duckling.AmountOfMoney.Types (Currency(..), AmountOfMoneyData(..))
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (isNatural, isPositive)
import Duckling.Numeral.Types (NumeralData (..))
import Duckling.Regex.Types
import Duckling.Types
import qualified Duckling.AmountOfMoney.Types as TAmountOfMoney
import qualified Duckling.Numeral.Types as TNumeral
ruleUnitAmount :: Rule
ruleUnitAmount = Rule
{ name = "<unit> <amount>"
, pattern =
[ Predicate isCurrencyOnly
, Predicate isPositive
]
, prod = \case
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.currency = c}:
Token Numeral NumeralData{TNumeral.value = v}:
_) -> Just . Token AmountOfMoney . withValue v $ currencyOnly c
_ -> Nothing
}
ruleTugriks :: Rule
ruleTugriks = Rule
{ name = "төг"
, pattern =
[ regex "төг(рөг(ийн)?)?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly MNT
}
rulePounds :: Rule
rulePounds = Rule
{ name = "£"
, pattern =
[ regex "фунт(аар|тай|аас)?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Pound
}
ruleGBP :: Rule
ruleGBP = Rule
{ name = "Mongolian GBP"
, pattern =
[ regex "Английн\\s+фунт"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly GBP
}
ruleDollar :: Rule
ruleDollar = Rule
{ name = "$"
, pattern =
[ regex "доллар(ын|оор|оос|той)?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Dollar
}
ruleCent :: Rule
ruleCent = Rule
{ name = "cent"
, pattern =
[ regex "цент(ийн|ээс|ээр|тэй)?|пени|пенс(ээр|гээр|тэй|ээс|гээс)?|ц"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Cent
}
ruleEUR :: Rule
ruleEUR = Rule
{ name = "€"
, pattern =
[ regex "евро"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly EUR
}
ruleBucks :: Rule
ruleBucks = Rule
{ name = "bucks"
, pattern =
[ regex "бакс(аар|тай|аас)?"
]
, prod = \_ -> Just . Token AmountOfMoney $ currencyOnly Unnamed
}
rulePrecision :: Rule
rulePrecision = Rule
{ name = "about|exactly <amount-of-money>"
, pattern =
[ regex "яг|ойролцоогоор|бараг"
, Predicate isMoneyWithValue
]
, prod = \case
(_:token:_) -> Just token
_ -> Nothing
}
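-- Illustrative note (added for clarity): the production above consumes the
-- precision word and returns the following amount-of-money token unchanged,
-- so a phrase of the form "яг <amount>" resolves to exactly the same value
-- as the bare amount (whichever rules produce that amount).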
ruleIntervalBetweenNumeral :: Rule
ruleIntervalBetweenNumeral = Rule
{ name = "between|from <numeral> to|and <amount-of-money>"
, pattern =
[ Predicate isPositive
, regex "-c"
, Predicate isSimpleAmountOfMoney
, regex "(-н\\s+)?(хооронд|хүртэл)"
]
, prod = \case
(Token Numeral NumeralData{TNumeral.value = from}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) | from < to ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c
_ -> Nothing
}
ruleIntervalBetweenNumeral2 :: Rule
ruleIntervalBetweenNumeral2 = Rule
{ name = "between|from <amount-of-money> to|and <numeral>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-c"
, Predicate isNatural
, regex "(-н\\s+)?(хооронд|хүртэл)"
]
, prod = \case
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from,
TAmountOfMoney.currency = c}:
_:
Token Numeral NumeralData{TNumeral.value = to}:
_) | from < to ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c
_ -> Nothing
}
ruleIntervalBetween :: Rule
ruleIntervalBetween = Rule
{ name = "between|from <amount-of-money> to|and <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-c"
, Predicate isSimpleAmountOfMoney
, regex "(-н\\s+)?(хооронд|хүртэл)"
]
, prod = \case
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from,
TAmountOfMoney.currency = c1}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c2}:
_) | from < to && c1 == c2 ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1
_ -> Nothing
}
ruleIntervalNumeralDash :: Rule
ruleIntervalNumeralDash = Rule
{ name = "<numeral> - <amount-of-money>"
, pattern =
[ Predicate isPositive
, regex "-"
, Predicate isSimpleAmountOfMoney
]
, prod = \case
(Token Numeral NumeralData{TNumeral.value = from}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) | from < to ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c
_ -> Nothing
}
ruleIntervalDash :: Rule
ruleIntervalDash = Rule
{ name = "<amount-of-money> - <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-"
, Predicate isSimpleAmountOfMoney
]
, prod = \case
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just from,
TAmountOfMoney.currency = c1}:
_:
Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c2}:
_) | from < to && c1 == c2 ->
Just . Token AmountOfMoney . withInterval (from, to) $ currencyOnly c1
_ -> Nothing
}
ruleIntervalMax :: Rule
ruleIntervalMax = Rule
{ name = "under/less/lower/no more than <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-c\\s+(бага|доогуур|ихгүй)"
]
, prod = \case
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) -> Just . Token AmountOfMoney . withMax to $ currencyOnly c
_ -> Nothing
}
ruleIntervalMin :: Rule
ruleIntervalMin = Rule
{ name = "over/above/at least/more than <amount-of-money>"
, pattern =
[ Predicate isSimpleAmountOfMoney
, regex "-c\\s+(их|дээгүүр|илүү)"
]
, prod = \case
(Token AmountOfMoney AmountOfMoneyData{TAmountOfMoney.value = Just to,
TAmountOfMoney.currency = c}:
_) -> Just . Token AmountOfMoney . withMin to $ currencyOnly c
_ -> Nothing
}
rules :: [Rule]
rules =
[ ruleUnitAmount
, ruleBucks
, ruleCent
, ruleDollar
, ruleEUR
, ruleGBP
, ruleIntervalBetweenNumeral
, ruleIntervalBetweenNumeral2
, ruleIntervalBetween
, ruleIntervalMax
, ruleIntervalMin
, ruleIntervalNumeralDash
, ruleIntervalDash
, rulePounds
, rulePrecision
, ruleTugriks
]
|
facebookincubator/duckling
|
Duckling/AmountOfMoney/MN/Rules.hs
|
bsd-3-clause
| 7,236
| 0
| 18
| 1,707
| 1,825
| 1,004
| 821
| 199
| 2
|
{-# LANGUAGE CPP, BangPatterns, Rank2Types, MagicHash #-}
-- |
-- Module : UnboxedAppend
-- Copyright : (c) 2010 Simon Meier
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Stability : experimental
-- Portability : tested on GHC only
--
-- Try using unboxed pointers for the continuation calls to make abstract
-- appends go faster.
--
module UnboxedAppend where
import Foreign
import Foreign.UPtr
import Data.Monoid
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
#ifdef BYTESTRING_IN_BASE
import Data.ByteString.Base (inlinePerformIO)
import qualified Data.ByteString.Base as S
import qualified Data.ByteString.Lazy.Base as L -- FIXME: is this the right module for access to 'Chunks'?
#else
import Data.ByteString.Internal (inlinePerformIO)
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString.Lazy.Internal as L
#endif
import qualified Blaze.ByteString.Builder.Internal as B
import Blaze.ByteString.Builder.Write (Write(..))
import qualified Blaze.ByteString.Builder.Word as B
import Blaze.ByteString.Builder.Word (writeWord8)
import Criterion.Main
------------------------------------------------------------------------------
-- Benchmarks
------------------------------------------------------------------------------
main :: IO ()
main = defaultMain $ concat
[ benchmark "mconcat . map fromWord8"
myfromWord8s
yourfromWord8s
word8s
]
where
benchmark name putF builderF x =
[ bench (name ++ " Put") $
whnf (L.length . toLazyByteString2 . putF) x
, bench (name ++ " Builder") $
whnf (L.length . B.toLazyByteString . builderF) x
]
word8s :: [Word8]
word8s = take 100000 $ cycle [0..]
{-# NOINLINE word8s #-}
myfromWord8s :: [Word8] -> Put ()
myfromWord8s = putBuilder . mconcat . map fromWord8
{-# NOINLINE myfromWord8s #-}
yourfromWord8s :: [Word8] -> B.Builder
yourfromWord8s = mconcat . map B.fromWord8
{-# NOINLINE yourfromWord8s #-}
------------------------------------------------------------------------------
-- The Put type
------------------------------------------------------------------------------
data BufRange = BufRange {-# UNPACK #-} !(Ptr Word8) {-# UNPACK #-} !(Ptr Word8)
newtype Put a = Put {
unPut :: forall r. (a -> PutStep r) -> PutStep r
}
data PutSignal a =
Done {-# UNPACK #-} !(Ptr Word8) a
| BufferFull
{-# UNPACK #-} !Int
{-# UNPACK #-} !(Ptr Word8)
!(PutStep a)
| InsertByteString
{-# UNPACK #-} !(Ptr Word8)
!S.ByteString
!(PutStep a)
type PutStep a = UPtr -> UPtr -> IO (PutSignal a)
instance Monad Put where
return x = Put $ \k -> k x
{-# INLINE return #-}
m >>= f = Put $ \k -> unPut m (\x -> unPut (f x) k)
{-# INLINE (>>=) #-}
m >> n = Put $ \k -> unPut m (\_ -> unPut n k)
{-# INLINE (>>) #-}
------------------------------------------------------------------------------
-- The Builder type with equal signals as the Put type
------------------------------------------------------------------------------
newtype Builder = Builder (forall r. PutStep r -> PutStep r)
instance Monoid Builder where
mempty = Builder id
{-# INLINE mempty #-}
(Builder b1) `mappend` (Builder b2) = Builder $ b1 . b2
{-# INLINE mappend #-}
mconcat = foldr mappend mempty
{-# INLINE mconcat #-}
putBuilder :: Builder -> Put ()
putBuilder (Builder build) = Put $ \k -> build (k ())
fromPut :: Put () -> Builder
fromPut (Put put) = Builder $ \k -> put (\_ -> k)
fromBuildStep :: (forall r. PutStep r -> BufRange -> IO (PutSignal r)) -> Builder
fromBuildStep step = Builder step'
where
step' k op ope = step k (BufRange (uptrToPtr op) (uptrToPtr ope))
{-# INLINE fromBuildStep #-}
callBuildStep :: PutStep a -> BufRange -> IO (PutSignal a)
callBuildStep k (BufRange op ope) = k (ptrToUPtr op) (ptrToUPtr ope)
{-# INLINE callBuildStep #-}
boxBuildStep :: PutStep a -> (BufRange -> IO (PutSignal a))
boxBuildStep step (BufRange op ope) = step (ptrToUPtr op) (ptrToUPtr ope)
{-# INLINE boxBuildStep #-}
unboxBuildStep :: (BufRange -> IO (PutSignal a)) -> PutStep a
unboxBuildStep step op ope = step (BufRange (uptrToPtr op) (uptrToPtr ope))
{-# INLINE unboxBuildStep #-}
fromWriteSingleton :: (a -> Write) -> a -> Builder
fromWriteSingleton write =
mkBuilder
where
mkBuilder x = fromBuildStep step
where
step k (BufRange pf pe)
| pf `plusPtr` size <= pe = do
io pf
let !br' = BufRange (pf `plusPtr` size) pe
callBuildStep k br'
| otherwise =
return $ BufferFull size pf (unboxBuildStep $ step k)
where
Write size io = write x
{-# INLINE fromWriteSingleton #-}
fromWord8 :: Word8 -> Builder
fromWord8 = fromWriteSingleton writeWord8
{-# INLINE fromWord8 #-}
------------------------------------------------------------------------------
-- More explicit implementation of running builders
------------------------------------------------------------------------------
data Buffer = Buffer {-# UNPACK #-} !(ForeignPtr Word8) -- underlying pinned array
{-# UNPACK #-} !(Ptr Word8) -- beginning of slice
{-# UNPACK #-} !(Ptr Word8) -- next free byte
{-# UNPACK #-} !(Ptr Word8) -- first byte after buffer
allocBuffer :: Int -> IO Buffer
allocBuffer size = do
fpbuf <- S.mallocByteString size
let !pbuf = unsafeForeignPtrToPtr fpbuf
return $! Buffer fpbuf pbuf pbuf (pbuf `plusPtr` size)
unsafeFreezeBuffer :: Buffer -> S.ByteString
unsafeFreezeBuffer (Buffer fpbuf p0 op _) =
S.PS fpbuf 0 (op `minusPtr` p0)
unsafeFreezeNonEmptyBuffer :: Buffer -> Maybe S.ByteString
unsafeFreezeNonEmptyBuffer (Buffer fpbuf p0 op _)
| p0 == op = Nothing
| otherwise = Just $ S.PS fpbuf 0 (op `minusPtr` p0)
nextSlice :: Int -> Buffer -> Maybe Buffer
nextSlice minSize (Buffer fpbuf _ op ope)
| ope `minusPtr` op <= minSize = Nothing
| otherwise = Just (Buffer fpbuf op op ope)
runPut :: Monad m
=> (IO (PutSignal a) -> m (PutSignal a)) -- lifting of buildsteps
       -> (Int -> Buffer -> m Buffer)        -- output function for a buffer that is guaranteed to be non-empty; the returned buffer will be filled next
       -> (S.ByteString -> m ())        -- output function for bytestrings that are guaranteed to be non-empty and are inserted directly into the stream
-> Put a -- put to execute
-> Buffer -- initial buffer to be used
-> m (a, Buffer) -- result of put and remaining buffer
runPut liftIO outputBuf outputBS (Put put) =
runStep (put $ (\x -> unboxBuildStep $ finalStep x))
where
finalStep x !(BufRange op _) = return $ Done op x
runStep step buf@(Buffer fpbuf p0 op ope) = do
let !br = BufRange op ope
signal <- liftIO $ callBuildStep step br
case signal of
      Done op' x ->         -- put completed, buffer partially filled
return (x, Buffer fpbuf p0 op' ope)
BufferFull minSize op' nextStep -> do
buf' <- outputBuf minSize (Buffer fpbuf p0 op' ope)
runStep nextStep buf'
InsertByteString op' bs nextStep
| S.null bs -> -- flushing of buffer required
outputBuf 1 (Buffer fpbuf p0 op' ope) >>= runStep nextStep
| p0 == op' -> do -- no bytes written: just insert bytestring
outputBS bs
runStep nextStep buf
| otherwise -> do -- bytes written, insert buffer and bytestring
buf' <- outputBuf 1 (Buffer fpbuf p0 op' ope)
outputBS bs
runStep nextStep buf'
{-# INLINE runPut #-}
-- | A monad for lazily composing lazy bytestrings using continuations.
newtype LBSM a = LBSM { unLBSM :: (a, L.ByteString -> L.ByteString) }
instance Monad LBSM where
return x = LBSM (x, id)
(LBSM (x,k)) >>= f = let LBSM (x',k') = f x in LBSM (x', k . k')
(LBSM (_,k)) >> (LBSM (x',k')) = LBSM (x', k . k')
-- | Execute a put and return the written buffers as the chunks of a lazy
-- bytestring.
toLazyByteString2 :: Put a -> L.ByteString
toLazyByteString2 put =
k (bufToLBSCont (snd result) L.empty)
where
-- initial buffer
buf0 = inlinePerformIO $ allocBuffer B.defaultBufferSize
-- run put, but don't force result => we're lazy enough
LBSM (result, k) = runPut liftIO outputBuf outputBS put buf0
-- convert a buffer to a lazy bytestring continuation
bufToLBSCont = maybe id L.Chunk . unsafeFreezeNonEmptyBuffer
-- lifting an io putsignal to a lazy bytestring monad
liftIO io = LBSM (inlinePerformIO io, id)
-- add buffer as a chunk prepare allocation of new one
outputBuf minSize buf = LBSM
( inlinePerformIO $ allocBuffer (max minSize B.defaultBufferSize)
, bufToLBSCont buf )
-- add bytestring directly as a chunk; exploits postcondition of runPut
-- that bytestrings are non-empty
outputBS bs = LBSM ((), L.Chunk bs)
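-- Illustrative usage (added, not part of the original benchmark), assuming
-- the Put/Builder machinery above behaves like blaze-builder's:
--   L.unpack (toLazyByteString2 (putBuilder (mconcat (map fromWord8 [72, 105]))))
--     == "Hi"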
|
meiersi/blaze-builder
|
benchmarks/UnboxedAppend.hs
|
bsd-3-clause
| 9,316
| 0
| 18
| 2,345
| 2,393
| 1,249
| 1,144
| 169
| 3
|
module Distance (DistanceFunction, minkowski, euclidean, manhattan, cosineDistance) where
import HuskyML
-- | Given two vectors returns a distance
type DistanceFunction = [Feature] -> [Feature] -> Double
-- | Given two vectors and a degree returns the minkowski distance
minkowski :: Int -> DistanceFunction
minkowski p xs ys = (**) (sum $ zipWith (\x y -> abs(x `diff` y) ^ p) xs ys) (1.0 / fromIntegral p)
-- | Given two vectors returns the euclidean distance
euclidean :: DistanceFunction
euclidean xs ys = minkowski 2 xs ys
-- | Given two vectors returns the manhattan distance
manhattan :: DistanceFunction
manhattan xs ys = minkowski 1 xs ys
-- | Given two vectors, returns the absolute value of (cosine similarity - 1): 0 for the same orientation (similarity 1), 1 for orthogonal vectors (similarity 0) and 2 for opposite orientation (similarity -1)
cosineDistance :: DistanceFunction
cosineDistance xs ys = abs $ (+) (-1) $ (/) (sum $ zipWith (\x y -> x `times` y) xs ys) ((magnitude xs) * (magnitude ys))
magnitude xs = sqrt $ sum $ zipWith (\x y -> x `times` y) xs xs
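-- Worked example (added for clarity): with p = 2 the Minkowski distance is the
-- Euclidean distance and with p = 1 the Manhattan distance; for the 2-D points
-- (0,0) and (3,4) that gives (3^2 + 4^2) ** (1/2) == 5.0 and (3 + 4) ** 1 == 7.0.
-- cosineDistance is |cos - 1|, i.e. 0 for vectors with the same orientation,
-- 1 for orthogonal vectors and 2 for vectors pointing in opposite directions.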
|
jc423/Husky
|
src/Distance.hs
|
bsd-3-clause
| 1,043
| 0
| 14
| 188
| 299
| 167
| 132
| 12
| 1
|
module EFA.Report.Format where
import qualified EFA.Equation.RecordIndex as RecIdx
import qualified EFA.Equation.Mix as Mix
import qualified Type.Data.Num.Unary as Unary
import qualified Data.Char.Small as SmallChar
import qualified Data.Char.Number as NumberChar
import qualified Data.FixedLength as FixedLength
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Bool.HT (if')
import Data.List (intercalate)
import Data.Ratio (Ratio, numerator, denominator)
import Data.Foldable (foldr1)
import Data.Maybe (fromMaybe)
import Text.Printf (PrintfArg, printf)
import qualified Prelude as P
import Prelude hiding (words, lines, sum, negate, foldr1)
-- * special Unicode characters
deltaChar :: Char
deltaChar = '\x2206'
heartChar :: Char
heartChar = '\x2665'
-- * common output types
newtype ASCII = ASCII { unASCII :: String }
newtype Unicode = Unicode { unUnicode :: String }
newtype Latex = Latex { unLatex :: String }
-- * class for unified handling of ASCII, Unicode and LaTeX output
{-
We need the ConstOne function in the solver
in order to create a signal for the sum of split factors,
whose length matches the length of other signals.
A different approach would be to manage the length of signals
as a quantity for the solver,
but then we would need a length function.
-}
data Function = Absolute | Signum | ConstOne
deriving (Eq, Ord, Show)
class Format output where
literal :: String -> output
integer :: Integer -> output
real :: (RealFrac a, PrintfArg a, Ord a) => a -> output
ratio :: (Integral a, Show a) => Ratio a -> output
subscript :: output -> output -> output
connect :: output -> output -> output
link :: output -> output -> output
list :: [output] -> output
undetermined :: output
empty :: output
words, lines :: [output] -> output
assign :: output -> output -> output
pair :: output -> output -> output
nodeInt :: output -> output -> output
nodeString :: output -> output -> output
nodeStorage :: output
nodeSink :: output
nodeAlwaysSink :: output
nodeSource :: output
nodeAlwaysSource :: output
nodeCrossing :: output
nodeDeadNode :: output
nodeNoRestriction :: output
function :: Function -> output -> output
integral :: output -> output
recordDelta :: RecIdx.Delta -> output -> output
mixComponent :: output -> output -> output
mixPair :: output -> output -> output
mix :: Unary.Natural n =>
output -> FixedLength.T (Unary.Succ n) output -> output
initial, exit :: output
sectionNode :: output -> output -> output
directionIn, directionOut :: output
delta :: output -> output
energy, maxEnergy, power, xfactor, eta :: output
dtime, signalSum, scalarSum, storage :: output
parenthesize, negate, recip :: output -> output
plus, minus, multiply :: output -> output -> output
intPower :: output -> Integer -> output
showRaw :: output -> String
instance Format ASCII where
-- may need some escaping for non-ASCII characters
literal = ASCII
integer = ASCII . show
real = ASCII . realExp
ratio r = ASCII $ show (numerator r) ++ "/" ++ show (denominator r)
subscript (ASCII t) (ASCII s) = ASCII $ t ++ "_" ++ s
connect (ASCII t) (ASCII s) = ASCII $ t ++ "_" ++ s
link (ASCII t) (ASCII s) = ASCII $ t ++ ":" ++ s
list = ASCII . ("["++) . (++"]") . intercalate "," . map unASCII
undetermined = ASCII "?"
empty = ASCII ""
lines = ASCII . unlines . map unASCII
words = ASCII . unwords . map unASCII
assign (ASCII lhs) (ASCII rhs) =
ASCII $ lhs ++ " = " ++ rhs
pair (ASCII lhs) (ASCII rhs) =
ASCII $ "(" ++ lhs ++ ", " ++ rhs ++ ")"
nodeInt (ASCII typ) (ASCII num) = ASCII $ typ++num
nodeString (ASCII typ) (ASCII num) = ASCII $ typ++"-"++num
nodeStorage = ASCII "St"
nodeSink = ASCII "Si"
nodeAlwaysSink = ASCII "ASi"
nodeSource = ASCII "So"
nodeAlwaysSource = ASCII "ASo"
nodeCrossing = ASCII "C"
nodeDeadNode = ASCII "D"
nodeNoRestriction = ASCII "Any"
function f (ASCII rest) =
ASCII $
case f of
Absolute -> "|" ++ rest ++ "|"
Signum -> "sgn(" ++ rest ++ ")"
ConstOne -> "constone(" ++ rest ++ ")"
integral (ASCII x) = ASCII $ "integrate(" ++ x ++ ")"
recordDelta d (ASCII rest) =
ASCII $ (++rest) $
case d of
RecIdx.Before -> "[0]"
RecIdx.After -> "[1]"
RecIdx.Delta -> "d"
mixComponent (ASCII c) (ASCII x) = ASCII $ c ++ "?" ++ x
mixPair (ASCII x) (ASCII y) = ASCII $ x ++ "," ++ y
mix (ASCII s) xs =
case foldr1 mixPair xs of
ASCII v -> ASCII $ s ++ " [" ++ v ++ "]"
initial = ASCII "init"
exit = ASCII "exit"
sectionNode (ASCII s) (ASCII x) = ASCII $ s ++ "." ++ x
directionIn = ASCII shortIn
directionOut = ASCII shortOut
delta (ASCII s) = ASCII $ 'd':s
energy = ASCII "E"
maxEnergy = ASCII "Em"
power = ASCII "P"
xfactor = ASCII "x"
eta = ASCII "n"
dtime = ASCII "dt"
signalSum = ASCII "SS"
scalarSum = ASCII "Ss"
storage = ASCII "s"
parenthesize (ASCII x) = ASCII $ "(" ++ x ++ ")"
negate (ASCII x) = ASCII $ '-' : x
recip (ASCII x) = ASCII $ "1/(" ++ x ++ ")"
plus (ASCII x) (ASCII y) = ASCII $ x ++ " + " ++ y
minus (ASCII x) (ASCII y) = ASCII $ x ++ " - " ++ y
multiply (ASCII x) (ASCII y) = ASCII $ x ++ " * " ++ y
intPower (ASCII x) n = ASCII $ x ++ "^" ++ showsPrec 10 n ""
showRaw (ASCII x) = x
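-- Illustrative rendering with the ASCII instance (added for clarity):
--   showRaw (assign (subscript energy (integer 3)) (integer 5) :: ASCII)
--     == "E_3 = 5"
-- The Unicode and Latex instances below render the same expression in their
-- own notation (e.g. "E_{3} = 5" for Latex).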
instance Format Unicode where
literal = Unicode
integer = Unicode . show
real = Unicode . realExp
ratio r =
Unicode $
Map.findWithDefault
(show (numerator r) ++ "/" ++ show (denominator r))
r ratioCharMap
subscript (Unicode t) (Unicode s) = Unicode $ t ++ "_" ++ s
connect (Unicode t) (Unicode s) = Unicode $ t ++ "_" ++ s
link (Unicode t) (Unicode s) = Unicode $ t ++ ":" ++ s
list = Unicode . ("["++) . (++"]") . intercalate "," . map unUnicode
undetermined = Unicode [heartChar]
empty = Unicode ""
lines = Unicode . unlines . map unUnicode
words = Unicode . unwords . map unUnicode
assign (Unicode lhs) (Unicode rhs) =
Unicode $ lhs ++ " = " ++ rhs
pair (Unicode lhs) (Unicode rhs) =
Unicode $ "(" ++ lhs ++ ", " ++ rhs ++ ")"
nodeInt (Unicode typ) (Unicode num) = Unicode $ typ++num
nodeString (Unicode typ) (Unicode num) = Unicode $ typ++"-"++num
nodeStorage = Unicode "St"
nodeSink = Unicode "Si"
nodeAlwaysSink = Unicode "ASi"
nodeSource = Unicode "So"
nodeAlwaysSource = Unicode "ASo"
nodeCrossing = Unicode "C"
nodeDeadNode = Unicode "D"
nodeNoRestriction = Unicode "Any"
function f (Unicode rest) =
Unicode $
case f of
Absolute -> "|" ++ rest ++ "|"
Signum -> "sgn(" ++ rest ++ ")"
ConstOne -> "\x2474(" ++ rest ++ ")"
integral (Unicode x) = Unicode $ "\x222B(" ++ x ++ ")"
recordDelta d (Unicode rest) =
Unicode $ (++rest) $
case d of
RecIdx.Before -> "\x2070"
RecIdx.After -> "\xb9"
RecIdx.Delta -> [deltaChar]
mixComponent (Unicode c) (Unicode x) = Unicode $ c ++ "?" ++ x
mixPair (Unicode x) (Unicode y) = Unicode $ x ++ "," ++ y
mix (Unicode s) xs =
case foldr1 mixPair xs of
Unicode v -> Unicode $ s ++ " [" ++ v ++ "]"
initial = Unicode "init"
exit = Unicode "exit"
sectionNode (Unicode s) (Unicode x) = Unicode $ s ++ "." ++ x
directionIn = Unicode shortIn
directionOut = Unicode shortOut
delta (Unicode s) = Unicode $ deltaChar:s
energy = Unicode "E"
maxEnergy = Unicode "\xCA"
power = Unicode "P"
xfactor = Unicode "x"
eta = Unicode "\x03B7"
dtime = Unicode "dt"
signalSum = Unicode "\x03A3"
scalarSum = Unicode "\x03C3"
storage = Unicode "s"
parenthesize (Unicode x) = Unicode $ "(" ++ x ++ ")"
negate (Unicode x) = Unicode $ '-' : x
recip (Unicode x) = Unicode $ "\x215f(" ++ x ++ ")"
plus (Unicode x) (Unicode y) = Unicode $ x ++ " + " ++ y
minus (Unicode x) (Unicode y) = Unicode $ x ++ " - " ++ y
multiply (Unicode x) (Unicode y) = Unicode $ x ++ "\xb7" ++ y
intPower (Unicode x) n =
-- writing many digits in superscript looks ugly in a monospace font
Unicode $
x ++ map (\c -> fromMaybe c $ SmallChar.superscriptMaybe c) (show n)
showRaw (Unicode x) = x
ratioCharMap :: Integral a => Map (Ratio a) String
ratioCharMap =
let xys = fmap (:[]) NumberChar.fractionMap
in Map.union xys (fmap ('-':) $ Map.mapKeys P.negate xys)
instance Format Latex where
-- we may use the 'latex' package for escaping non-ASCII characters
literal = Latex
integer = Latex . show
real = Latex . printf "%.6f"
ratio r = Latex $ "\\frac{" ++ show (numerator r) ++ "}{" ++ show (denominator r) ++ "}"
subscript (Latex t) (Latex s) = Latex $ t ++ "_{" ++ s ++ "}"
connect (Latex t) (Latex s) = Latex $ t ++ "." ++ s
link (Latex t) (Latex s) = Latex $ t ++ ":" ++ s
list = Latex . ("["++) . (++"]") . intercalate ", " . map unLatex
undetermined = Latex "\\heartsuit "
empty = Latex ""
lines = Latex . intercalate "\\\\\n" . map unLatex
words = Latex . unwords . map unLatex
assign (Latex lhs) (Latex rhs) =
Latex $ lhs ++ " = " ++ rhs
pair (Latex lhs) (Latex rhs) =
Latex $ "(" ++ lhs ++ ", " ++ rhs ++ ")"
nodeInt (Latex typ) (Latex num) = Latex $ typ++num
nodeString (Latex typ) (Latex num) = Latex $ typ++"-"++num
nodeStorage = Latex "St"
nodeSink = Latex "Si"
nodeAlwaysSink = Latex "ASi"
nodeSource = Latex "So"
nodeAlwaysSource = Latex "ASo"
nodeCrossing = Latex "C"
nodeDeadNode = Latex "D"
nodeNoRestriction = Latex "Any"
function f (Latex rest) =
Latex $
case f of
Absolute -> "\\abs{" ++ rest ++ "}"
Signum -> "\\sgn{\\left(" ++ rest ++ "\\right)}"
ConstOne -> "\\mathbb{1}(" ++ rest ++ ")"
integral (Latex x) = Latex $ "\\int\\left(" ++ x ++ "\\right)"
recordDelta d (Latex rest) =
Latex $
case d of
{-
http://math.mit.edu/~ssam/latex
\newcommand{\leftexp}[2]{{\vphantom{#2}}^{#1}{#2}}
alternatively use packages leftidx or tensor:
http://tex.stackexchange.com/questions/11542/left-and-right-subscript
-}
RecIdx.Before -> "\\leftexp{0}{" ++ rest ++ "}"
RecIdx.After -> "\\leftexp{1}{" ++ rest ++ "}"
RecIdx.Delta -> "\\Delta " ++ rest
mixComponent (Latex c) (Latex x) = Latex $ c ++ "?" ++ x
mixPair (Latex x) (Latex y) = Latex $ x ++ "," ++ y
mix (Latex s) xs =
case foldr1 mixPair xs of
Latex v -> Latex $ s ++ " [" ++ v ++ "]"
initial = Latex "\\mbox{init}"
exit = Latex "\\mbox{exit}"
sectionNode (Latex s) (Latex x) = Latex $ s ++ ":" ++ x
directionIn = Latex shortIn
directionOut = Latex shortOut
delta (Latex s) = Latex $ "\\Delta " ++ s
energy = Latex "E"
maxEnergy = Latex "\\^E"
power = Latex "P"
xfactor = Latex "x"
eta = Latex "\\eta"
dtime = Latex "\\dif t"
signalSum = Latex "\\Sigma"
scalarSum = Latex "\\sigma"
storage = Latex "s"
parenthesize (Latex x) = Latex $ "(" ++ x ++ ")"
negate (Latex x) = Latex $ '-' : x
recip (Latex x) = Latex $ "\\frac{1}{" ++ x ++ "}"
plus (Latex x) (Latex y) = Latex $ x ++ " + " ++ y
minus (Latex x) (Latex y) = Latex $ x ++ " - " ++ y
multiply (Latex x) (Latex y) = Latex $ x ++ " \\cdot " ++ y
intPower (Latex x) n = Latex $ x ++ "^{" ++ show n ++ "}"
showRaw (Latex x) = x
ratioAuto :: (Integral a, Show a, Format output) => Ratio a -> output
ratioAuto r =
if denominator r == 1
then integer $ toInteger $ numerator r
else ratio r
-- | Format a real number with adaptive precision so the figures stay readable
realExp :: (RealFrac a, PrintfArg a) => a -> String
realExp x =
case round x of
xi ->
if' (abs x < 100 && x == fromInteger xi) (show xi) $
if' (abs x < 1) (printf "%.5f" x) $
if' (abs x < 10) (printf "%.4f" x) $
if' (abs x < 100) (printf "%.3f" x) $
if' (abs x < 1000) (printf "%.2f" x) $
if' (abs x < 10000) (printf "%.1f" x) $
if' (abs x < 1e6) (printf "%.0f" x) $
if' (abs x < 1e9) (printf "%.3f E6" (x*1e-6)) $
if' (abs x < 1e12) (printf "%.3f E9" (x*1e-9)) $
if' (abs x < 1e15) (printf "%.3f E12" (x*1e-12)) $
(printf "%.e" x)
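-- Illustrative outputs of realExp (added, assuming Double arguments):
--   realExp 42.0    == "42"        (small integral values are shown exactly)
--   realExp 3.14159 == "3.1416"    (|x| < 10 gets four decimal places)
--   realExp 1.25e7  == "12.500 E6" (large values switch to an E6/E9/E12 suffix)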
class Record record where
record :: Format output => record -> output -> output
instance Record RecIdx.Absolute where
record RecIdx.Absolute = id
instance Record RecIdx.Delta where
record = recordDelta
instance Record rec => Record (RecIdx.ExtDelta rec) where
record (RecIdx.ExtDelta d r) = recordDelta d . record r
class MixRecord len where
mixRecord ::
(Mix.Direction dir, Format output) =>
RecIdx.Mix dir len -> output -> output
instance (Unary.Natural n) => MixRecord (FixedLength.Index n) where
mixRecord RecIdx.MixTotal = id
mixRecord (RecIdx.MixComponent pos) =
mixComponent (integer $ fromIntegral $ FixedLength.numFromIndex pos)
instance (Mix.Direction dir, MixRecord len) => Record (RecIdx.Mix dir len) where
record = mixRecord
instance
(Mix.Direction dir, MixRecord len, Record rec) =>
Record (RecIdx.ExtMix dir len rec) where
record (RecIdx.ExtMix m r) = mixRecord m . record r
shortIn, shortOut :: String
shortIn = "i" -- possible confusion with 'initial' from the Format class?
shortOut = "o"
|
energyflowanalysis/efa-2.1
|
src/EFA/Report/Format.hs
|
bsd-3-clause
| 13,465
| 0
| 23
| 3,508
| 4,997
| 2,583
| 2,414
| 326
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Web.Scotty
import Control.Monad
import Text.Regex.PCRE
import Data.Char
import Control.Monad
import Control.Monad.IO.Class
import System.Environment (getEnv)
import Network.Wai.Middleware.RequestLogger (logStdoutDev)
import qualified Data.Set as Set
import qualified Data.Text.Lazy as Text
import Pronunciation
import Dictionary
import Rhyme
import Sonnet
main :: IO ()
main = do
commonWords <- fmap (Set.fromList . lines) $ readFile "./resources/common_words.txt"
pronunciationLines <- fmap lines $ readFile "./resources/cmudict.txt"
putStrLn "Loading dictionary into memory..."
let dictEntries = map parseLine pronunciationLines
wordIsCommon (word,_) = Set.member word commonWords
commonEntries = (filter wordIsCommon dictEntries)
dict = createDictionary commonEntries
trie = createRhymeTrie commonEntries
dict `seq` putStrLn "Load complete."
print "Loading rhyme trie into memory..."
trie `seq` print "Load complete."
port <- read <$> getEnv "PORT"
scotty port $ do
middleware logStdoutDev
get "/" $ do
sonnetLines <- liftIO $ generateSonnet dict trie
text $ Text.pack (unlines sonnetLines)
parseLine :: String -> (String, Pronunciation)
parseLine line =
let regex = "^([^ ]*) *(.*)$" :: String
matchResults:_ = line =~ regex :: [[String]]
in (map toLower (matchResults !! 1), words (matchResults !! 2))
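-- Illustrative example (added): a CMU dictionary line such as
-- "HELLO  HH AH0 L OW1" is split into the lower-cased word and its phones:
--   parseLine "HELLO  HH AH0 L OW1" == ("hello", ["HH","AH0","L","OW1"])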
|
mg50/hypnerotomachia
|
src/Main.hs
|
bsd-3-clause
| 1,449
| 0
| 16
| 256
| 409
| 214
| 195
| 40
| 1
|
module Edit
( executeEditCommand
) where
import Control.Monad
import Control.Monad.Reader
import Control.Monad.Maybe
import Database.HDBC
import Data.Char (isDigit)
import Data.List
import Text.Parsec
import System.Console.Haskeline
import TodoArguments
import Util
import Configuration
import Filter
import Range
import DataTypes
executeEditCommand :: Config -> TodoCommand -> IO ()
executeEditCommand config editCommand = do
mconn <- getDatabaseConnection config editCommand
case mconn of
Nothing -> gracefulExit
Just conn -> do
sequence_ . intersperse putNewline . map (editSingleId conn) . getEditRanges . editRanges $ editCommand
commit conn
disconnect conn
where
      -- TODO The edit command needs to deal with every specified item, so it should perhaps run only
      -- one query against the database to get all of the required pieces of information. You can
      -- still save everything back separately.
getEditRanges :: String -> [Integer]
getEditRanges input = case parse (parseRanges ',') "(edit_ranges)" input of
Left _ -> []
Right x -> fromMergedRanges x
-- TODO Turn this into an EitherT instance
editSingleId :: (IConnection c) => c -> Integer -> IO ()
editSingleId conn id = do
putStrLn $ "Now editing item: " ++ show id
d <- runMaybeT $ getEditData conn id
case d of
Nothing -> putStrLn $ "Could not find data for id: " ++ show id
Just oldData@(oldDesc,_,oldTags) -> do
newData <- runInputT defaultSettings $ runMaybeT (askEditQuestions oldData)
case newData of
Nothing -> putStrLn "Invalid input or early termination."
Just (desc, pri, tags) -> do
-- TODO create an edit event here to log the change
run conn updateItem [toSql desc, toSql pri, toSql id]
run conn "INSERT INTO item_events (item_id, item_event_type, event_description, occurred_at) VALUES (?,?,?, datetime())" [toSql id, toSql $ fromEnum EventEdit, toSql oldDesc]
cs <- prepare conn createStatement
ds <- prepare conn deleteStatement
findOrCreateTags conn id (tags \\ oldTags) >>= mapM_ (createTagMapping cs id)
getTagMapIds conn id (oldTags \\ tags) >>= mapM_ (deleteTagMapping ds id)
putStrLn $ "Successfully updated item " ++ show id ++ "."
-- please note that we intentionally do not delete tags here; just the
-- mappings, we leave them around for later use. The 'hdo clean' or maybe
-- 'hdo gc' command will do that cleanup I think.
where
updateItem = "UPDATE items SET description = ?, priority = ? where id = ?"
getTagMapIds :: (IConnection c) => c -> Integer -> [String] -> IO [Integer]
getTagMapIds _ _ [] = return []
getTagMapIds conn itemId tags = fmap (map fromSql . concat) $ quickQuery' conn theQuery [toSql itemId]
where
theQuery = "SELECT tm.tag_id from tag_map tm, tags t where t.id = tm.tag_id and (" ++ tagOrList ++ ") and tm.item_id = ?"
tagOrList = createOrList "t.tag_name =" tags
deleteTagMapping :: Statement -> Integer -> Integer -> IO ()
deleteTagMapping statement itemId tagId = execute statement [toSql itemId, toSql tagId] >> return ()
createTagMapping :: Statement -> Integer -> Integer -> IO ()
createTagMapping statement itemId tagId = execute statement [toSql itemId, toSql tagId] >> return ()
createStatement = "INSERT INTO tag_map (item_id, tag_id, created_at) VALUES (?,?, datetime())"
deleteStatement = "DELETE FROM tag_map WHERE item_id = ? AND tag_id = ?"
getDescAndPri :: [[SqlValue]] -> Maybe (String, Integer)
getDescAndPri vals = case vals of
[[a,b]] -> Just (fromSql a, fromSql b)
_ -> Nothing
getEditData :: (IConnection c) => c -> Integer -> MaybeT IO (String, Integer, [String])
getEditData conn id = do
[[sqlDescription, sqlPriority]] <- lift $ quickQuery' conn "select description, priority from items where id = ?" [toSql id]
sqlTags <- lift $ quickQuery' conn "select t.tag_name from tags t, tag_map tm, items i where i.id = tm.item_id and tm.tag_id = t.id and i.id = ?" [toSql id]
return (fromSql sqlDescription, fromSql sqlPriority, map fromSql (concat sqlTags))
askEditQuestions :: (String, Integer, [String]) -> MaybeT (InputT IO) (String, Integer, [String])
askEditQuestions (desc, pri, tags) = do
Just newDesc <- lift $ getInputLineWithInitial "comment> " $ defInit desc
guard (not $ null newDesc)
Just newPri <- lift $ getInputLineWithInitial "priority> " . defInit $ show pri
guard (not $ null newPri)
guard (all isDigit newPri) -- the priority must be a digit
Just newTags <- lift $ getInputLineWithInitial "tags> " . defInit $ unwords tags
return (newDesc, read newPri, nub . words $ newTags)
where defInit a = (a, "")
|
robertmassaioli/hdo
|
src/Edit.hs
|
bsd-3-clause
| 5,613
| 0
| 22
| 1,867
| 1,287
| 644
| 643
| 78
| 7
|
-- code structure written by John MacFarlane;
-- I filled in some missing pieces and made it compile.
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE OverloadedStrings #-}
module Hack2.Handler.HappstackServer (run, runWithConfig, ServerConf(..), appToServerPart) where
import Control.Arrow ((>>>))
import "mtl" Control.Monad.State
import Data.Char
import Data.Default
import Data.List
import Data.Maybe
import qualified Hack2 as Hack2
import Happstack.Server.SimpleHTTP as Happstack hiding (port, escape)
import Network.URI (escapeURIString, isAllowedInURI)
import Control.Applicative
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.Map as M
import qualified Happstack.Server.SimpleHTTP as H
-- enum helper start
import qualified Data.ByteString.Char8 as Strict
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.Enumerator.Binary as EB
import qualified Data.Enumerator.List as EL
import Data.Enumerator (run_, enumList, Enumerator, ($$))
fromEnumerator :: Monad m => Enumerator Strict.ByteString m B.ByteString -> m B.ByteString
fromEnumerator m = run_ $ m $$ EB.consume
toEnumerator :: Monad m => B.ByteString -> Enumerator Strict.ByteString m a
toEnumerator = enumList 1 . B.toChunks
-- enum helper end
data ServerConf = ServerConf { port :: Int, serverName :: L.ByteString }
deriving (Show)
instance Default ServerConf where
def = ServerConf { port = 3000, serverName = "localhost"}
runWithConfig :: ServerConf -> Hack2.Application -> IO ()
runWithConfig conf = simpleHTTP nullConf { H.port = port conf } . appToServerPart conf
run :: Hack2.Application -> IO ()
run = runWithConfig def
appToServerPart :: ServerConf -> Hack2.Application -> ServerPart (Happstack.Response)
appToServerPart conf app = askRq >>= \req -> liftIO $ (app $ reqToEnv req) >>= hackRToServerPartR
where
reqToEnv req =
def
{ Hack2.requestMethod = convertRequestMethod $ rqMethod req
, Hack2.pathInfo = S.pack $ escape $ "/" ++ (intercalate "/" $ rqPaths req)
, Hack2.queryString = S.pack $ escape $ dropWhile (== '?') $ rqQuery req
, Hack2.serverName = l2s $ serverName conf
, Hack2.serverPort = (snd $ rqPeer req)
, Hack2.httpHeaders = headersToHttp (rqHeaders req)
, Hack2.hackInput = Hack2.HackEnumerator $ toEnumerator $ (\(Body x) -> x) (rqBody req)
, Hack2.hackHeaders = [("RemoteHost", S.pack $ fst $ rqPeer req)]
}
escape = escapeURIString isAllowedInURI
convertRequestMethod Happstack.OPTIONS = Hack2.OPTIONS
convertRequestMethod Happstack.GET = Hack2.GET
convertRequestMethod Happstack.HEAD = Hack2.HEAD
convertRequestMethod Happstack.POST = Hack2.POST
convertRequestMethod Happstack.PUT = Hack2.PUT
convertRequestMethod Happstack.DELETE = Hack2.DELETE
convertRequestMethod Happstack.TRACE = Hack2.TRACE
convertRequestMethod Happstack.CONNECT = Hack2.CONNECT
headersToHttp :: Headers -> [(S.ByteString, S.ByteString)]
headersToHttp = M.toList >>> map snd >>> map headerToPair
where
headerToPair (HeaderPair k v) =
(S.pack $ normalizeHeader $ S.unpack k, S.intercalate " " v)
hackRToServerPartR :: Hack2.Response -> IO Happstack.Response
hackRToServerPartR r = do
body_bytestring <- fromEnumerator $ Hack2.unHackEnumerator $ Hack2.body r
return $
Happstack.Response
{ rsCode = Hack2.status r
, rsHeaders = httpToHeaders $ Hack2.headers r
, rsFlags = RsFlags {rsfContentLength = False}
, rsBody = body_bytestring
, rsValidator = Nothing
}
l2s :: L.ByteString -> S.ByteString
l2s = S.concat . L.toChunks
s2l :: S.ByteString -> L.ByteString
s2l = L.fromChunks . return
httpToHeaders :: [(S.ByteString, S.ByteString)] -> Headers
httpToHeaders = map pairToHeader >>> M.fromList
where
pairToHeader (k,v) =
((S.pack $ map toLower $ S.unpack k), HeaderPair (k) [v])
-- happstack converts all request headers to lowercase ...
-- so we need to convert them back ...
normalizeHeader :: String -> String
normalizeHeader s = fromMaybe s $ find (map toLower >>> (== s) ) headerList
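-- For example (illustrative, added): normalizeHeader "content-type" gives
-- "Content-Type", while a header not in the list below, e.g. "x-custom",
-- is returned unchanged.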
headerList :: [String]
headerList =
[ "Cache-Control"
, "Connection"
, "Date"
, "Pragma"
, "Transfer-Encoding"
, "Upgrade"
, "Via"
, "Accept"
, "Accept-Charset"
, "Accept-Encoding"
, "Accept-Language"
, "Authorization"
, "Cookie"
, "Expect"
, "From"
, "Host"
, "If-Modified-Since"
, "If-Match"
, "If-None-Match"
, "If-Range"
, "If-Unmodified-Since"
, "Max-Forwards"
, "Proxy-Authorization"
, "Range"
, "Referer"
, "User-Agent"
, "Age"
, "Location"
, "Proxy-Authenticate"
, "Public"
, "Retry-After"
, "Server"
, "Set-Cookie"
, "TE"
, "Trailer"
, "Vary"
, "Warning"
, "WWW-Authenticate"
, "Allow"
, "Content-Base"
, "Content-Encoding"
, "Content-Language"
, "Content-Length"
, "Content-Location"
, "Content-MD5"
, "Content-Range"
, "Content-Type"
, "ETag"
, "Expires"
, "Last-Modified"
  , "Content-Transfer-Encoding"
]
|
nfjinjing/hack2-handler-happstack-server
|
src/Hack2/Handler/HappstackServer.hs
|
bsd-3-clause
| 5,783
| 0
| 14
| 1,662
| 1,356
| 774
| 582
| 132
| 8
|
import Control.Concurrent
import System.IO
import Network
main :: IO ()
main = do
soc <- listenOn $ PortNumber 54492
(h, _, _) <- accept soc
hPutStrLn h "Good-bye!"
threadDelay 1000000
hClose h
|
YoshikuniJujo/xmpipe
|
test/exSever.hs
|
bsd-3-clause
| 200
| 0
| 9
| 38
| 82
| 39
| 43
| 10
| 1
|
module Day4
( parseRoom
, parseRooms
, Room(..)
, genCksum
, getValidRooms
, getSectorSum
, decryptChar
, decryptRoom
) where
import Text.Regex.Posix
import Data.Map as Map
import Data.List as List
import Data.Ord
import Data.Char as Char
data Room = Room { name :: String, sector :: Integer, cksum :: String } deriving (Eq, Show)
parseRoom :: String -> Room
parseRoom roomstr = let (_,_,_,(n:s:ck:_)) = (roomstr =~ "^([a-z-]+)-([0-9]+).([a-z]+).$" :: (String,String,String,[String]))
in Room n (read s) ck
parseRooms :: [String] -> [Room]
parseRooms = List.map parseRoom
sortTuples :: (Char,Integer) -> (Char,Integer) -> Ordering
sortTuples (c1,f1) (c2,f2) =
case compare f1 f2 of
EQ -> compare c1 c2
LT -> GT
GT -> LT
genCksum :: String -> String
genCksum s =
let freqmap = List.foldr (\c fs -> Map.insertWith (+) c 1 fs) Map.empty s
topfive = List.take 5 $ List.sortBy sortTuples $ Map.toList freqmap
in List.map (\(char,_) -> char) topfive
getValidRooms :: [Room] -> [Room]
getValidRooms = List.filter (\r -> genCksum (List.filter (/='-') (name r)) == cksum r)
getSectorSum :: [Room] -> Integer
getSectorSum = List.sum . List.map (sector) . getValidRooms
decryptChar :: Integer -> Char -> Char
decryptChar i c =
case c of
'-' -> ' '
_ -> let cidx = Char.ord c - Char.ord 'a'
sftidx = (cidx + fromIntegral i) `mod` 26
sftalign = sftidx + Char.ord 'a'
in Char.chr sftalign
decryptRoom :: Room -> Room
decryptRoom r = let decname = List.map (decryptChar (sector r)) $ name r
in Room decname (sector r) (cksum r)
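-- Illustrative value (added, not part of the original module): the Advent of
-- Code 2016 day 4 example room "qzmt-zixmtkozy-ivhz" with sector 343 decrypts
-- to "very encrypted name"; its checksum according to genCksum is "zimth".
exampleRoom :: Room
exampleRoom = decryptRoom (Room "qzmt-zixmtkozy-ivhz" 343 "zimth")
-- name exampleRoom == "very encrypted name"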
|
reidwilbur/aoc2016
|
src/Day4.hs
|
bsd-3-clause
| 1,651
| 0
| 15
| 389
| 686
| 373
| 313
| 46
| 3
|
---------------------------------------------------------------
-- |
-- Module : Data.Minecraft.Release194.Version
-- Copyright : (c) 2016 Michael Carpenter
-- License : BSD3
-- Maintainer : Michael Carpenter <oldmanmike.dev@gmail.com>
-- Stability : experimental
-- Portability : portable
--
---------------------------------------------------------------
module Data.Minecraft.Release194.Version
( version
, minecraftVersion
, majorVersion
) where
version :: Int
version = 110
minecraftVersion :: String
minecraftVersion = "1.9.4"
majorVersion :: String
majorVersion = "1.9"
|
oldmanmike/hs-minecraft-protocol
|
src/Data/Minecraft/Release194/Version.hs
|
bsd-3-clause
| 616
| 0
| 4
| 101
| 59
| 41
| 18
| 10
| 1
|
import Test.Hspec
import TLV
main :: IO ()
main = hspec $ do
describe "parseTLVs" $ do
it "gracefully handles no bytes" $ do
parseTLVs [] `shouldBe` []
describe "a single TLV" $ do
describe "by tag" $ do
it "can parse a 1-byte long tag" $ do
let input = [0x5A, 0x01, 0x01]
expected = [([0x5A], [0x01])]
parseTLVs input `shouldBe` expected
it "can parse a 2-byte long tag" $ do
let input = [0x9F, 0x34, 0x01, 0x01]
expected = [([0x9F, 0x34], [0x01])]
parseTLVs input `shouldBe` expected
it "can parse a 3-byte long tag" $ do
let input = [0x9F, 0x9F, 0x34, 0x01, 0x01]
expected = [([0x9F, 0x9F, 0x34], [0x01])]
parseTLVs input `shouldBe` expected
describe "by length" $ do
it "can parse a length value of 0" $ do
let input = [0x5A, 0x00]
expected = [([0x5A], [])]
parseTLVs input `shouldBe` expected
it "can parse a 2-byte length" $ do
let input = [0x5A, 0x81, 0x01, 0x01]
expected = [([0x5A], [0x01])]
parseTLVs input `shouldBe` expected
it "can parse a 3-byte length" $ do
let input = [0x5A, 0x82, 0x00, 0x01, 0x01]
expected = [([0x5A], [0x01])]
parseTLVs input `shouldBe` expected
it "can parse a multi-byte value" $ do
let input = [0x5A, 0x06, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06]
expected = [([0x5A], [0x01, 0x02, 0x03, 0x04, 0x05, 0x06])]
parseTLVs input `shouldBe` expected
it "can parse multiple concatenated TLVs" $ do
let input = [0x5A, 0x01, 0x01, 0x5B, 0x01, 0x01]
expected = [([0x5A], [0x01]), ([0x5B], [0x01])]
parseTLVs input `shouldBe` expected
|
wgyn/tlv-parser
|
test/Spec.hs
|
bsd-3-clause
| 1,808
| 0
| 25
| 598
| 651
| 358
| 293
| 42
| 1
|
{-# LINE 1 "GHC.Err.hs" #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude, MagicHash, ImplicitParams #-}
{-# LANGUAGE RankNTypes, TypeInType #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Err
-- Copyright : (c) The University of Glasgow, 1994-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- The "GHC.Err" module defines the code for the wired-in error functions,
-- which have a special type in the compiler (with \"open tyvars\").
--
-- We cannot define these functions in a module where they might be used
-- (e.g., "GHC.Base"), because the magical wired-in type will get confused
-- with what the typechecker figures out.
--
-----------------------------------------------------------------------------
module GHC.Err( absentErr, error, errorWithoutStackTrace, undefined ) where
import GHC.CString ()
import GHC.Types (Char, RuntimeRep)
import GHC.Stack.Types
import GHC.Prim
import GHC.Integer () -- Make sure Integer is compiled first
-- because GHC depends on it in a wired-in way
-- so the build system doesn't see the dependency
import {-# SOURCE #-} GHC.Exception( errorCallWithCallStackException )
-- | 'error' stops execution and displays an error message.
error :: forall (r :: RuntimeRep). forall (a :: TYPE r).
HasCallStack => [Char] -> a
error s = raise# (errorCallWithCallStackException s ?callStack)
-- Bleh, we should be using 'GHC.Stack.callStack' instead of
-- '?callStack' here, but 'GHC.Stack.callStack' depends on
-- 'GHC.Stack.popCallStack', which is partial and depends on
-- 'error'.. Do as I say, not as I do.
-- | A variant of 'error' that does not produce a stack trace.
--
-- @since 4.9.0.0
errorWithoutStackTrace :: forall (r :: RuntimeRep). forall (a :: TYPE r).
[Char] -> a
errorWithoutStackTrace s =
-- we don't have withFrozenCallStack yet, so we just inline the definition
let ?callStack = freezeCallStack emptyCallStack
in error s
-- Note [Errors in base]
-- ~~~~~~~~~~~~~~~~~~~~~
-- As of base-4.9.0.0, `error` produces a stack trace alongside the
-- error message using the HasCallStack machinery. This provides
-- a partial stack trace, containing the call-site of each function
-- with a HasCallStack constraint.
--
-- In base, however, the only functions that have such constraints are
-- error and undefined, so the stack traces from partial functions in
-- base will never contain a call-site in user code. Instead we'll
-- usually just get the actual call to error. Base functions already
-- have a good habit of providing detailed error messages, including the
-- name of the offending partial function, so the partial stack-trace
-- does not provide any extra information, just noise. Thus, we export
-- the callstack-aware error, but within base we use the
-- errorWithoutStackTrace variant for more hygienic error messages.
-- | A special case of 'error'.
-- It is expected that compilers will recognize this and insert error
-- messages which are more appropriate to the context in which 'undefined'
-- appears.
undefined :: forall (r :: RuntimeRep). forall (a :: TYPE r).
HasCallStack => a
undefined = error "Prelude.undefined"
-- | Used for compiler-generated error message;
-- encoding saves bytes of string junk.
absentErr :: a
absentErr = errorWithoutStackTrace "Oops! The program has entered an `absent' argument!\n"
|
phischu/fragnix
|
builtins/base/GHC.Err.hs
|
bsd-3-clause
| 3,659
| 0
| 9
| 694
| 307
| 199
| 108
| -1
| -1
|
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
module Web.Slack.Types.Team where
import Data.Text (Text)
import Web.Slack.Types.Id
import Web.Slack.Types.TeamPreferences
import Web.Slack.Types.Base
import Control.Lens.TH
import Data.Aeson
import Control.Applicative
import Prelude
data Team = Team
{ _teamId :: TeamId
, _teamName :: Text
, _teamEmailDomain :: Text
, _teamDomain :: Text
, _teamPreferences :: TeamPreferences
, _teamIcons :: TeamIcons
, _teamOverStorageLimit :: Bool
} deriving Show
data TeamIcons = TeamIcons
{ _teamIcon34 :: URL
, _teamIcon44 :: URL
, _teamIcon68 :: URL
, _teamIcon88 :: URL
, _teamIcon102 :: URL
, _teamIcon132 :: URL
, _teamIconDefault :: Maybe Bool
} deriving Show
makeLenses ''Team
makeLenses ''TeamIcons
instance FromJSON Team where
parseJSON = withObject "team"
(\o -> Team <$> o .: "id" <*> o .: "name"
<*> o .: "email_domain" <*> o .: "domain"
<*> o .: "prefs"
<*> o .: "icon" <*> o .: "over_storage_limit")
instance FromJSON TeamIcons where
parseJSON = withObject "teamIcons"
(\o -> TeamIcons
<$> o .: "image_34"
<*> o .: "image_44"
<*> o .: "image_68"
<*> o .: "image_88"
<*> o .: "image_102"
<*> o .: "image_132"
<*> o .:? "image_default")
|
madjar/slack-api
|
src/Web/Slack/Types/Team.hs
|
mit
| 1,749
| 0
| 22
| 736
| 350
| 201
| 149
| 46
| 0
|
import Control.Monad (MonadPlus(..))
shortcircuitLeft f =
{-- snippet shortcircuitLeft --}
mzero >>= f == mzero
{-- /snippet shortcircuitLeft --}
shortcircuitRight v =
{-- snippet shortcircuitRight --}
v >> mzero == mzero
{-- /snippet shortcircuitRight --}
{-- snippet guard --}
guard :: (MonadPlus m) => Bool -> m ()
guard True = return ()
guard False = mzero
{-- /snippet guard --}
{-- snippet zeroMod --}
x `zeroMod` n = guard ((x `mod` n) == 0) >> return x
{-- /snippet zeroMod --}
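-- Illustrative use of zeroMod in the list monad (added, not part of the
-- original snippets): guard collapses non-matching branches to mzero ([]),
-- so this keeps only the multiples of 3.
multiplesOfThree :: [Integer]
multiplesOfThree = [1..10] >>= (`zeroMod` 3)   -- == [3,6,9]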
|
binesiyu/ifl
|
examples/ch15/MonadPlus.hs
|
mit
| 516
| 0
| 10
| 112
| 141
| 77
| 64
| 9
| 1
|
{- |
Module : $Id: DataP.hs 14719 2011-03-16 13:29:01Z maeder $
Copyright : (c) DFKI GmbH
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
Adaptation and extension of a parser for data definitions given in
the appendix of G. Hutton's paper - Monadic Parser Combinators.
-}
module DataP (Statement (..), Data (..), Type (..), Body (..),
Name, Var, Class, Constructor,
datadecl, newtypedecl)
where
import ParseLib2
import Data.Char
data Statement = DataStmt | NewTypeStmt deriving (Eq, Show)
data Data = D { name :: Name, -- type name
constraints :: [(Class, Var)],
vars :: [Var], -- Parameters
body :: [Body],
derives :: [Class], -- derived classes
statement :: Statement}
| Directive
| TypeName Name
deriving (Eq, Show)
data Body = Body { constructor :: Constructor,
labels :: [Name],
types :: [Type]} deriving (Eq, Show)
type Name = String
type Var = String
type Class = String
type Constructor = String
-- --------------------------------------------------------------------------
extContext :: Parser [()]
extContext = do
symbol "forall"
many1 variable
char '.'
junk
constructorP
many variable
symbol "=>"
return []
datadecl :: Parser Data
datadecl = do
symbol "data"
cons <- opt constraint
x <- constructorP
xs <- many variable
symbol "="
opt extContext
b <- (infixdecl +++ conrecdecl) `sepby1` symbol "|"
d <- opt deriveP
return $ D x cons xs b d DataStmt
newtypedecl :: Parser Data
newtypedecl = do
symbol "newtype"
cons <- opt constraint
x <- constructorP
xs <- many variable
symbol "="
b <- conrecdecl
d <- opt deriveP
return $ D x cons xs [b] d NewTypeStmt
-- -------------------------------------------------------------------------
isSign :: Char -> Bool
isSign x = not (isAlpha x || isSpace x || elem x "\"|[](){}")
constructorP :: Parser String
constructorP = token $ do
x <- upper
xs <- many alphanum
return (x : xs)
+++ do
char '('
junk
char ':'
y <- many1 $ sat isSign
junk
char ')'
return ("(:" ++ y ++ ")")
infixconstr :: Parser String
infixconstr = token $ do
x <- char ':'
y <- many1 $ sat isSign
return (x : y)
variable :: Parser String
variable = identifier [ "data", "deriving", "newtype", "type", "forall",
"instance", "class", "module", "import",
"infixl", "infix", "infixr", "default"]
conrecdecl :: Parser Body
conrecdecl = do
x <- constructorP
(ls, ts) <- record +++ fmap (\ a -> ([], a)) (many type2)
return $ Body x ls ts
infixdecl :: Parser Body
infixdecl = do
t1 <- type2
x <- infixconstr
ts <- many1 type2
return $ Body ("(" ++ x ++ ")") [] (t1 : ts)
record :: Parser ([String], [Type])
record = do
symbol "{"
(ls, ts) <- fmap unzip $ rectype `sepby1` symbol ","
symbol "}"
return (ls, ts)
constraint :: Parser [(String, String)]
constraint = do
x <- constrs
symbol "=>"
return x
where
constrs = fmap (: []) one +++
bracket (symbol "(") (one `sepby` symbol ",") (symbol ")")
one = do
c <- constructorP
v <- variable
return (c, v)
deriveP :: Parser [String]
deriveP = do
symbol "deriving"
one +++ more
where
one = fmap (: []) constructorP -- well, it has the same form
more = bracket (symbol "(")
(constructorP `sepby` symbol ",")
(symbol ")")
-- -------------------------------------------------------------------------
data Type = Arrow Type Type -- fn
| LApply Type [Type] -- proper application
| Var String -- variable
| Con String -- constructor
| Tuple [Type] -- tuple
| List Type -- list
deriving (Eq, Show)
type0 :: Parser Type
type0 = type1 `chainr1` fmap (const Arrow) (symbol "->")
type1 :: Parser Type
type1 = do
c <- con
as <- many1 type2
return (LApply c as)
+++ type2
type2 :: Parser Type
type2 = char '!' +++ return '!' >> var +++ con +++ list +++ tuple
var :: Parser Type
var = fmap Var variable
con :: Parser Type
con = fmap Con constructorP
list :: Parser Type
list = fmap List $ bracket (symbol "[")
type0
(symbol "]")
tuple :: Parser Type
tuple = fmap f $ bracket (symbol "(")
(type0 `sepby` symbol ",")
(symbol ")")
where f [t] = t
f ts = Tuple ts
-- record entry
rectype :: Parser (String, Type)
rectype = do
s <- variable
symbol "::"
opt $ symbol "!"
t <- type0
return (s, t)
|
nevrenato/HetsAlloy
|
utils/DrIFT-src/DataP.hs
|
gpl-2.0
| 5,136
| 0
| 13
| 1,739
| 1,592
| 818
| 774
| 151
| 2
|
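To make the shapes above concrete, here is a hand-written Data value of the kind datadecl aims to produce for a declaration such as data Pair a b = Pair { first :: a, second :: b } deriving (Eq, Show). It is a sketch built from the types defined in this module, not captured parser output, and assumes it sits where Data, Body and Type are in scope.
examplePair :: Data
examplePair = D { name        = "Pair"
                , constraints = []
                , vars        = ["a", "b"]
                , body        = [ Body { constructor = "Pair"
                                       , labels      = ["first", "second"]
                                       , types       = [Var "a", Var "b"] } ]
                , derives     = ["Eq", "Show"]
                , statement   = DataStmt }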
module Text.RDF.RDF4H.TurtleParser_ConformanceTest where
-- Testing imports
import Test.Framework.Providers.API
import Test.Framework.Providers.HUnit
import qualified Test.HUnit as TU
-- Import common libraries to facilitate tests
import Control.Monad (liftM)
import Data.RDF.GraphTestUtils
import Data.RDF.Query
import Data.RDF.TriplesGraph
import Data.RDF.Types
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Text.Printf
import Text.RDF.RDF4H.TurtleParser
tests :: [Test]
tests = [ testGroup "TurtleParser" allCTests ]
-- A list of other tests to run, each entry of which is (directory, fname_without_ext).
otherTestFiles :: [(String, String)]
otherTestFiles = [("data/ttl", "example1"),
("data/ttl", "example2"),
("data/ttl", "example3"),
("data/ttl", "example5"),
("data/ttl", "example6"),
-- ("data/ttl", "example7"), -- rdf4h URIs support RFC3986, not unicode IRIs in RFC3987
("data/ttl", "example8"),
("data/ttl", "fawlty1")
]
-- The Base URI to be used for all conformance tests:
testBaseUri :: String
testBaseUri = "http://www.w3.org/2001/sw/DataAccess/df1/tests/"
mtestBaseUri :: Maybe BaseUrl
mtestBaseUri = Just $ BaseUrl $ T.pack testBaseUri
fpath :: String -> Int -> String -> String
fpath name i ext = printf "data/ttl/conformance/%s-%02d.%s" name i ext :: String
allCTests :: [Test]
allCTests = ts1 ++ ts2 ++ ts3
where
ts1 = map (buildTest . checkGoodConformanceTest) [0..30]
ts2 = map (buildTest . checkBadConformanceTest) [0..14]
ts3 = map (buildTest . uncurry checkGoodOtherTest) otherTestFiles
checkGoodConformanceTest :: Int -> IO Test
checkGoodConformanceTest i =
do
expGr <- loadExpectedGraph "test" i
inGr <- loadInputGraph "test" i
doGoodConformanceTest expGr inGr (printf "test %d" i :: String)
checkGoodOtherTest :: String -> String -> IO Test
checkGoodOtherTest dir fname =
do
expGr <- loadExpectedGraph1 (printf "%s/%s.out" dir fname :: String)
inGr <- loadInputGraph1 dir fname
doGoodConformanceTest expGr inGr $ printf "test using file \"%s\"" fname
doGoodConformanceTest :: Either ParseFailure TriplesGraph ->
Either ParseFailure TriplesGraph ->
String -> IO Test
doGoodConformanceTest expGr inGr testname = do
let t1 = assertLoadSuccess (printf "expected (%s): " testname) expGr
t2 = assertLoadSuccess (printf " input (%s): " testname) inGr
t3 = assertEquivalent testname expGr inGr
return $ testGroup (printf "Conformance %s" testname) $ map (uncurry testCase) [("Loading expected graph data", t1), ("Loading input graph data", t2), ("Comparing graphs", t3)]
checkBadConformanceTest :: Int -> IO Test
checkBadConformanceTest i = do
let t = loadInputGraph "bad" i >>= assertLoadFailure (show i)
return $ testCase (printf "Loading test %d (negative)" i) t
-- Determines if graphs are equivalent, returning Nothing if so or else a diagnostic message.
-- First graph is expected graph, second graph is actual.
equivalent :: RDF rdf => Either ParseFailure rdf -> Either ParseFailure rdf -> Maybe String
equivalent (Left _) _ = Nothing
equivalent _ (Left _) = Nothing
equivalent (Right gr1) (Right gr2) = test $! zip gr1ts gr2ts
where
gr1ts = uordered $ triplesOf gr1
gr2ts = uordered $ triplesOf gr2
test [] = Nothing
test ((t1,t2):ts) =
case compareTriple t1 t2 of
Nothing -> test ts
err -> err
compareTriple t1 t2 =
if equalNodes s1 s2 && equalNodes p1 p2 && equalNodes o1 o2
then Nothing
else Just ("Expected:\n " ++ show t1 ++ "\nFound:\n " ++ show t2 ++ "\n")
where
(s1, p1, o1) = f t1
(s2, p2, o2) = f t2
f t = (subjectOf t, predicateOf t, objectOf t)
-- equalNodes (BNode fs1) (BNodeGen i) = T.reverse fs1 == T.pack ("_:genid" ++ show i)
equalNodes (BNode fs1) (BNodeGen i) = fs1 == T.pack ("_:genid" ++ show i)
equalNodes n1 n2 = n1 == n2
-- Returns a graph for a good ttl test that is intended to pass, and normalizes
-- triples into a format so that they can be compared with the expected output triples.
loadInputGraph :: String -> Int -> IO (Either ParseFailure TriplesGraph)
loadInputGraph name n =
TIO.readFile (fpath name n "ttl") >>=
return . parseString (TurtleParser mtestBaseUri (mkDocUrl testBaseUri name n)) >>= return . handleLoad
loadInputGraph1 :: String -> String -> IO (Either ParseFailure TriplesGraph)
loadInputGraph1 dir fname =
TIO.readFile (printf "%s/%s.ttl" dir fname :: String) >>=
return . parseString (TurtleParser mtestBaseUri (mkDocUrl1 testBaseUri fname)) >>= return . handleLoad
handleLoad :: Either ParseFailure TriplesGraph -> Either ParseFailure TriplesGraph
handleLoad res =
case res of
l@(Left _) -> l
(Right gr) -> Right $ mkRdf (map normalize (triplesOf gr)) (baseUrl gr) (prefixMappings gr)
normalize :: Triple -> Triple
normalize t = let s' = normalizeN $ subjectOf t
p' = normalizeN $ predicateOf t
o' = normalizeN $ objectOf t
in triple s' p' o'
normalizeN :: Node -> Node
normalizeN (BNodeGen i) = BNode (T.pack $ "_:genid" ++ show i)
normalizeN n = n
loadExpectedGraph :: String -> Int -> IO (Either ParseFailure TriplesGraph)
loadExpectedGraph name n = loadExpectedGraph1 (fpath name n "out")
loadExpectedGraph1 :: String -> IO (Either ParseFailure TriplesGraph)
loadExpectedGraph1 fname =
liftM (parseString (TurtleParser mtestBaseUri (mkDocUrl1 testBaseUri fname))) (TIO.readFile fname)
assertLoadSuccess, assertLoadFailure :: String -> Either ParseFailure TriplesGraph -> TU.Assertion
assertLoadSuccess idStr (Left (ParseFailure err)) = TU.assertFailure $ idStr ++ err
assertLoadSuccess _ (Right _) = return ()
assertLoadFailure _ (Left _) = return ()
assertLoadFailure idStr _ = TU.assertFailure $ "Bad test " ++ idStr ++ " loaded successfully."
assertEquivalent :: RDF rdf => String -> Either ParseFailure rdf -> Either ParseFailure rdf -> TU.Assertion
assertEquivalent testname r1 r2 =
case equiv of
Nothing -> TU.assert True
(Just msg) -> fail $ "Graph " ++ testname ++ " not equivalent to expected:\n" ++ msg
where equiv = equivalent r1 r2
mkDocUrl :: String -> String -> Int -> Maybe T.Text
mkDocUrl baseDocUrl fname testNum = Just $ T.pack $ printf "%s%s-%02d.ttl" baseDocUrl fname testNum
mkDocUrl1 :: String -> String -> Maybe T.Text
mkDocUrl1 baseDocUrl fname = Just $ T.pack $ printf "%s%s.ttl" baseDocUrl fname
|
cordawyn/rdf4h
|
testsuite/tests/Text/RDF/RDF4H/TurtleParser_ConformanceTest.hs
|
bsd-3-clause
| 6,751
| 0
| 14
| 1,521
| 1,914
| 985
| 929
| 119
| 5
|
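A tiny sketch of the blank-node normalisation used above (my addition, assuming it sits in the same test module so normalizeN and Node are in scope): generated blank nodes are rewritten into labelled ones so that expected and parsed graphs can be compared.
normalizedBlank :: Node
normalizedBlank = normalizeN (BNodeGen 1)   -- yields BNode "_:genid1"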
-- | <http://library.gnome.org/devel/glib/stable/glib-utilities.html>
module Bindings.GLib.Utilities (
module Bindings.GLib.Utilities.CommandlineOptionParser,
module Bindings.GLib.Utilities.DateAndTimeFunctions,
module Bindings.GLib.Utilities.HostnameUtilities,
module Bindings.GLib.Utilities.SimpleXmlSubsetParser,
module Bindings.GLib.Utilities.UnicodeManipulation,
) where
import Bindings.GLib.Utilities.CommandlineOptionParser
import Bindings.GLib.Utilities.DateAndTimeFunctions
import Bindings.GLib.Utilities.HostnameUtilities
import Bindings.GLib.Utilities.SimpleXmlSubsetParser
import Bindings.GLib.Utilities.UnicodeManipulation
|
magthe/bindings-dsl
|
bindings-glib/src/Bindings/GLib/Utilities.hs
|
bsd-3-clause
| 647
| 0
| 5
| 39
| 86
| 63
| 23
| 11
| 0
|
module Lambda.Tree
( toTree, display, peng )
where
import Lambda.Data
import Tree
import Autolib.Dot.Dotty
instance ToTree Lambda where
toTree t = case t of
Variable v -> Node ( show v ) []
Apply fun arg -> Node "@" [ toTree fun, toTree arg ]
Abstract var body -> Node ( "\\" ++ show var ) [ toTree body ]
|
Erdwolf/autotool-bonn
|
src/Lambda/Tree.hs
|
gpl-2.0
| 341
| 0
| 12
| 95
| 133
| 68
| 65
| 10
| 0
|
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module GHC.IO.Buffer (module M) where
import "base" GHC.IO.Buffer as M
|
xwysp/codeworld
|
codeworld-base/src/GHC/IO/Buffer.hs
|
apache-2.0
| 741
| 0
| 4
| 136
| 25
| 19
| 6
| 4
| 0
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fa-IR">
<title>Context Alert Filters | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/alertFilters/resources/help_fa_IR/helpset_fa_IR.hs
|
apache-2.0
| 983
| 80
| 66
| 161
| 417
| 211
| 206
| -1
| -1
|
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Control.Monad.Instances (module M) where
import "base" Control.Monad.Instances as M
|
Ye-Yong-Chi/codeworld
|
codeworld-base/src/Control/Monad/Instances.hs
|
apache-2.0
| 761
| 0
| 4
| 136
| 25
| 19
| 6
| 4
| 0
|
{-|
Module : Language.Qux.Lexer
Description : A Parsec lexer for the Qux language.
Copyright : (c) Henry J. Wylde, 2015
License : BSD3
Maintainer : hjwylde@gmail.com
A "Text.Parsec" lexer for the Qux language.
-}
{-# OPTIONS_HADDOCK hide, prune #-}
module Language.Qux.Lexer where
import Control.Monad
import Text.Parsec
import qualified Text.Parsec.Token as Token
lexer :: Monad m => Token.GenTokenParser String u m
lexer = Token.makeTokenParser quxDef
quxDef :: Monad m => Token.GenLanguageDef String u m
quxDef = Token.LanguageDef commentStart commentEnd commentLine nestedComments identStart identLetter
opStart opLetter reservedNames reservedOpNames caseSensitive
where
commentStart = "/*"
commentEnd = "*/"
commentLine = "#"
nestedComments = False
identStart = letter <|> char '_'
identLetter = alphaNum <|> oneOf ['_', '\'']
opStart = oneOf []
opLetter = oneOf []
reservedNames = keywords
reservedOpNames = operators
caseSensitive = True
keywords :: [String]
keywords =
[ "_", "()"
, "external", "import", "module", "type"
, "else", "if", "return", "while"
, "false", "true"
, "Any", "Bool", "Int", "Str"
]
operators :: [String]
operators =
[ "*", "/", "%"
, "+", "-"
, "<", "<=", ">", ">="
, "==", "!="
]
identifier :: Monad m => ParsecT String u m String
identifier = lookAhead (lower <|> char '_') *> Token.identifier lexer
typeIdentifier :: Monad m => ParsecT String u m String
typeIdentifier = lookAhead upper *> Token.identifier lexer
string :: Monad m => ParsecT String u m String
string = Token.stringLiteral lexer
natural :: Monad m => ParsecT String u m Integer
natural = Token.natural lexer
operator :: Monad m => String -> ParsecT String u m ()
operator = Token.reservedOp lexer
keyword :: Monad m => String -> ParsecT String u m ()
keyword = Token.reserved lexer
separator :: Monad m => String -> ParsecT String u m String
separator = Token.symbol lexer
separator_ :: Monad m => String -> ParsecT String u m ()
separator_ = void . separator
colon :: Monad m => ParsecT String u m ()
colon = separator_ ":"
comma :: Monad m => ParsecT String u m ()
comma = separator_ ","
dcolon :: Monad m => ParsecT String u m ()
dcolon = separator_ "::"
dot :: Monad m => ParsecT String u m ()
dot = separator_ "."
parens :: Monad m => ParsecT String u m a -> ParsecT String u m a
parens = Token.parens lexer
rarrow :: Monad m => ParsecT String u m ()
rarrow = separator_ "->"
whiteSpace :: Monad m => ParsecT String u m ()
whiteSpace = Token.whiteSpace lexer
|
qux-lang/language-qux
|
src/Language/Qux/Lexer.hs
|
bsd-3-clause
| 2,741
| 0
| 9
| 688
| 829
| 432
| 397
| 64
| 1
|
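A short usage sketch for the lexer above (hypothetical input, not part of language-qux; assumes it is appended to the module, which already imports Text.Parsec): the token parsers compose with plain Parsec combinators.
demoLex :: Either ParseError (String, Integer)
demoLex = parse p "<demo>" "return answer 42"
  where
    p = do
        keyword "return"      -- reserved word from the keywords list
        x <- identifier       -- "answer"
        n <- natural          -- 42
        eof
        return (x, n)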
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
-- | Generate HPC (Haskell Program Coverage) reports
module Stack.Coverage
( deleteHpcReports
, updateTixFile
, generateHpcReport
, HpcReportOpts(..)
, generateHpcReportForTargets
, generateHpcUnifiedReport
, generateHpcMarkupIndex
) where
import Stack.Prelude
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as BL
import Data.List
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy as LT
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO
import Stack.Build.Target
import Stack.Config (getLocalPackages)
import Stack.Constants.Config
import Stack.Package
import Stack.PrettyPrint
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.NamedComponent
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Runner
import Stack.Types.Version
import System.FilePath (isPathSeparator)
import RIO.Process
import Text.Hastache (htmlEscape)
import Trace.Hpc.Tix
import Web.Browser (openBrowser)
-- | Invoked at the beginning of running with "--coverage"
deleteHpcReports :: HasEnvConfig env => RIO env ()
deleteHpcReports = do
hpcDir <- hpcReportDir
liftIO $ ignoringAbsence (removeDirRecur hpcDir)
-- | Move a tix file into a sub-directory of the hpc report directory. Deletes the old one if one is
-- present.
updateTixFile :: HasEnvConfig env => PackageName -> Path Abs File -> String -> RIO env ()
updateTixFile pkgName tixSrc testName = do
exists <- doesFileExist tixSrc
when exists $ do
tixDest <- tixFilePath pkgName testName
liftIO $ ignoringAbsence (removeFile tixDest)
ensureDir (parent tixDest)
-- Remove exe modules because they are problematic. This could be revisited if there's a GHC
-- version that fixes https://ghc.haskell.org/trac/ghc/ticket/1853
mtix <- readTixOrLog tixSrc
case mtix of
Nothing -> logError $ "Failed to read " <> T.pack (toFilePath tixSrc)
Just tix -> do
liftIO $ writeTix (toFilePath tixDest) (removeExeModules tix)
-- TODO: ideally we'd do a file move, but IIRC this can
-- have problems. Something about moving between drives
-- on windows?
copyFile tixSrc =<< parseAbsFile (toFilePath tixDest ++ ".premunging")
liftIO $ ignoringAbsence (removeFile tixSrc)
-- | Get the directory used for hpc reports for the given pkgId.
hpcPkgPath :: HasEnvConfig env => PackageName -> RIO env (Path Abs Dir)
hpcPkgPath pkgName = do
outputDir <- hpcReportDir
pkgNameRel <- parseRelDir (packageNameString pkgName)
return (outputDir </> pkgNameRel)
-- | Get the tix file location, given the name of the file (without extension), and the package
-- identifier string.
tixFilePath :: HasEnvConfig env
=> PackageName -> String -> RIO env (Path Abs File)
tixFilePath pkgName testName = do
pkgPath <- hpcPkgPath pkgName
tixRel <- parseRelFile (testName ++ "/" ++ testName ++ ".tix")
return (pkgPath </> tixRel)
-- | Generates the HTML coverage report and shows a textual coverage summary for a package.
generateHpcReport :: HasEnvConfig env
=> Path Abs Dir -> Package -> [Text] -> RIO env ()
generateHpcReport pkgDir package tests = do
compilerVersion <- view actualCompilerVersionL
-- If we're using > GHC 7.10, the hpc 'include' parameter must specify a ghc package key. See
-- https://github.com/commercialhaskell/stack/issues/785
let pkgName = packageNameText (packageName package)
pkgId = packageIdentifierString (packageIdentifier package)
ghcVersion = getGhcVersion compilerVersion
hasLibrary =
case packageLibraries package of
NoLibraries -> False
HasLibraries _ -> True
eincludeName <-
-- Pre-7.8 uses plain PKG-version in tix files.
if ghcVersion < $(mkVersion "7.10") then return $ Right $ Just pkgId
-- We don't expect to find a package key if there is no library.
else if not hasLibrary then return $ Right Nothing
-- Look in the inplace DB for the package key.
-- See https://github.com/commercialhaskell/stack/issues/1181#issuecomment-148968986
else do
-- GHC 8.0 uses package id instead of package key.
-- See https://github.com/commercialhaskell/stack/issues/2424
let hpcNameField = if ghcVersion >= $(mkVersion "8.0") then "id" else "key"
eincludeName <- findPackageFieldForBuiltPackage pkgDir (packageIdentifier package) hpcNameField
case eincludeName of
Left err -> do
logError err
return $ Left err
Right includeName -> return $ Right $ Just $ T.unpack includeName
forM_ tests $ \testName -> do
tixSrc <- tixFilePath (packageName package) (T.unpack testName)
let report = "coverage report for " <> pkgName <> "'s test-suite \"" <> testName <> "\""
reportDir = parent tixSrc
case eincludeName of
Left err -> generateHpcErrorReport reportDir (sanitize (T.unpack err))
-- Restrict to just the current library code, if there is a library in the package (see
-- #634 - this will likely be customizable in the future)
Right mincludeName -> do
let extraArgs = case mincludeName of
Just includeName -> ["--include", includeName ++ ":"]
Nothing -> []
mreportPath <- generateHpcReportInternal tixSrc reportDir report extraArgs extraArgs
forM_ mreportPath (displayReportPath report . display)
generateHpcReportInternal :: HasEnvConfig env
=> Path Abs File -> Path Abs Dir -> Text -> [String] -> [String]
-> RIO env (Maybe (Path Abs File))
generateHpcReportInternal tixSrc reportDir report extraMarkupArgs extraReportArgs = do
-- If a .tix file exists, move it to the HPC output directory and generate a report for it.
tixFileExists <- doesFileExist tixSrc
if not tixFileExists
then do
logError $ T.concat
[ "Didn't find .tix for "
, report
, " - expected to find it at "
, T.pack (toFilePath tixSrc)
, "."
]
return Nothing
else (`catch` \err -> do
let msg = show (err :: ReadProcessException)
logError (T.pack msg)
generateHpcErrorReport reportDir $ sanitize msg
return Nothing) $
(`onException` logError ("Error occurred while producing " <> report)) $ do
-- Directories for .mix files.
hpcRelDir <- hpcRelativeDir
-- Compute arguments used for both "hpc markup" and "hpc report".
pkgDirs <- liftM (map lpvRoot . Map.elems . lpProject) getLocalPackages
let args =
-- Use index files from all packages (allows cross-package coverage results).
concatMap (\x -> ["--srcdir", toFilePathNoTrailingSep x]) pkgDirs ++
-- Look for index files in the correct dir (relative to each pkgdir).
["--hpcdir", toFilePathNoTrailingSep hpcRelDir, "--reset-hpcdirs"]
logInfo $ "Generating " <> report
outputLines <- liftM (map (S8.filter (/= '\r')) . S8.lines . BL.toStrict) $
withProc "hpc"
( "report"
: toFilePath tixSrc
: (args ++ extraReportArgs)
)
readProcessStdout_
if all ("(0/0)" `S8.isSuffixOf`) outputLines
then do
let msg html = T.concat
[ "Error: The "
, report
, " did not consider any code. One possible cause of this is"
, " if your test-suite builds the library code (see stack "
, if html then "<a href='https://github.com/commercialhaskell/stack/issues/1008'>" else ""
, "issue #1008"
, if html then "</a>" else ""
, "). It may also indicate a bug in stack or"
, " the hpc program. Please report this issue if you think"
, " your coverage report should have meaningful results."
]
logError (msg False)
generateHpcErrorReport reportDir (msg True)
return Nothing
else do
let reportPath = reportDir </> $(mkRelFile "hpc_index.html")
-- Print output, stripping @\r@ characters because Windows.
forM_ outputLines (logInfo . T.decodeUtf8)
-- Generate the markup.
void $ withProc "hpc"
( "markup"
: toFilePath tixSrc
: ("--destdir=" ++ toFilePathNoTrailingSep reportDir)
: (args ++ extraMarkupArgs)
)
readProcessStdout_
return (Just reportPath)
data HpcReportOpts = HpcReportOpts
{ hroptsInputs :: [Text]
, hroptsAll :: Bool
, hroptsDestDir :: Maybe String
, hroptsOpenBrowser :: Bool
} deriving (Show)
generateHpcReportForTargets :: HasEnvConfig env
=> HpcReportOpts -> RIO env ()
generateHpcReportForTargets opts = do
let (tixFiles, targetNames) = partition (".tix" `T.isSuffixOf`) (hroptsInputs opts)
targetTixFiles <-
-- When there aren't any package component arguments, and --all
-- isn't passed, default to not considering any targets.
if not (hroptsAll opts) && null targetNames
then return []
else do
when (hroptsAll opts && not (null targetNames)) $
logWarn $ "Since --all is used, it is redundant to specify these targets: " <> T.pack (show targetNames)
(_,_,targets) <- parseTargets
AllowNoTargets
defaultBuildOptsCLI
{ boptsCLITargets = if hroptsAll opts then [] else targetNames }
liftM concat $ forM (Map.toList targets) $ \(name, target) ->
case target of
TargetAll Dependency -> throwString $
"Error: Expected a local package, but " ++
packageNameString name ++
" is either an extra-dep or in the snapshot."
TargetComps comps -> do
pkgPath <- hpcPkgPath name
forM (toList comps) $ \nc ->
case nc of
CTest testName ->
liftM (pkgPath </>) $ parseRelFile (T.unpack testName ++ "/" ++ T.unpack testName ++ ".tix")
_ -> fail $
"Can't specify anything except test-suites as hpc report targets (" ++
packageNameString name ++
" is used with a non test-suite target)"
TargetAll ProjectPackage -> do
pkgPath <- hpcPkgPath name
exists <- doesDirExist pkgPath
if exists
then do
(dirs, _) <- listDir pkgPath
liftM concat $ forM dirs $ \dir -> do
(_, files) <- listDir dir
return (filter ((".tix" `isSuffixOf`) . toFilePath) files)
else return []
tixPaths <- liftM (\xs -> xs ++ targetTixFiles) $ mapM (resolveFile' . T.unpack) tixFiles
when (null tixPaths) $
throwString "Not generating combined report, because no targets or tix files are specified."
outputDir <- hpcReportDir
reportDir <- case hroptsDestDir opts of
Nothing -> return (outputDir </> $(mkRelDir "combined/custom"))
Just destDir -> do
dest <- resolveDir' destDir
ensureDir dest
return dest
let report = "combined report"
mreportPath <- generateUnionReport report reportDir tixPaths
forM_ mreportPath $ \reportPath ->
if hroptsOpenBrowser opts
then do
prettyInfo $ "Opening" <+> display reportPath <+> "in the browser."
void $ liftIO $ openBrowser (toFilePath reportPath)
else displayReportPath report (display reportPath)
generateHpcUnifiedReport :: HasEnvConfig env => RIO env ()
generateHpcUnifiedReport = do
outputDir <- hpcReportDir
ensureDir outputDir
(dirs, _) <- listDir outputDir
tixFiles0 <- liftM (concat . concat) $ forM (filter (("combined" /=) . dirnameString) dirs) $ \dir -> do
(dirs', _) <- listDir dir
forM dirs' $ \dir' -> do
(_, files) <- listDir dir'
return (filter ((".tix" `isSuffixOf`) . toFilePath) files)
extraTixFiles <- findExtraTixFiles
let tixFiles = tixFiles0 ++ extraTixFiles
reportDir = outputDir </> $(mkRelDir "combined/all")
if length tixFiles < 2
then logInfo $ T.concat
[ if null tixFiles then "No tix files" else "Only one tix file"
, " found in "
, T.pack (toFilePath outputDir)
, ", so not generating a unified coverage report."
]
else do
let report = "unified report"
mreportPath <- generateUnionReport report reportDir tixFiles
forM_ mreportPath (displayReportPath report . display)
generateUnionReport :: HasEnvConfig env
=> Text -> Path Abs Dir -> [Path Abs File]
-> RIO env (Maybe (Path Abs File))
generateUnionReport report reportDir tixFiles = do
(errs, tix) <- fmap (unionTixes . map removeExeModules) (mapMaybeM readTixOrLog tixFiles)
logDebug $ "Using the following tix files: " <> T.pack (show tixFiles)
unless (null errs) $ logWarn $ T.concat $
"The following modules are left out of the " : report : " due to version mismatches: " :
intersperse ", " (map T.pack errs)
tixDest <- liftM (reportDir </>) $ parseRelFile (dirnameString reportDir ++ ".tix")
ensureDir (parent tixDest)
liftIO $ writeTix (toFilePath tixDest) tix
generateHpcReportInternal tixDest reportDir report [] []
readTixOrLog :: HasLogFunc env => Path b File -> RIO env (Maybe Tix)
readTixOrLog path = do
mtix <- liftIO (readTix (toFilePath path)) `catchAny` \errorCall -> do
logError $ "Error while reading tix: " <> T.pack (show errorCall)
return Nothing
when (isNothing mtix) $
logError $ "Failed to read tix file " <> T.pack (toFilePath path)
return mtix
-- | Module names which contain '/' have a package name, and so they weren't built into the
-- executable.
removeExeModules :: Tix -> Tix
removeExeModules (Tix ms) = Tix (filter (\(TixModule name _ _ _) -> '/' `elem` name) ms)
unionTixes :: [Tix] -> ([String], Tix)
unionTixes tixes = (Map.keys errs, Tix (Map.elems outputs))
where
(errs, outputs) = Map.mapEither id $ Map.unionsWith merge $ map toMap tixes
toMap (Tix ms) = Map.fromList (map (\x@(TixModule k _ _ _) -> (k, Right x)) ms)
merge (Right (TixModule k hash1 len1 tix1))
(Right (TixModule _ hash2 len2 tix2))
| hash1 == hash2 && len1 == len2 = Right (TixModule k hash1 len1 (zipWith (+) tix1 tix2))
merge _ _ = Left ()
generateHpcMarkupIndex :: HasEnvConfig env => RIO env ()
generateHpcMarkupIndex = do
outputDir <- hpcReportDir
let outputFile = outputDir </> $(mkRelFile "index.html")
ensureDir outputDir
(dirs, _) <- listDir outputDir
rows <- liftM (catMaybes . concat) $ forM dirs $ \dir -> do
(subdirs, _) <- listDir dir
forM subdirs $ \subdir -> do
let indexPath = subdir </> $(mkRelFile "hpc_index.html")
exists' <- doesFileExist indexPath
if not exists' then return Nothing else do
relPath <- stripProperPrefix outputDir indexPath
let package = dirname dir
testsuite = dirname subdir
return $ Just $ T.concat
[ "<tr><td>"
, pathToHtml package
, "</td><td><a href=\""
, pathToHtml relPath
, "\">"
, pathToHtml testsuite
, "</a></td></tr>"
]
liftIO $ T.writeFile (toFilePath outputFile) $ T.concat $
[ "<html><head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">"
-- Part of the css from HPC's output HTML
, "<style type=\"text/css\">"
, "table.dashboard { border-collapse: collapse; border: solid 1px black }"
, ".dashboard td { border: solid 1px black }"
, ".dashboard th { border: solid 1px black }"
, "</style>"
, "</head>"
, "<body>"
] ++
(if null rows
then
[ "<b>No hpc_index.html files found in \""
, pathToHtml outputDir
, "\".</b>"
]
else
[ "<table class=\"dashboard\" width=\"100%\" boder=\"1\"><tbody>"
, "<p><b>NOTE: This is merely a listing of the html files found in the coverage reports directory. Some of these reports may be old.</b></p>"
, "<tr><th>Package</th><th>TestSuite</th><th>Modification Time</th></tr>"
] ++
rows ++
["</tbody></table>"]) ++
["</body></html>"]
unless (null rows) $
logInfo $ "\nAn index of the generated HTML coverage reports is available at " <>
T.pack (toFilePath outputFile)
generateHpcErrorReport :: MonadIO m => Path Abs Dir -> Text -> m ()
generateHpcErrorReport dir err = do
ensureDir dir
liftIO $ T.writeFile (toFilePath (dir </> $(mkRelFile "hpc_index.html"))) $ T.concat
[ "<html><head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\"></head><body>"
, "<h1>HPC Report Generation Error</h1>"
, "<p>"
, err
, "</p>"
, "</body></html>"
]
pathToHtml :: Path b t -> Text
pathToHtml = T.dropWhileEnd (=='/') . sanitize . toFilePath
sanitize :: String -> Text
sanitize = LT.toStrict . htmlEscape . LT.pack
dirnameString :: Path r Dir -> String
dirnameString = dropWhileEnd isPathSeparator . toFilePath . dirname
findPackageFieldForBuiltPackage
:: HasEnvConfig env
=> Path Abs Dir -> PackageIdentifier -> Text
-> RIO env (Either Text Text)
findPackageFieldForBuiltPackage pkgDir pkgId field = do
distDir <- distDirFromDir pkgDir
let inplaceDir = distDir </> $(mkRelDir "package.conf.inplace")
pkgIdStr = packageIdentifierString pkgId
notFoundErr = return $ Left $ "Failed to find package key for " <> T.pack pkgIdStr
extractField path = do
contents <- liftIO $ T.readFile (toFilePath path)
case asum (map (T.stripPrefix (field <> ": ")) (T.lines contents)) of
Just result -> return $ Right result
Nothing -> notFoundErr
cabalVer <- view cabalVersionL
if cabalVer < $(mkVersion "1.24")
then do
path <- liftM (inplaceDir </>) $ parseRelFile (pkgIdStr ++ "-inplace.conf")
logDebug $ "Parsing config in Cabal < 1.24 location: " <> T.pack (toFilePath path)
exists <- doesFileExist path
if exists then extractField path else notFoundErr
else do
-- With Cabal-1.24, it's in a different location.
logDebug $ "Scanning " <> T.pack (toFilePath inplaceDir) <> " for files matching " <> T.pack pkgIdStr
(_, files) <- handleIO (const $ return ([], [])) $ listDir inplaceDir
logDebug $ T.pack (show files)
case mapMaybe (\file -> fmap (const file) . (T.stripSuffix ".conf" <=< T.stripPrefix (T.pack (pkgIdStr ++ "-")))
. T.pack . toFilePath . filename $ file) files of
[] -> notFoundErr
[path] -> extractField path
_ -> return $ Left $ "Multiple files matching " <> T.pack (pkgIdStr ++ "-*.conf") <> " found in " <>
T.pack (toFilePath inplaceDir) <> ". Maybe try 'stack clean' on this package?"
displayReportPath :: (HasRunner env)
=> Text -> AnsiDoc -> RIO env ()
displayReportPath report reportPath =
prettyInfo $ "The" <+> fromString (T.unpack report) <+> "is available at" <+> reportPath
findExtraTixFiles :: HasEnvConfig env => RIO env [Path Abs File]
findExtraTixFiles = do
outputDir <- hpcReportDir
let dir = outputDir </> $(mkRelDir "extra-tix-files")
dirExists <- doesDirExist dir
if dirExists
then do
(_, files) <- listDir dir
return $ filter ((".tix" `isSuffixOf`) . toFilePath) files
else return []
|
anton-dessiatov/stack
|
src/Stack/Coverage.hs
|
bsd-3-clause
| 22,201
| 0
| 30
| 7,314
| 4,984
| 2,490
| 2,494
| 393
| 9
|
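A small sketch of the argument plumbing above (illustrative paths only, not real stack output): generateHpcReportInternal builds one --srcdir per local package plus a shared --hpcdir, and passes the same list to both hpc report and hpc markup.
sketchHpcArgs :: [String]
sketchHpcArgs =
    [ "--srcdir", "/work/my-package"                   -- repeated once per local package
    , "--hpcdir", ".stack-work/dist/x86_64-linux/hpc"  -- hypothetical .mix location, relative to each srcdir
    , "--reset-hpcdirs"
    ]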
module Control.Gruppe
-- $Id$
( module Control.Gruppe.Typ
, module Control.Gruppe.DB
, module Control.Gruppe.CGI
)
where
import Control.Gruppe.Typ
import Control.Gruppe.DB
import Control.Gruppe.CGI
|
Erdwolf/autotool-bonn
|
trial/src/Control/Gruppe.hs
|
gpl-2.0
| 210
| 0
| 5
| 33
| 48
| 33
| 15
| 7
| 0
|
-- Both blocks are illegal Haskell 98, because of the un-saturated
-- type synonym, but (rather obscurely) at one point (GHC 6.3), we
-- accepted 'blah', but rejected 'blah1'
module ShouldFail where
data T = T
-- This was erroneously accepted
type Foo a = String -> Maybe a
type Bar m = m Int
blah = undefined :: Bar Foo
type Foo1 a = Maybe a
type Bar1 m = m Int
blah1 = undefined :: Bar1 Foo1
|
sdiehl/ghc
|
testsuite/tests/typecheck/should_fail/tcfail129.hs
|
bsd-3-clause
| 401
| 0
| 6
| 86
| 81
| 49
| 32
| 8
| 1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
<title>ToDo-List</title>
<maps>
<homeID>todo</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/todo/src/main/javahelp/help_pl_PL/helpset_pl_PL.hs
|
apache-2.0
| 955
| 77
| 67
| 155
| 408
| 207
| 201
| -1
| -1
|
module Pfe3Metrics where
import Data.Maybe(mapMaybe)
import PFE0(allModules,pput)
import PFE3(parseModule)
import HsDeclStruct(DI(..))
import HsModule(hsModDecls)
import DefinedNames(definedType)
import HasBaseStruct(basestruct)
import Statistics
import PfeParse
import MUtils
pfe3MetricsCmds =
[("classmetrics", (noArgs classMetrics,"number of instances per class metrics"))]
classMetrics =
do ms <- allModules
(classes,insts) <- apBoth concat . unzip # mapM getClassInstDecls ms
let cinstcnt = [(c,length [()|i<-insts,i==c])|c<-classes]
pput (ppStatistics "number of instances" "class" cinstcnt)
getClassInstDecls m = cls_insts . hsModDecls . snd # parseModule m
cls_insts ds = (mapMaybe className ds,concatMap instClass ds)
className d =
case basestruct d of
Just (HsClassDecl _ _ tp _ _) -> Just (definedType tp)
_ -> Nothing
instClass d =
case basestruct d of
Just (HsInstDecl _ _ _ tp _) -> [definedType tp]
Just (HsDataDecl _ _ _ _ cls) -> cls
Just (HsNewTypeDecl _ _ _ _ cls) -> cls
_ -> []
|
SAdams601/HaRe
|
old/tools/pfe/Pfe3Metrics.hs
|
bsd-3-clause
| 1,053
| 0
| 15
| 193
| 406
| 212
| 194
| -1
| -1
|
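A worked sketch of the counting step above (hypothetical class names; the real code counts parsed names rather than Strings):
exampleCounts :: [(String, Int)]
exampleCounts = [ (c, length [ () | i <- insts, i == c ]) | c <- classes ]
  where
    classes = ["Eq", "Show", "Pretty"]
    insts   = ["Eq", "Show", "Eq"]   -- one entry per instance-like declaration
-- exampleCounts == [("Eq",2),("Show",1),("Pretty",0)]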
module WhereIn4 where
--A definition can be demoted to the local 'where' binding of a friend declaration,
--if it is only used by this friend declaration.
--Demoting a definition narrows down the scope of the definition.
--In this example, demote the top level 'sq' to 'sumSquares'
--In this case (there is a single match), if possible,
--the parameters will be folded after demoting and the type signature will be removed.
sumSquares x y = sq x + sq y
where p=2 {-There is a comment-}
sq z = z^p --there is a comment
anotherFun 0 y = sq y
where sq x = x^2
|
SAdams601/HaRe
|
old/testing/demote/WhereIn4_TokOut.hs
|
bsd-3-clause
| 599
| 0
| 7
| 149
| 78
| 43
| 35
| 6
| 1
|
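For contrast with the comments above, one plausible shape of the code before the demotion (illustrative only, not the actual HaRe input file): sq is a top-level definition with its own parameter and type signature, used only by sumSquares.
module WhereIn4Before where
sq :: Int -> Int -> Int
sq p z = z ^ p
sumSquares x y = sq p x + sq p y
       where p = 2
anotherFun 0 y = sq y
       where sq x = x ^ 2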
{-# LANGUAGE GADTs, EmptyDataDecls, TypeFamilies, TypeOperators, DataKinds, FlexibleInstances #-}
{- Defines a C-like printf function using DataKinds extensions. -}
module T13659 where
import Data.Kind (Type)
-- format string parameterized by a list of types
data Format (fmt :: [Type]) where
X :: Format '[] -- empty string, i.e. ""
L :: a -> String -> Format '[] -- string literal, e.g. "hello"
S :: a -> Format '[String] -- "%s"
I :: Format a -> Format '[Int, a] -- "%d"
|
sdiehl/ghc
|
testsuite/tests/polykinds/T13659.hs
|
bsd-3-clause
| 514
| 0
| 10
| 120
| 107
| 63
| 44
| 8
| 0
|
module Main (main) where
import Language.Haskell.HLint (hlint)
import System.Exit (exitFailure, exitSuccess)
arguments :: [String]
arguments =
[ "benchmark"
, "executable"
, "library"
, "test-suite"
]
main :: IO ()
main = do
hints <- hlint arguments
if null hints then exitSuccess else exitFailure
|
danplubell/CTG1371
|
test-suite/HLint.hs
|
mit
| 329
| 0
| 8
| 74
| 97
| 56
| 41
| 13
| 2
|
module Main where
import Commands
import Lib
import Configuration
import Driver.Console
dummyScript :: Script ()
dummyScript = do
forward 5.5
clockwise 30.0
forward 3.2
main :: IO ()
main = run dummyScript
|
research-team/robot-dream
|
app/Main.hs
|
mit
| 222
| 0
| 7
| 48
| 69
| 35
| 34
| 12
| 1
|
main = readFile "018.txt" >>= ( print
. best
. map (map (read :: String -> Int) . words)
. lines
)
-- dynamic
best (x:[]) = x
best (x:xs) = zipWith (+) x $ zipWith max z ys
where z@(_:ys) = best xs
|
nickspinale/euler
|
complete/018.hs
|
mit
| 321
| 0
| 14
| 167
| 123
| 64
| 59
| 7
| 1
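A worked sketch of best on a three-row triangle (a toy input of my own, not the 018.txt data; assumes it is appended to the file above so best is in scope): each pass adds to a row the larger of the two totals directly below it.
-- best [[2,4,6]]           == [2,4,6]
-- best [[7,4],[2,4,6]]     == zipWith (+) [7,4] (zipWith max [2,4,6] [4,6]) == [11,10]
-- best [[3],[7,4],[2,4,6]] == zipWith (+) [3]   (zipWith max [11,10] [10]) == [14]
bestSmall :: [Int]
bestSmall = best [[3],[7,4],[2,4,6]]   -- [14], i.e. the path 3 -> 7 -> 4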
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-missing-fields #-}
-- | This module provides the tools for defining your database schema and using
-- it to generate Haskell data types and migrations.
module Database.Persist.TH
( -- * Parse entity defs
persistWith
, persistUpperCase
, persistLowerCase
, persistFileWith
, persistManyFileWith
-- * Turn @EntityDef@s into types
, mkPersist
, MkPersistSettings
, mpsBackend
, mpsGeneric
, mpsPrefixFields
, mpsEntityJSON
, mpsGenerateLenses
, EntityJSON(..)
, mkPersistSettings
, sqlSettings
-- * Various other TH functions
, mkMigrate
, mkSave
, mkDeleteCascade
, mkEntityDefList
, share
, derivePersistField
, derivePersistFieldJSON
, persistFieldFromEntity
-- * Internal
, lensPTH
, parseReferences
, embedEntityDefs
, AtLeastOneUniqueKey(..)
, OnlyOneUniqueKey(..)
) where
import Prelude hiding ((++), take, concat, splitAt, exp)
import Control.Monad (forM, unless, (<=<), mzero)
import Data.Aeson
( ToJSON (toJSON), FromJSON (parseJSON), (.=), object
, Value (Object), (.:), (.:?)
, eitherDecodeStrict'
)
import qualified Data.ByteString as BS
import Data.Char (toLower, toUpper)
import qualified Data.HashMap.Strict as HM
import Data.Int (Int64)
import Data.List (foldl')
import qualified Data.List.NonEmpty as NEL
import qualified Data.Map as M
import Data.Maybe (isJust, listToMaybe, mapMaybe, fromMaybe)
import Data.Monoid ((<>), mappend, mconcat)
import Data.Proxy (Proxy (Proxy))
import Data.Text (pack, Text, append, unpack, concat, uncons, cons, stripPrefix, stripSuffix)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import qualified Data.Text.Encoding as TE
import GHC.Generics (Generic)
import GHC.TypeLits
import Language.Haskell.TH.Lib (conT, varE)
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
import Text.Read (readPrec, lexP, step, prec, parens, Lexeme(Ident))
import Web.PathPieces (PathPiece(..))
import Web.HttpApiData (ToHttpApiData(..), FromHttpApiData(..))
import Database.Persist
import Database.Persist.Sql (Migration, PersistFieldSql, SqlBackend, migrate, sqlType)
import Database.Persist.Quasi
-- | This special-cases "type_" and strips out its underscore. When
-- used for JSON serialization and deserialization, it works around
-- <https://github.com/yesodweb/persistent/issues/412>
unHaskellNameForJSON :: HaskellName -> Text
unHaskellNameForJSON = fixTypeUnderscore . unHaskellName
where
fixTypeUnderscore = \case
"type" -> "type_"
name -> name
-- | Converts a quasi-quoted syntax into a list of entity definitions, to be
-- used as input to the template haskell generation code (mkPersist).
persistWith :: PersistSettings -> QuasiQuoter
persistWith ps = QuasiQuoter
{ quoteExp = parseReferences ps . pack
}
-- | Apply 'persistWith' to 'upperCaseSettings'.
persistUpperCase :: QuasiQuoter
persistUpperCase = persistWith upperCaseSettings
-- | Apply 'persistWith' to 'lowerCaseSettings'.
persistLowerCase :: QuasiQuoter
persistLowerCase = persistWith lowerCaseSettings
-- | Same as 'persistWith', but uses an external file instead of a
-- quasiquotation. The recommended file extension is @.persistentmodels@.
persistFileWith :: PersistSettings -> FilePath -> Q Exp
persistFileWith ps fp = persistManyFileWith ps [fp]
-- | Same as 'persistFileWith', but uses several external files instead of
-- one. Splitting your Persistent definitions into multiple modules can
-- potentially dramatically speed up compile times.
--
-- The recommended file extension is @.persistentmodels@.
--
-- ==== __Examples__
--
-- Split your Persistent definitions into multiple files (@models1@, @models2@),
-- then create a new module for each new file and run 'mkPersist' there:
--
-- @
-- -- Model1.hs
-- 'share'
-- ['mkPersist' 'sqlSettings']
-- $('persistFileWith' 'lowerCaseSettings' "models1")
-- @
-- @
-- -- Model2.hs
-- 'share'
-- ['mkPersist' 'sqlSettings']
-- $('persistFileWith' 'lowerCaseSettings' "models2")
-- @
--
-- Use 'persistManyFileWith' to create your migrations:
--
-- @
-- -- Migrate.hs
-- 'share'
-- ['mkMigrate' "migrateAll"]
-- $('persistManyFileWith' 'lowerCaseSettings' ["models1.persistentmodels","models2.persistentmodels"])
-- @
--
-- Tip: To get the same import behavior as if you were declaring all your models in
-- one file, import your new files @as Name@ into another file, then export @module Name@.
--
-- This approach may be used in the future to reduce memory usage during compilation,
-- but so far we've only seen mild reductions.
--
-- See <https://github.com/yesodweb/persistent/issues/778 persistent#778> and
-- <https://github.com/yesodweb/persistent/pull/791 persistent#791> for more details.
--
-- @since 2.5.4
persistManyFileWith :: PersistSettings -> [FilePath] -> Q Exp
persistManyFileWith ps fps = do
mapM_ qAddDependentFile fps
ss <- mapM (qRunIO . getFileContents) fps
let s = T.intercalate "\n" ss -- be tolerant of the user forgetting to put a line-break at EOF.
parseReferences ps s
getFileContents :: FilePath -> IO Text
getFileContents = fmap decodeUtf8 . BS.readFile
-- | Takes a list of (potentially) independently defined entities and properly
-- links all foreign keys to reference the right 'EntityDef', tying the knot
-- between entities.
--
-- Allows users to define entities independently or in separate modules and then
-- fix the cross-references between them at runtime to create a 'Migration'.
--
-- @since 2.7.2
embedEntityDefs :: [EntityDef] -> [EntityDef]
embedEntityDefs = snd . embedEntityDefsMap
embedEntityDefsMap :: [EntityDef] -> (M.Map HaskellName EmbedEntityDef, [EntityDef])
embedEntityDefsMap rawEnts = (embedEntityMap, noCycleEnts)
where
noCycleEnts = map breakCycleEnt entsWithEmbeds
-- every EntityDef could reference each-other (as an EmbedRef)
-- let Haskell tie the knot
embedEntityMap = constructEmbedEntityMap entsWithEmbeds
entsWithEmbeds = map setEmbedEntity rawEnts
setEmbedEntity ent = ent
{ entityFields = map (setEmbedField (entityHaskell ent) embedEntityMap) $ entityFields ent
}
-- self references are already broken
-- look at every emFieldEmbed to see if it refers to an already seen HaskellName
-- so start with entityHaskell ent and accumulate embeddedHaskell em
breakCycleEnt entDef =
let entName = entityHaskell entDef
in entDef { entityFields = map (breakCycleField entName) $ entityFields entDef }
breakCycleField entName f = case f of
FieldDef { fieldReference = EmbedRef em } ->
f { fieldReference = EmbedRef $ breakCycleEmbed [entName] em }
_ ->
f
breakCycleEmbed ancestors em =
em { embeddedFields = breakCycleEmField (emName : ancestors) <$> embeddedFields em
}
where
emName = embeddedHaskell em
breakCycleEmField ancestors emf = case embeddedHaskell <$> membed of
Nothing -> emf
Just embName -> if embName `elem` ancestors
then emf { emFieldEmbed = Nothing, emFieldCycle = Just embName }
else emf { emFieldEmbed = breakCycleEmbed ancestors <$> membed }
where
membed = emFieldEmbed emf
-- calls Quasi.parse to parse individual entities in isolation
-- afterwards, sets references to other entities
-- | @since 2.5.3
parseReferences :: PersistSettings -> Text -> Q Exp
parseReferences ps s = lift $
map (mkEntityDefSqlTypeExp embedEntityMap entityMap) noCycleEnts
where
(embedEntityMap, noCycleEnts) = embedEntityDefsMap $ parse ps s
entityMap = constructEntityMap noCycleEnts
stripId :: FieldType -> Maybe Text
stripId (FTTypeCon Nothing t) = stripSuffix "Id" t
stripId _ = Nothing
foreignReference :: FieldDef -> Maybe HaskellName
foreignReference field = case fieldReference field of
ForeignRef ref _ -> Just ref
_ -> Nothing
-- fieldSqlType at parse time can be an Exp
-- This helps delay setting fieldSqlType until lift time
data EntityDefSqlTypeExp
= EntityDefSqlTypeExp EntityDef SqlTypeExp [SqlTypeExp]
deriving Show
data SqlTypeExp
= SqlTypeExp FieldType
| SqlType' SqlType
deriving Show
instance Lift SqlTypeExp where
lift (SqlType' t) = lift t
lift (SqlTypeExp ftype) = return st
where
typ = ftToType ftype
mtyp = ConT ''Proxy `AppT` typ
typedNothing = SigE (ConE 'Proxy) mtyp
st = VarE 'sqlType `AppE` typedNothing
data FieldsSqlTypeExp = FieldsSqlTypeExp [FieldDef] [SqlTypeExp]
instance Lift FieldsSqlTypeExp where
lift (FieldsSqlTypeExp fields sqlTypeExps) =
lift $ zipWith FieldSqlTypeExp fields sqlTypeExps
data FieldSqlTypeExp = FieldSqlTypeExp FieldDef SqlTypeExp
instance Lift FieldSqlTypeExp where
lift (FieldSqlTypeExp FieldDef{..} sqlTypeExp) =
[|FieldDef fieldHaskell fieldDB fieldType $(lift sqlTypeExp) fieldAttrs fieldStrict fieldReference fieldComments|]
instance Lift EntityDefSqlTypeExp where
lift (EntityDefSqlTypeExp ent sqlTypeExp sqlTypeExps) =
[|ent { entityFields = $(lift $ FieldsSqlTypeExp (entityFields ent) sqlTypeExps)
, entityId = $(lift $ FieldSqlTypeExp (entityId ent) sqlTypeExp)
}
|]
instance Lift ReferenceDef where
lift NoReference = [|NoReference|]
lift (ForeignRef name ft) = [|ForeignRef name ft|]
lift (EmbedRef em) = [|EmbedRef em|]
lift (CompositeRef cdef) = [|CompositeRef cdef|]
lift SelfReference = [|SelfReference|]
instance Lift EmbedEntityDef where
lift (EmbedEntityDef name fields) = [|EmbedEntityDef name fields|]
instance Lift EmbedFieldDef where
lift (EmbedFieldDef name em cyc) = [|EmbedFieldDef name em cyc|]
type EmbedEntityMap = M.Map HaskellName EmbedEntityDef
constructEmbedEntityMap :: [EntityDef] -> EmbedEntityMap
constructEmbedEntityMap =
M.fromList . fmap (\ent -> (entityHaskell ent, toEmbedEntityDef ent))
type EntityMap = M.Map HaskellName EntityDef
constructEntityMap :: [EntityDef] -> EntityMap
constructEntityMap =
M.fromList . fmap (\ent -> (entityHaskell ent, ent))
data FTTypeConDescr = FTKeyCon deriving Show
mEmbedded :: EmbedEntityMap -> FieldType -> Either (Maybe FTTypeConDescr) EmbedEntityDef
mEmbedded _ (FTTypeCon Just{} _) = Left Nothing
mEmbedded ents (FTTypeCon Nothing n) =
let name = HaskellName n
in maybe (Left Nothing) Right $ M.lookup name ents
mEmbedded ents (FTList x) = mEmbedded ents x
mEmbedded ents (FTApp x y) =
    -- Key converts a Record to a RecordId
-- special casing this is obviously a hack
-- This problem may not be solvable with the current QuasiQuoted approach though
if x == FTTypeCon Nothing "Key"
then Left $ Just FTKeyCon
else mEmbedded ents y
setEmbedField :: HaskellName -> EmbedEntityMap -> FieldDef -> FieldDef
setEmbedField entName allEntities field = field
{ fieldReference =
case fieldReference field of
NoReference ->
case mEmbedded allEntities (fieldType field) of
Left _ ->
case stripId $ fieldType field of
Nothing -> NoReference
Just name ->
case M.lookup (HaskellName name) allEntities of
Nothing -> NoReference
Just _ -> ForeignRef (HaskellName name)
-- This can get corrected in mkEntityDefSqlTypeExp
(FTTypeCon (Just "Data.Int") "Int64")
Right em ->
if embeddedHaskell em /= entName
then EmbedRef em
else if maybeNullable field
then SelfReference
else case fieldType field of
FTList _ -> SelfReference
_ -> error $ unpack $ unHaskellName entName <> ": a self reference must be a Maybe"
existing -> existing
}
mkEntityDefSqlTypeExp :: EmbedEntityMap -> EntityMap -> EntityDef -> EntityDefSqlTypeExp
mkEntityDefSqlTypeExp emEntities entityMap ent =
EntityDefSqlTypeExp ent (getSqlType $ entityId ent) (map getSqlType $ entityFields ent)
where
getSqlType field =
maybe
(defaultSqlTypeExp field)
(SqlType' . SqlOther)
(listToMaybe $ mapMaybe (stripPrefix "sqltype=") $ fieldAttrs field)
-- In the case of embedding, there won't be any datatype created yet.
-- We just use SqlString, as the data will be serialized to JSON.
defaultSqlTypeExp field =
case mEmbedded emEntities ftype of
Right _ -> SqlType' SqlString
Left (Just FTKeyCon) -> SqlType' SqlString
Left Nothing -> case fieldReference field of
ForeignRef refName ft -> case M.lookup refName entityMap of
Nothing -> SqlTypeExp ft
-- A ForeignRef is blindly set to an Int64 in setEmbedField
-- correct that now
Just ent' -> case entityPrimary ent' of
Nothing -> SqlTypeExp ft
Just pdef -> case compositeFields pdef of
[] -> error "mkEntityDefSqlTypeExp: no composite fields"
[x] -> SqlTypeExp $ fieldType x
_ -> SqlType' $ SqlOther "Composite Reference"
CompositeRef _ -> SqlType' $ SqlOther "Composite Reference"
_ ->
case ftype of
-- In the case of lists, we always serialize to a string
-- value (via JSON).
--
-- Normally, this would be determined automatically by
-- SqlTypeExp. However, there's one corner case: if there's
-- a list of entity IDs, the datatype for the ID has not
-- yet been created, so the compiler will fail. This extra
-- clause works around this limitation.
FTList _ -> SqlType' SqlString
_ -> SqlTypeExp ftype
where
ftype = fieldType field
-- | Create data types and appropriate 'PersistEntity' instances for the given
-- 'EntityDef's. Works well with the persist quasi-quoter.
mkPersist :: MkPersistSettings -> [EntityDef] -> Q [Dec]
mkPersist mps ents' = do
x <- fmap Data.Monoid.mconcat $ mapM (persistFieldFromEntity mps) ents
y <- fmap mconcat $ mapM (mkEntity entityMap mps) ents
z <- fmap mconcat $ mapM (mkJSON mps) ents
uniqueKeyInstances <- fmap mconcat $ mapM (mkUniqueKeyInstances mps) ents
return $ mconcat [x, y, z, uniqueKeyInstances]
where
ents = map fixEntityDef ents'
entityMap = constructEntityMap ents
-- | Implement special preprocessing on EntityDef as necessary for 'mkPersist'.
-- For example, strip out any fields marked as MigrationOnly.
fixEntityDef :: EntityDef -> EntityDef
fixEntityDef ed =
ed { entityFields = filter keepField $ entityFields ed }
where
keepField fd = "MigrationOnly" `notElem` fieldAttrs fd &&
"SafeToRemove" `notElem` fieldAttrs fd
-- | Settings to be passed to the 'mkPersist' function.
data MkPersistSettings = MkPersistSettings
{ mpsBackend :: Type
    -- ^ Which database backend we're using.
--
    -- When generating data types, each type is given a generic version, which
    -- works with any backend, and a type synonym for the commonly used
    -- backend. This is where you specify that commonly used backend.
, mpsGeneric :: Bool
-- ^ Create generic types that can be used with multiple backends. Good for
-- reusable code, but makes error messages harder to understand. Default:
-- False.
, mpsPrefixFields :: Bool
-- ^ Prefix field names with the model name. Default: True.
, mpsEntityJSON :: Maybe EntityJSON
    -- ^ Generate @ToJSON@/@FromJSON@ instances for each model type. If it's
-- @Nothing@, no instances will be generated. Default:
--
-- @
-- Just EntityJSON
-- { entityToJSON = 'keyValueEntityToJSON
-- , entityFromJSON = 'keyValueEntityFromJSON
-- }
-- @
, mpsGenerateLenses :: !Bool
    -- ^ Instead of generating normal field accessors, generate lens-style accessors.
--
-- Default: False
--
-- @since 1.3.1
}
data EntityJSON = EntityJSON
{ entityToJSON :: Name
-- ^ Name of the @toJSON@ implementation for @Entity a@.
, entityFromJSON :: Name
-- ^ Name of the @fromJSON@ implementation for @Entity a@.
}
-- | Create an @MkPersistSettings@ with default values.
mkPersistSettings
:: Type -- ^ Value for 'mpsBackend'
-> MkPersistSettings
mkPersistSettings t = MkPersistSettings
{ mpsBackend = t
, mpsGeneric = False
, mpsPrefixFields = True
, mpsEntityJSON = Just EntityJSON
{ entityToJSON = 'entityIdToJSON
, entityFromJSON = 'entityIdFromJSON
}
, mpsGenerateLenses = False
}
-- | Use the 'SqlPersist' backend.
sqlSettings :: MkPersistSettings
sqlSettings = mkPersistSettings $ ConT ''SqlBackend
recNameNoUnderscore :: MkPersistSettings -> HaskellName -> HaskellName -> Text
recNameNoUnderscore mps dt f
| mpsPrefixFields mps = lowerFirst (unHaskellName dt) ++ upperFirst ft
| otherwise = lowerFirst ft
where
ft = unHaskellName f
recName :: MkPersistSettings -> HaskellName -> HaskellName -> Text
recName mps dt f =
addUnderscore $ recNameNoUnderscore mps dt f
where
addUnderscore
| mpsGenerateLenses mps = ("_" ++)
| otherwise = id
lowerFirst :: Text -> Text
lowerFirst t =
case uncons t of
Just (a, b) -> cons (toLower a) b
Nothing -> t
upperFirst :: Text -> Text
upperFirst t =
case uncons t of
Just (a, b) -> cons (toUpper a) b
Nothing -> t
dataTypeDec :: MkPersistSettings -> EntityDef -> Q Dec
dataTypeDec mps t = do
let names = map (mkName . unpack) $ entityDerives t
#if MIN_VERSION_template_haskell(2,12,0)
DataD [] nameFinal paramsFinal
Nothing
constrs
<$> fmap (pure . DerivClause Nothing) (mapM conT names)
#else
DataD [] nameFinal paramsFinal
Nothing
constrs
<$> mapM conT names
#endif
where
mkCol x fd@FieldDef {..} =
(mkName $ unpack $ recName mps x fieldHaskell,
if fieldStrict then isStrict else notStrict,
maybeIdType mps fd Nothing Nothing
)
(nameFinal, paramsFinal)
| mpsGeneric mps = (nameG, [PlainTV backend])
| otherwise = (name, [])
nameG = mkName $ unpack $ unHaskellName (entityHaskell t) ++ "Generic"
name = mkName $ unpack $ unHaskellName $ entityHaskell t
cols = map (mkCol $ entityHaskell t) $ entityFields t
backend = backendName
constrs
| entitySum t = map sumCon $ entityFields t
| otherwise = [RecC name cols]
sumCon fd = NormalC
(sumConstrName mps t fd)
[(notStrict, maybeIdType mps fd Nothing Nothing)]
sumConstrName :: MkPersistSettings -> EntityDef -> FieldDef -> Name
sumConstrName mps t FieldDef {..} = mkName $ unpack $ concat
[ if mpsPrefixFields mps
then unHaskellName $ entityHaskell t
else ""
, upperFirst $ unHaskellName fieldHaskell
, "Sum"
]
uniqueTypeDec :: MkPersistSettings -> EntityDef -> Dec
uniqueTypeDec mps t =
DataInstD [] ''Unique
[genericDataType mps (entityHaskell t) backendT]
Nothing
(map (mkUnique mps t) $ entityUniques t)
(derivClause $ entityUniques t)
where
derivClause [] = []
#if MIN_VERSION_template_haskell(2,12,0)
derivClause _ = [DerivClause Nothing [ConT ''Show]]
#else
derivClause _ = [ConT ''Show]
#endif
mkUnique :: MkPersistSettings -> EntityDef -> UniqueDef -> Con
mkUnique mps t (UniqueDef (HaskellName constr) _ fields attrs) =
NormalC (mkName $ unpack constr) types
where
types =
map (go . flip lookup3 (entityFields t) . unHaskellName . fst) fields
force = "!force" `elem` attrs
go :: (FieldDef, IsNullable) -> (Strict, Type)
go (_, Nullable _) | not force = error nullErrMsg
go (fd, y) = (notStrict, maybeIdType mps fd Nothing (Just y))
lookup3 :: Text -> [FieldDef] -> (FieldDef, IsNullable)
lookup3 s [] =
error $ unpack $ "Column not found: " ++ s ++ " in unique " ++ constr
lookup3 x (fd@FieldDef {..}:rest)
| x == unHaskellName fieldHaskell = (fd, nullable fieldAttrs)
| otherwise = lookup3 x rest
nullErrMsg =
mconcat [ "Error: By default we disallow NULLables in an uniqueness "
, "constraint. The semantics of how NULL interacts with those "
, "constraints is non-trivial: two NULL values are not "
, "considered equal for the purposes of an uniqueness "
, "constraint. If you understand this feature, it is possible "
, "to use it your advantage. *** Use a \"!force\" attribute "
, "on the end of the line that defines your uniqueness "
, "constraint in order to disable this check. ***" ]
maybeIdType :: MkPersistSettings
-> FieldDef
-> Maybe Name -- ^ backend
-> Maybe IsNullable
-> Type
maybeIdType mps fd mbackend mnull = maybeTyp mayNullable idtyp
where
mayNullable = case mnull of
(Just (Nullable ByMaybeAttr)) -> True
_ -> maybeNullable fd
idtyp = idType mps fd mbackend
backendDataType :: MkPersistSettings -> Type
backendDataType mps
| mpsGeneric mps = backendT
| otherwise = mpsBackend mps
genericDataType :: MkPersistSettings
-> HaskellName -- ^ entity name
-> Type -- ^ backend
-> Type
genericDataType mps (HaskellName typ') backend
| mpsGeneric mps = ConT (mkName $ unpack $ typ' ++ "Generic") `AppT` backend
| otherwise = ConT $ mkName $ unpack typ'
idType :: MkPersistSettings -> FieldDef -> Maybe Name -> Type
idType mps fd mbackend =
case foreignReference fd of
Just typ ->
ConT ''Key
`AppT` genericDataType mps typ (VarT $ fromMaybe backendName mbackend)
Nothing -> ftToType $ fieldType fd
degen :: [Clause] -> [Clause]
degen [] =
let err = VarE 'error `AppE` LitE (StringL
"Degenerate case, should never happen")
in [normalClause [WildP] err]
degen x = x
mkToPersistFields :: MkPersistSettings -> String -> EntityDef -> Q Dec
mkToPersistFields mps constr ed@EntityDef { entitySum = isSum, entityFields = fields } = do
clauses <-
if isSum
then sequence $ zipWith goSum fields [1..]
else fmap return go
return $ FunD 'toPersistFields clauses
where
go :: Q Clause
go = do
xs <- sequence $ replicate fieldCount $ newName "x"
let pat = ConP (mkName constr) $ map VarP xs
sp <- [|SomePersistField|]
let bod = ListE $ map (AppE sp . VarE) xs
return $ normalClause [pat] bod
fieldCount = length fields
goSum :: FieldDef -> Int -> Q Clause
goSum fd idx = do
let name = sumConstrName mps ed fd
enull <- [|SomePersistField PersistNull|]
let beforeCount = idx - 1
afterCount = fieldCount - idx
before = replicate beforeCount enull
after = replicate afterCount enull
x <- newName "x"
sp <- [|SomePersistField|]
let body = ListE $ mconcat
[ before
, [sp `AppE` VarE x]
, after
]
return $ normalClause [ConP name [VarP x]] body
mkToFieldNames :: [UniqueDef] -> Q Dec
mkToFieldNames pairs = do
pairs' <- mapM go pairs
return $ FunD 'persistUniqueToFieldNames $ degen pairs'
where
go (UniqueDef constr _ names _) = do
names' <- lift names
return $
normalClause
[RecP (mkName $ unpack $ unHaskellName constr) []]
names'
mkUniqueToValues :: [UniqueDef] -> Q Dec
mkUniqueToValues pairs = do
pairs' <- mapM go pairs
return $ FunD 'persistUniqueToValues $ degen pairs'
where
go :: UniqueDef -> Q Clause
go (UniqueDef constr _ names _) = do
xs <- mapM (const $ newName "x") names
let pat = ConP (mkName $ unpack $ unHaskellName constr) $ map VarP xs
tpv <- [|toPersistValue|]
let bod = ListE $ map (AppE tpv . VarE) xs
return $ normalClause [pat] bod
isNotNull :: PersistValue -> Bool
isNotNull PersistNull = False
isNotNull _ = True
mapLeft :: (a -> c) -> Either a b -> Either c b
mapLeft _ (Right r) = Right r
mapLeft f (Left l) = Left (f l)
mkFromPersistValues :: MkPersistSettings -> EntityDef -> Q [Clause]
mkFromPersistValues _ t@(EntityDef { entitySum = False }) =
fromValues t "fromPersistValues" entE $ entityFields t
where
entE = ConE $ mkName $ unpack entName
entName = unHaskellName $ entityHaskell t
mkFromPersistValues mps t@(EntityDef { entitySum = True }) = do
nothing <- [|Left ("Invalid fromPersistValues input: sum type with all nulls. Entity: " `mappend` entName)|]
clauses <- mkClauses [] $ entityFields t
return $ clauses `mappend` [normalClause [WildP] nothing]
where
entName = unHaskellName $ entityHaskell t
mkClauses _ [] = return []
mkClauses before (field:after) = do
x <- newName "x"
let null' = ConP 'PersistNull []
pat = ListP $ mconcat
[ map (const null') before
, [VarP x]
, map (const null') after
]
constr = ConE $ sumConstrName mps t field
fs <- [|fromPersistValue $(return $ VarE x)|]
let guard' = NormalG $ VarE 'isNotNull `AppE` VarE x
let clause = Clause [pat] (GuardedB [(guard', InfixE (Just constr) fmapE (Just fs))]) []
clauses <- mkClauses (field : before) after
return $ clause : clauses
type Lens s t a b = forall f. Functor f => (a -> f b) -> s -> f t
lensPTH :: (s -> a) -> (s -> b -> t) -> Lens s t a b
lensPTH sa sbt afb s = fmap (sbt s) (afb $ sa s)
fmapE :: Exp
fmapE = VarE 'fmap
mkLensClauses :: MkPersistSettings -> EntityDef -> Q [Clause]
mkLensClauses mps t = do
lens' <- [|lensPTH|]
getId <- [|entityKey|]
setId <- [|\(Entity _ value) key -> Entity key value|]
getVal <- [|entityVal|]
dot <- [|(.)|]
keyVar <- newName "key"
valName <- newName "value"
xName <- newName "x"
let idClause = normalClause
[ConP (keyIdName t) []]
(lens' `AppE` getId `AppE` setId)
if entitySum t
then return $ idClause : map (toSumClause lens' keyVar valName xName) (entityFields t)
else return $ idClause : map (toClause lens' getVal dot keyVar valName xName) (entityFields t)
where
toClause lens' getVal dot keyVar valName xName f = normalClause
[ConP (filterConName mps t f) []]
(lens' `AppE` getter `AppE` setter)
where
fieldName = mkName $ unpack $ recName mps (entityHaskell t) (fieldHaskell f)
getter = InfixE (Just $ VarE fieldName) dot (Just getVal)
setter = LamE
[ ConP 'Entity [VarP keyVar, VarP valName]
, VarP xName
]
$ ConE 'Entity `AppE` VarE keyVar `AppE` RecUpdE
(VarE valName)
[(fieldName, VarE xName)]
toSumClause lens' keyVar valName xName f = normalClause
[ConP (filterConName mps t f) []]
(lens' `AppE` getter `AppE` setter)
where
emptyMatch = Match WildP (NormalB $ VarE 'error `AppE` LitE (StringL "Tried to use fieldLens on a Sum type")) []
getter = LamE
[ ConP 'Entity [WildP, VarP valName]
] $ CaseE (VarE valName)
$ Match (ConP (sumConstrName mps t f) [VarP xName]) (NormalB $ VarE xName) []
-- FIXME It would be nice if the types expressed that the Field is
-- a sum type and therefore could result in Maybe.
: if length (entityFields t) > 1 then [emptyMatch] else []
setter = LamE
[ ConP 'Entity [VarP keyVar, WildP]
, VarP xName
]
$ ConE 'Entity `AppE` VarE keyVar `AppE` (ConE (sumConstrName mps t f) `AppE` VarE xName)
-- | Declare the key type and associated instances.
-- @'PathPiece'@, @'ToHttpApiData'@, and @'FromHttpApiData'@ instances are only generated for a Key with one field.
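--
-- As a rough sketch (the @Person@ entity name is illustrative): for an entity
-- with the default id and a non-composite key, the generated declaration is
-- roughly
--
-- @
-- newtype instance Key Person = PersonKey { unPersonKey :: BackendKey backend }
-- @
--
-- where @backend@ is the backend fixed by the settings, or a type variable in
-- the generic case.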
mkKeyTypeDec :: MkPersistSettings -> EntityDef -> Q (Dec, [Dec])
mkKeyTypeDec mps t = do
(instDecs, i) <-
if mpsGeneric mps
then if not useNewtype
then do pfDec <- pfInstD
return (pfDec, [''Generic])
else do gi <- genericNewtypeInstances
return (gi, [])
else if not useNewtype
then do pfDec <- pfInstD
return (pfDec, [''Show, ''Read, ''Eq, ''Ord, ''Generic])
else do
let allInstances = [''Show, ''Read, ''Eq, ''Ord, ''PathPiece, ''ToHttpApiData, ''FromHttpApiData, ''PersistField, ''PersistFieldSql, ''ToJSON, ''FromJSON]
if customKeyType
then return ([], allInstances)
else do
bi <- backendKeyI
return (bi, allInstances)
#if MIN_VERSION_template_haskell(2,12,0)
cxti <- mapM conT i
let kd = if useNewtype
then NewtypeInstD [] k [recordType] Nothing dec [DerivClause Nothing cxti]
else DataInstD [] k [recordType] Nothing [dec] [DerivClause Nothing cxti]
#else
cxti <- mapM conT i
let kd = if useNewtype
then NewtypeInstD [] k [recordType] Nothing dec cxti
else DataInstD [] k [recordType] Nothing [dec] cxti
#endif
return (kd, instDecs)
where
keyConE = keyConExp t
unKeyE = unKeyExp t
dec = RecC (keyConName t) (keyFields mps t)
k = ''Key
recordType = genericDataType mps (entityHaskell t) backendT
pfInstD = -- FIXME: generate a PersistMap instead of PersistList
[d|instance PersistField (Key $(pure recordType)) where
toPersistValue = PersistList . keyToValues
fromPersistValue (PersistList l) = keyFromValues l
fromPersistValue got = error $ "fromPersistValue: expected PersistList, got: " `mappend` show got
instance PersistFieldSql (Key $(pure recordType)) where
sqlType _ = SqlString
instance ToJSON (Key $(pure recordType))
instance FromJSON (Key $(pure recordType))
|]
keyStringL = StringL . keyString
-- ghc 7.6 cannot parse the left arrow Ident $() <- lexP
keyPattern = BindS (ConP 'Ident [LitP $ keyStringL t])
backendKeyGenericI =
[d| instance PersistStore $(pure backendT) =>
ToBackendKey $(pure backendT) $(pure recordType) where
toBackendKey = $(return unKeyE)
fromBackendKey = $(return keyConE)
|]
backendKeyI = let bdt = backendDataType mps in
[d| instance ToBackendKey $(pure bdt) $(pure recordType) where
toBackendKey = $(return unKeyE)
fromBackendKey = $(return keyConE)
|]
-- truly unfortunate that TH doesn't support standalone deriving
-- https://ghc.haskell.org/trac/ghc/ticket/8100
genericNewtypeInstances = do
instances <- [|lexP|] >>= \lexPE -> [| step readPrec >>= return . ($(pure keyConE) )|] >>= \readE -> do
alwaysInstances <-
[d|instance Show (BackendKey $(pure backendT)) => Show (Key $(pure recordType)) where
showsPrec i x = showParen (i > app_prec) $
(showString $ $(pure $ LitE $ keyStringL t) `mappend` " ") .
showsPrec i ($(return unKeyE) x)
where app_prec = (10::Int)
instance Read (BackendKey $(pure backendT)) => Read (Key $(pure recordType)) where
readPrec = parens $ (prec app_prec $ $(pure $ DoE [keyPattern lexPE, NoBindS readE]))
where app_prec = (10::Int)
instance Eq (BackendKey $(pure backendT)) => Eq (Key $(pure recordType)) where
x == y =
($(return unKeyE) x) ==
($(return unKeyE) y)
instance Ord (BackendKey $(pure backendT)) => Ord (Key $(pure recordType)) where
compare x y = compare
($(return unKeyE) x)
($(return unKeyE) y)
instance ToHttpApiData (BackendKey $(pure backendT)) => ToHttpApiData (Key $(pure recordType)) where
toUrlPiece = toUrlPiece . $(return unKeyE)
instance FromHttpApiData (BackendKey $(pure backendT)) => FromHttpApiData(Key $(pure recordType)) where
parseUrlPiece = fmap $(return keyConE) . parseUrlPiece
instance PathPiece (BackendKey $(pure backendT)) => PathPiece (Key $(pure recordType)) where
toPathPiece = toPathPiece . $(return unKeyE)
fromPathPiece = fmap $(return keyConE) . fromPathPiece
instance PersistField (BackendKey $(pure backendT)) => PersistField (Key $(pure recordType)) where
toPersistValue = toPersistValue . $(return unKeyE)
fromPersistValue = fmap $(return keyConE) . fromPersistValue
instance PersistFieldSql (BackendKey $(pure backendT)) => PersistFieldSql (Key $(pure recordType)) where
sqlType = sqlType . fmap $(return unKeyE)
instance ToJSON (BackendKey $(pure backendT)) => ToJSON (Key $(pure recordType)) where
toJSON = toJSON . $(return unKeyE)
instance FromJSON (BackendKey $(pure backendT)) => FromJSON (Key $(pure recordType)) where
parseJSON = fmap $(return keyConE) . parseJSON
|]
if customKeyType then return alwaysInstances
else fmap (alwaysInstances `mappend`) backendKeyGenericI
return instances
useNewtype = pkNewtype mps t
customKeyType = not (defaultIdType t) || not useNewtype || isJust (entityPrimary t)
keyIdName :: EntityDef -> Name
keyIdName = mkName . unpack . keyIdText
keyIdText :: EntityDef -> Text
keyIdText t = unHaskellName (entityHaskell t) `mappend` "Id"
unKeyName :: EntityDef -> Name
unKeyName t = mkName $ "un" `mappend` keyString t
unKeyExp :: EntityDef -> Exp
unKeyExp = VarE . unKeyName
backendT :: Type
backendT = VarT backendName
backendName :: Name
backendName = mkName "backend"
keyConName :: EntityDef -> Name
keyConName t = mkName $ resolveConflict $ keyString t
where
resolveConflict kn = if conflict then kn `mappend` "'" else kn
conflict = any ((== HaskellName "key") . fieldHaskell) $ entityFields t
keyConExp :: EntityDef -> Exp
keyConExp = ConE . keyConName
keyString :: EntityDef -> String
keyString = unpack . keyText
keyText :: EntityDef -> Text
keyText t = unHaskellName (entityHaskell t) ++ "Key"
pkNewtype :: MkPersistSettings -> EntityDef -> Bool
pkNewtype mps t = length (keyFields mps t) < 2
defaultIdType :: EntityDef -> Bool
defaultIdType t = fieldType (entityId t) == FTTypeCon Nothing (keyIdText t)
keyFields :: MkPersistSettings -> EntityDef -> [(Name, Strict, Type)]
keyFields mps t = case entityPrimary t of
Just pdef -> map primaryKeyVar (compositeFields pdef)
Nothing -> if defaultIdType t
then [idKeyVar backendKeyType]
else [idKeyVar $ ftToType $ fieldType $ entityId t]
where
backendKeyType
| mpsGeneric mps = ConT ''BackendKey `AppT` backendT
| otherwise = ConT ''BackendKey `AppT` mpsBackend mps
idKeyVar ft = (unKeyName t, notStrict, ft)
primaryKeyVar fd = ( keyFieldName mps t fd
, notStrict
, ftToType $ fieldType fd
)
keyFieldName :: MkPersistSettings -> EntityDef -> FieldDef -> Name
keyFieldName mps t fd
| pkNewtype mps t = unKeyName t
| otherwise = mkName $ unpack $ lowerFirst (keyText t) `mappend` unHaskellName (fieldHaskell fd)
mkKeyToValues :: MkPersistSettings -> EntityDef -> Q Dec
mkKeyToValues mps t = do
(p, e) <- case entityPrimary t of
Nothing ->
([],) <$> [|(:[]) . toPersistValue . $(return $ unKeyExp t)|]
Just pdef ->
return $ toValuesPrimary pdef
return $ FunD 'keyToValues $ return $ normalClause p e
where
toValuesPrimary pdef =
( [VarP recordName]
, ListE $ map (\fd -> VarE 'toPersistValue `AppE` (VarE (keyFieldName mps t fd) `AppE` VarE recordName)) $ compositeFields pdef
)
recordName = mkName "record"
normalClause :: [Pat] -> Exp -> Clause
normalClause p e = Clause p (NormalB e) []
mkKeyFromValues :: MkPersistSettings -> EntityDef -> Q Dec
mkKeyFromValues _mps t = do
clauses <- case entityPrimary t of
Nothing -> do
e <- [|fmap $(return keyConE) . fromPersistValue . headNote|]
return [normalClause [] e]
Just pdef ->
fromValues t "keyFromValues" keyConE (compositeFields pdef)
return $ FunD 'keyFromValues clauses
where
keyConE = keyConExp t
headNote :: [PersistValue] -> PersistValue
headNote = \case
[x] -> x
xs -> error $ "mkKeyFromValues: expected a list of one element, got: " `mappend` show xs
fromValues :: EntityDef -> Text -> Exp -> [FieldDef] -> Q [Clause]
fromValues t funName conE fields = do
x <- newName "x"
let funMsg = entityText t `mappend` ": " `mappend` funName `mappend` " failed on: "
patternMatchFailure <- [|Left $ mappend funMsg (pack $ show $(return $ VarE x))|]
suc <- patternSuccess
return [ suc, normalClause [VarP x] patternMatchFailure ]
where
patternSuccess =
case fields of
[] -> do
rightE <- [|Right|]
return $ normalClause [ListP []] (rightE `AppE` conE)
_ -> do
x1 <- newName "x1"
restNames <- mapM (\i -> newName $ "x" `mappend` show i) [2..length fields]
(fpv1:mkPersistValues) <- mapM mkPersistValue fields
app1E <- [|(<$>)|]
let conApp = infixFromPersistValue app1E fpv1 conE x1
applyE <- [|(<*>)|]
let applyFromPersistValue = infixFromPersistValue applyE
return $ normalClause
[ListP $ map VarP (x1:restNames)]
(foldl' (\exp (name, fpv) -> applyFromPersistValue fpv exp name) conApp (zip restNames mkPersistValues))
infixFromPersistValue applyE fpv exp name =
UInfixE exp applyE (fpv `AppE` VarE name)
mkPersistValue field =
[|mapLeft (fieldError t field) . fromPersistValue|]
fieldError :: EntityDef -> FieldDef -> Text -> Text
fieldError entity field err = mconcat
[ "Couldn't parse field `"
, fieldName
, "` from table `"
, tableName
, "`. "
, err
]
where
fieldName =
unHaskellName (fieldHaskell field)
tableName =
unDBName (entityDB entity)
mkEntity :: EntityMap -> MkPersistSettings -> EntityDef -> Q [Dec]
mkEntity entityMap mps t = do
t' <- liftAndFixKeys entityMap t
let nameT = unHaskellName entName
let nameS = unpack nameT
let clazz = ConT ''PersistEntity `AppT` genDataType
tpf <- mkToPersistFields mps nameS t
fpv <- mkFromPersistValues mps t
utv <- mkUniqueToValues $ entityUniques t
puk <- mkUniqueKeys t
fkc <- mapM (mkForeignKeysComposite mps t) $ entityForeigns t
let primaryField = entityId t
fields <- mapM (mkField mps t) $ primaryField : entityFields t
toFieldNames <- mkToFieldNames $ entityUniques t
(keyTypeDec, keyInstanceDecs) <- mkKeyTypeDec mps t
keyToValues' <- mkKeyToValues mps t
keyFromValues' <- mkKeyFromValues mps t
let addSyn -- FIXME maybe remove this
| mpsGeneric mps = (:) $
TySynD (mkName nameS) [] $
genericDataType mps entName $ mpsBackend mps
| otherwise = id
lensClauses <- mkLensClauses mps t
lenses <- mkLenses mps t
let instanceConstraint = if not (mpsGeneric mps) then [] else
[mkClassP ''PersistStore [backendT]]
dtd <- dataTypeDec mps t
return $ addSyn $
dtd : mconcat fkc `mappend`
([ TySynD (keyIdName t) [] $
ConT ''Key `AppT` ConT (mkName nameS)
, instanceD instanceConstraint clazz
[ uniqueTypeDec mps t
, keyTypeDec
, keyToValues'
, keyFromValues'
, FunD 'entityDef [normalClause [WildP] t']
, tpf
, FunD 'fromPersistValues fpv
, toFieldNames
, utv
, puk
, DataInstD
[]
''EntityField
[ genDataType
, VarT $ mkName "typ"
]
Nothing
(map fst fields)
[]
, FunD 'persistFieldDef (map snd fields)
, TySynInstD
''PersistEntityBackend
(TySynEqn
[genDataType]
(backendDataType mps))
, FunD 'persistIdField [normalClause [] (ConE $ keyIdName t)]
, FunD 'fieldLens lensClauses
]
] `mappend` lenses) `mappend` keyInstanceDecs
where
genDataType = genericDataType mps entName backendT
entName = entityHaskell t
mkUniqueKeyInstances :: MkPersistSettings -> EntityDef -> Q [Dec]
mkUniqueKeyInstances mps t = do
-- FIXME: isExtEnabled breaks the benchmark
undecidableInstancesEnabled <- isExtEnabled UndecidableInstances
unless undecidableInstancesEnabled . fail
$ "Generating Persistent entities now requires the 'UndecidableInstances' "
`mappend` "language extension. Please enable it in your file by copy/pasting "
`mappend` "this line into the top of your file: \n\n"
`mappend` "{-# LANGUAGE UndecidableInstances #-}"
case entityUniques t of
[] -> mappend <$> typeErrorSingle <*> typeErrorAtLeastOne
[_] -> mappend <$> singleUniqueKey <*> atLeastOneKey
(_:_) -> mappend <$> typeErrorMultiple <*> atLeastOneKey
where
requireUniquesPName = mkName "requireUniquesP"
onlyUniquePName = mkName "onlyUniqueP"
typeErrorSingle = mkOnlyUniqueError typeErrorNoneCtx
typeErrorMultiple = mkOnlyUniqueError typeErrorMultipleCtx
withPersistStoreWriteCxt =
if mpsGeneric mps
then do
write <- [t|PersistStoreWrite $(pure (VarT $ mkName "backend")) |]
pure [write]
else do
pure []
typeErrorNoneCtx = do
tyErr <- [t|TypeError (NoUniqueKeysError $(pure genDataType))|]
(tyErr :) <$> withPersistStoreWriteCxt
typeErrorMultipleCtx = do
tyErr <- [t|TypeError (MultipleUniqueKeysError $(pure genDataType))|]
(tyErr :) <$> withPersistStoreWriteCxt
mkOnlyUniqueError :: Q Cxt -> Q [Dec]
mkOnlyUniqueError mkCtx = do
ctx <- mkCtx
let impl = mkImpossible onlyUniquePName
pure [instanceD ctx onlyOneUniqueKeyClass impl]
mkImpossible name =
[ FunD name
[ Clause
[ WildP ]
(NormalB
(VarE (mkName "error") `AppE` LitE (StringL "impossible"))
)
[]
]
]
typeErrorAtLeastOne :: Q [Dec]
typeErrorAtLeastOne = do
let impl = mkImpossible requireUniquesPName
cxt <- typeErrorMultipleCtx
pure [instanceD cxt atLeastOneUniqueKeyClass impl]
singleUniqueKey :: Q [Dec]
singleUniqueKey = do
expr <- [e| head . persistUniqueKeys|]
let impl = [FunD onlyUniquePName [Clause [] (NormalB expr) []]]
cxt <- withPersistStoreWriteCxt
pure [instanceD cxt onlyOneUniqueKeyClass impl]
atLeastOneUniqueKeyClass = ConT ''AtLeastOneUniqueKey `AppT` genDataType
onlyOneUniqueKeyClass = ConT ''OnlyOneUniqueKey `AppT` genDataType
atLeastOneKey :: Q [Dec]
atLeastOneKey = do
expr <- [e| NEL.fromList . persistUniqueKeys|]
let impl = [FunD requireUniquesPName [Clause [] (NormalB expr) []]]
cxt <- withPersistStoreWriteCxt
pure [instanceD cxt atLeastOneUniqueKeyClass impl]
genDataType = genericDataType mps (entityHaskell t) backendT
entityText :: EntityDef -> Text
entityText = unHaskellName . entityHaskell
mkLenses :: MkPersistSettings -> EntityDef -> Q [Dec]
mkLenses mps _ | not (mpsGenerateLenses mps) = return []
mkLenses _ ent | entitySum ent = return []
mkLenses mps ent = fmap mconcat $ forM (entityFields ent) $ \field -> do
let lensName' = recNameNoUnderscore mps (entityHaskell ent) (fieldHaskell field)
lensName = mkName $ unpack lensName'
fieldName = mkName $ unpack $ "_" ++ lensName'
needleN <- newName "needle"
setterN <- newName "setter"
fN <- newName "f"
aN <- newName "a"
yN <- newName "y"
let needle = VarE needleN
setter = VarE setterN
f = VarE fN
a = VarE aN
y = VarE yN
fT = mkName "f"
-- FIXME if we want to get really fancy, then: if this field is the
-- *only* Id field present, then set backend1 and backend2 to different
-- values
backend1 = backendName
backend2 = backendName
aT = maybeIdType mps field (Just backend1) Nothing
bT = maybeIdType mps field (Just backend2) Nothing
mkST backend = genericDataType mps (entityHaskell ent) (VarT backend)
sT = mkST backend1
tT = mkST backend2
t1 `arrow` t2 = ArrowT `AppT` t1 `AppT` t2
vars = PlainTV fT
: (if mpsGeneric mps then [PlainTV backend1{-, PlainTV backend2-}] else [])
return
[ SigD lensName $ ForallT vars [mkClassP ''Functor [VarT fT]] $
(aT `arrow` (VarT fT `AppT` bT)) `arrow`
(sT `arrow` (VarT fT `AppT` tT))
, FunD lensName $ return $ Clause
[VarP fN, VarP aN]
(NormalB $ fmapE
`AppE` setter
`AppE` (f `AppE` needle))
[ FunD needleN [normalClause [] (VarE fieldName `AppE` a)]
, FunD setterN $ return $ normalClause
[VarP yN]
(RecUpdE a
[ (fieldName, y)
])
]
]
mkForeignKeysComposite :: MkPersistSettings -> EntityDef -> ForeignDef -> Q [Dec]
mkForeignKeysComposite mps t ForeignDef {..} = do
let fieldName f = mkName $ unpack $ recName mps (entityHaskell t) f
let fname = fieldName foreignConstraintNameHaskell
let reftableString = unpack $ unHaskellName foreignRefTableHaskell
let reftableKeyName = mkName $ reftableString `mappend` "Key"
let tablename = mkName $ unpack $ entityText t
recordName <- newName "record"
let fldsE = map (\((foreignName, _),_) -> VarE (fieldName foreignName)
`AppE` VarE recordName) foreignFields
let mkKeyE = foldl' AppE (maybeExp foreignNullable $ ConE reftableKeyName) fldsE
let fn = FunD fname [normalClause [VarP recordName] mkKeyE]
let t2 = maybeTyp foreignNullable $ ConT ''Key `AppT` ConT (mkName reftableString)
let sig = SigD fname $ (ArrowT `AppT` (ConT tablename)) `AppT` t2
return [sig, fn]
maybeExp :: Bool -> Exp -> Exp
maybeExp may exp | may = fmapE `AppE` exp
| otherwise = exp
maybeTyp :: Bool -> Type -> Type
maybeTyp may typ | may = ConT ''Maybe `AppT` typ
| otherwise = typ
-- | produce code similar to the following:
--
-- @
-- instance PersistEntity e => PersistField e where
-- toPersistValue entity = PersistMap $ zip columnNames (map toPersistValue $ toPersistFields entity)
-- fromPersistValue (PersistMap o) =
-- let columns = HM.fromList o
-- in fromPersistValues $ map (\name ->
-- case HM.lookup name columns of
-- Just v -> v
-- Nothing -> PersistNull) columnNames
-- fromPersistValue x = Left $ "Expected PersistMap, received: " ++ show x
-- sqlType _ = SqlString
-- @
persistFieldFromEntity :: MkPersistSettings -> EntityDef -> Q [Dec]
persistFieldFromEntity mps e = do
ss <- [|SqlString|]
obj <- [|\ent -> PersistMap $ zip (map pack columnNames) (map toPersistValue $ toPersistFields ent)|]
fpv <- [|\x -> let columns = HM.fromList x
in fromPersistValues $ map
(\(name) ->
case HM.lookup (pack name) columns of
Just v -> v
Nothing -> PersistNull)
$ columnNames
|]
compose <- [|(<=<)|]
getPersistMap' <- [|getPersistMap|]
return
[ persistFieldInstanceD (mpsGeneric mps) typ
[ FunD 'toPersistValue [ normalClause [] obj ]
, FunD 'fromPersistValue
[ normalClause [] (InfixE (Just fpv) compose $ Just getPersistMap')
]
]
, persistFieldSqlInstanceD (mpsGeneric mps) typ
[ sqlTypeFunD ss
]
]
where
typ = genericDataType mps (entityHaskell e) backendT
entFields = entityFields e
columnNames = map (unpack . unHaskellName . fieldHaskell) entFields
-- | Apply the given list of functions to the same @EntityDef@s.
--
-- This function is useful for cases such as:
--
-- >>> share [mkSave "myDefs", mkPersist sqlSettings] [persistLowerCase|...|]
share :: [[EntityDef] -> Q [Dec]] -> [EntityDef] -> Q [Dec]
share fs x = mconcat <$> mapM ($ x) fs
-- | Save the @EntityDef@s passed in under the given name.
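--
-- For example, @'mkSave' "myDefs"@ (as in the 'share' example above) produces,
-- roughly:
--
-- @
-- myDefs :: [EntityDef]
-- myDefs = [...] -- the lifted definitions
-- @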
mkSave :: String -> [EntityDef] -> Q [Dec]
mkSave name' defs' = do
let name = mkName name'
defs <- lift defs'
return [ SigD name $ ListT `AppT` ConT ''EntityDef
, FunD name [normalClause [] defs]
]
data Dep = Dep
{ depTarget :: HaskellName
, depSourceTable :: HaskellName
, depSourceField :: HaskellName
, depSourceNull :: IsNullable
}
-- | Generate a 'DeleteCascade' instance for the given @EntityDef@s.
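--
-- Typically passed to 'share' alongside the other generators (sketch only):
--
-- @
-- 'share' ['mkPersist' sqlSettings, 'mkDeleteCascade' sqlSettings] [...]
-- @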
mkDeleteCascade :: MkPersistSettings -> [EntityDef] -> Q [Dec]
mkDeleteCascade mps defs = do
let deps = concatMap getDeps defs
mapM (go deps) defs
where
getDeps :: EntityDef -> [Dep]
getDeps def =
concatMap getDeps' $ entityFields $ fixEntityDef def
where
getDeps' :: FieldDef -> [Dep]
getDeps' field@FieldDef {..} =
case foreignReference field of
Just name ->
return Dep
{ depTarget = name
, depSourceTable = entityHaskell def
, depSourceField = fieldHaskell
, depSourceNull = nullable fieldAttrs
}
Nothing -> []
go :: [Dep] -> EntityDef -> Q Dec
go allDeps EntityDef{entityHaskell = name} = do
let deps = filter (\x -> depTarget x == name) allDeps
key <- newName "key"
let del = VarE 'delete
let dcw = VarE 'deleteCascadeWhere
just <- [|Just|]
filt <- [|Filter|]
eq <- [|Eq|]
value <- [|FilterValue|]
let mkStmt :: Dep -> Stmt
mkStmt dep = NoBindS
$ dcw `AppE`
ListE
[ filt `AppE` ConE filtName
`AppE` (value `AppE` val (depSourceNull dep))
`AppE` eq
]
where
filtName = filterConName' mps (depSourceTable dep) (depSourceField dep)
val (Nullable ByMaybeAttr) = just `AppE` VarE key
val _ = VarE key
let stmts :: [Stmt]
stmts = map mkStmt deps `mappend`
[NoBindS $ del `AppE` VarE key]
let entityT = genericDataType mps name backendT
return $
instanceD
[ mkClassP ''PersistQuery [backendT]
, mkEqualP (ConT ''PersistEntityBackend `AppT` entityT) (ConT ''BaseBackend `AppT` backendT)
]
(ConT ''DeleteCascade `AppT` entityT `AppT` backendT)
[ FunD 'deleteCascade
[normalClause [VarP key] (DoE stmts)]
]
-- | Creates a declaration for the @['EntityDef']@ from the @persistent@
-- schema. This is necessary because the Persistent QuasiQuoter is unable
-- to know the correct type of ID fields, and assumes that they are all
-- Int64.
--
-- Provide this in the list you give to 'share', much like @'mkMigrate'@.
--
-- @
-- 'share' ['mkMigrate' "migrateAll", 'mkEntityDefList' "entityDefs"] [...]
-- @
--
-- @since 2.7.1
mkEntityDefList
:: String
-- ^ The name that will be given to the 'EntityDef' list.
-> [EntityDef]
-> Q [Dec]
mkEntityDefList entityList entityDefs = do
let entityListName = mkName entityList
edefs <- fmap ListE
. forM entityDefs
$ \(EntityDef { entityHaskell = HaskellName haskellName }) ->
let entityType = conT (mkName (T.unpack haskellName))
in [|entityDef (Proxy :: Proxy $(entityType))|]
typ <- [t|[EntityDef]|]
pure
[ SigD entityListName typ
, ValD (VarP entityListName) (NormalB edefs) []
]
mkUniqueKeys :: EntityDef -> Q Dec
mkUniqueKeys def | entitySum def =
return $ FunD 'persistUniqueKeys [normalClause [WildP] (ListE [])]
mkUniqueKeys def = do
c <- clause
return $ FunD 'persistUniqueKeys [c]
where
clause = do
xs <- forM (entityFields def) $ \fd -> do
let x = fieldHaskell fd
x' <- newName $ '_' : unpack (unHaskellName x)
return (x, x')
let pcs = map (go xs) $ entityUniques def
let pat = ConP
(mkName $ unpack $ unHaskellName $ entityHaskell def)
(map (VarP . snd) xs)
return $ normalClause [pat] (ListE pcs)
go :: [(HaskellName, Name)] -> UniqueDef -> Exp
go xs (UniqueDef name _ cols _) =
foldl' (go' xs) (ConE (mkName $ unpack $ unHaskellName name)) (map fst cols)
go' :: [(HaskellName, Name)] -> Exp -> HaskellName -> Exp
go' xs front col =
let Just col' = lookup col xs
in front `AppE` VarE col'
sqlTypeFunD :: Exp -> Dec
sqlTypeFunD st = FunD 'sqlType
[ normalClause [WildP] st ]
typeInstanceD :: Name
-> Bool -- ^ include PersistStore backend constraint
-> Type -> [Dec] -> Dec
typeInstanceD clazz hasBackend typ =
instanceD ctx (ConT clazz `AppT` typ)
where
ctx
| hasBackend = [mkClassP ''PersistStore [backendT]]
| otherwise = []
persistFieldInstanceD :: Bool -- ^ include PersistStore backend constraint
-> Type -> [Dec] -> Dec
persistFieldInstanceD = typeInstanceD ''PersistField
persistFieldSqlInstanceD :: Bool -- ^ include PersistStore backend constraint
-> Type -> [Dec] -> Dec
persistFieldSqlInstanceD = typeInstanceD ''PersistFieldSql
-- | Automatically creates a valid 'PersistField' instance for any datatype
-- that has valid 'Show' and 'Read' instances. Can be very convenient for
-- 'Enum' types.
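--
-- A usage sketch (the @Employment@ type is illustrative; it is conventionally
-- defined in its own module, before the entities that use it):
--
-- @
-- data Employment = Employed | Unemployed | Retired
--     deriving (Show, Read, Eq)
-- derivePersistField "Employment"
-- @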
derivePersistField :: String -> Q [Dec]
derivePersistField s = do
ss <- [|SqlString|]
tpv <- [|PersistText . pack . show|]
fpv <- [|\dt v ->
case fromPersistValue v of
Left e -> Left e
Right s' ->
case reads $ unpack s' of
(x, _):_ -> Right x
[] -> Left $ pack "Invalid " ++ pack dt ++ pack ": " ++ s'|]
return
[ persistFieldInstanceD False (ConT $ mkName s)
[ FunD 'toPersistValue
[ normalClause [] tpv
]
, FunD 'fromPersistValue
[ normalClause [] (fpv `AppE` LitE (StringL s))
]
]
, persistFieldSqlInstanceD False (ConT $ mkName s)
[ sqlTypeFunD ss
]
]
-- | Automatically creates a valid 'PersistField' instance for any datatype
-- that has valid 'ToJSON' and 'FromJSON' instances. For a datatype @T@ it
-- generates instances similar to these:
--
-- @
-- instance PersistField T where
-- toPersistValue = PersistByteString . L.toStrict . encode
-- fromPersistValue = (left T.pack) . eitherDecodeStrict' <=< fromPersistValue
-- instance PersistFieldSql T where
-- sqlType _ = SqlString
-- @
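--
-- Usage mirrors 'derivePersistField' (sketch only; @T@ must already have
-- 'ToJSON' and 'FromJSON' instances in scope):
--
-- @
-- derivePersistFieldJSON "T"
-- @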
derivePersistFieldJSON :: String -> Q [Dec]
derivePersistFieldJSON s = do
ss <- [|SqlString|]
tpv <- [|PersistText . toJsonText|]
fpv <- [|\dt v -> do
text <- fromPersistValue v
let bs' = TE.encodeUtf8 text
case eitherDecodeStrict' bs' of
Left e -> Left $ pack "JSON decoding error for " ++ pack dt ++ pack ": " ++ pack e ++ pack ". On Input: " ++ decodeUtf8 bs'
Right x -> Right x|]
return
[ persistFieldInstanceD False (ConT $ mkName s)
[ FunD 'toPersistValue
[ normalClause [] tpv
]
, FunD 'fromPersistValue
[ normalClause [] (fpv `AppE` LitE (StringL s))
]
]
, persistFieldSqlInstanceD False (ConT $ mkName s)
[ sqlTypeFunD ss
]
]
-- | Creates a single function to perform all migrations for the entities
-- defined here. One thing to be aware of is dependencies: if you have entities
-- with foreign references, make sure to place those definitions after the
-- entities they reference.
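--
-- A typical invocation combines it with 'mkPersist' via 'share' (sketch only):
--
-- @
-- 'share' ['mkPersist' sqlSettings, 'mkMigrate' "migrateAll"] [...]
-- @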
mkMigrate :: String -> [EntityDef] -> Q [Dec]
mkMigrate fun allDefs = do
body' <- body
return
[ SigD (mkName fun) typ
, FunD (mkName fun) [normalClause [] body']
]
where
defs = filter isMigrated allDefs
isMigrated def = "no-migrate" `notElem` entityAttrs def
typ = ConT ''Migration
entityMap = constructEntityMap allDefs
body :: Q Exp
body =
case defs of
[] -> [|return ()|]
_ -> do
defsName <- newName "defs"
defsStmt <- do
defs' <- mapM (liftAndFixKeys entityMap) defs
let defsExp = ListE defs'
return $ LetS [ValD (VarP defsName) (NormalB defsExp) []]
stmts <- mapM (toStmt $ VarE defsName) defs
return (DoE $ defsStmt : stmts)
toStmt :: Exp -> EntityDef -> Q Stmt
toStmt defsExp ed = do
u <- liftAndFixKeys entityMap ed
m <- [|migrate|]
return $ NoBindS $ m `AppE` defsExp `AppE` u
liftAndFixKeys :: EntityMap -> EntityDef -> Q Exp
liftAndFixKeys entityMap EntityDef{..} =
[|EntityDef
entityHaskell
entityDB
entityId
entityAttrs
$(ListE <$> mapM (liftAndFixKey entityMap) entityFields)
entityUniques
entityForeigns
entityDerives
entityExtra
entitySum
entityComments
|]
liftAndFixKey :: EntityMap -> FieldDef -> Q Exp
liftAndFixKey entityMap (FieldDef a b c sqlTyp e f fieldRef mcomments) =
[|FieldDef a b c $(sqlTyp') e f fieldRef' mcomments|]
where
(fieldRef', sqlTyp') = fromMaybe (fieldRef, lift sqlTyp) $
case fieldRef of
ForeignRef refName _ft -> case M.lookup refName entityMap of
Nothing -> Nothing
Just ent ->
case fieldReference $ entityId ent of
fr@(ForeignRef _Name ft) -> Just (fr, lift $ SqlTypeExp ft)
_ -> Nothing
_ -> Nothing
instance Lift EntityDef where
lift EntityDef{..} =
[|EntityDef
entityHaskell
entityDB
entityId
entityAttrs
entityFields
entityUniques
entityForeigns
entityDerives
entityExtra
entitySum
entityComments
|]
instance Lift FieldDef where
lift (FieldDef a b c d e f g h) = [|FieldDef a b c d e f g h|]
instance Lift UniqueDef where
lift (UniqueDef a b c d) = [|UniqueDef a b c d|]
instance Lift CompositeDef where
lift (CompositeDef a b) = [|CompositeDef a b|]
instance Lift ForeignDef where
lift (ForeignDef a b c d e f g) = [|ForeignDef a b c d e f g|]
-- | A hack to avoid orphans.
class Lift' a where
lift' :: a -> Q Exp
instance Lift' Text where
lift' = liftT
instance Lift' a => Lift' [a] where
lift' xs = do { xs' <- mapM lift' xs; return (ListE xs') }
instance (Lift' k, Lift' v) => Lift' (M.Map k v) where
lift' m = [|M.fromList $(fmap ListE $ mapM liftPair $ M.toList m)|]
-- Overlapping instances are used for automatic lifting
-- while avoiding an orphan Lift instance for Text.
instance {-# OVERLAPPABLE #-} Lift' a => Lift a where
lift = lift'
liftT :: Text -> Q Exp
liftT t = [|pack $(lift (unpack t))|]
liftPair :: (Lift' k, Lift' v) => (k, v) -> Q Exp
liftPair (k, v) = [|($(lift' k), $(lift' v))|]
instance Lift HaskellName where
lift (HaskellName t) = [|HaskellName t|]
instance Lift DBName where
lift (DBName t) = [|DBName t|]
instance Lift FieldType where
lift (FTTypeCon Nothing t) = [|FTTypeCon Nothing t|]
lift (FTTypeCon (Just x) t) = [|FTTypeCon (Just x) t|]
lift (FTApp x y) = [|FTApp x y|]
lift (FTList x) = [|FTList x|]
instance Lift PersistFilter where
lift Eq = [|Eq|]
lift Ne = [|Ne|]
lift Gt = [|Gt|]
lift Lt = [|Lt|]
lift Ge = [|Ge|]
lift Le = [|Le|]
lift In = [|In|]
lift NotIn = [|NotIn|]
lift (BackendSpecificFilter x) = [|BackendSpecificFilter x|]
instance Lift PersistUpdate where
lift Assign = [|Assign|]
lift Add = [|Add|]
lift Subtract = [|Subtract|]
lift Multiply = [|Multiply|]
lift Divide = [|Divide|]
lift (BackendSpecificUpdate x) = [|BackendSpecificUpdate x|]
instance Lift SqlType where
lift SqlString = [|SqlString|]
lift SqlInt32 = [|SqlInt32|]
lift SqlInt64 = [|SqlInt64|]
lift SqlReal = [|SqlReal|]
lift (SqlNumeric x y) =
[|SqlNumeric (fromInteger x') (fromInteger y')|]
where
x' = fromIntegral x :: Integer
y' = fromIntegral y :: Integer
lift SqlBool = [|SqlBool|]
lift SqlDay = [|SqlDay|]
lift SqlTime = [|SqlTime|]
lift SqlDayTime = [|SqlDayTime|]
lift SqlBlob = [|SqlBlob|]
lift (SqlOther a) = [|SqlOther a|]
-- Ent
-- fieldName FieldType
--
-- forall . typ ~ FieldType => EntFieldName
--
-- EntFieldName = FieldDef ....
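--
-- For example (illustrative): an entity @Person@ with a field @name Text@
-- yields a constructor @PersonName@, constrained so that it is effectively
-- @EntityField Person Text@, plus a clause
-- @persistFieldDef PersonName = FieldDef ...@.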
mkField :: MkPersistSettings -> EntityDef -> FieldDef -> Q (Con, Clause)
mkField mps et cd = do
let con = ForallC
[]
[mkEqualP (VarT $ mkName "typ") $ maybeIdType mps cd Nothing Nothing]
$ NormalC name []
bod <- lift cd
let cla = normalClause
[ConP name []]
bod
return (con, cla)
where
name = filterConName mps et cd
maybeNullable :: FieldDef -> Bool
maybeNullable fd = nullable (fieldAttrs fd) == Nullable ByMaybeAttr
filterConName :: MkPersistSettings
-> EntityDef
-> FieldDef
-> Name
filterConName mps entity field = filterConName' mps (entityHaskell entity) (fieldHaskell field)
filterConName' :: MkPersistSettings
-> HaskellName -- ^ table
-> HaskellName -- ^ field
-> Name
filterConName' mps entity field = mkName $ unpack $ concat
[ if mpsPrefixFields mps || field == HaskellName "Id"
then unHaskellName entity
else ""
, upperFirst $ unHaskellName field
]
ftToType :: FieldType -> Type
ftToType (FTTypeCon Nothing t) = ConT $ mkName $ unpack t
-- This type is generated from the Quasi-Quoter.
-- Adding this special case avoids users needing to import Data.Int
ftToType (FTTypeCon (Just "Data.Int") "Int64") = ConT ''Int64
ftToType (FTTypeCon (Just m) t) = ConT $ mkName $ unpack $ concat [m, ".", t]
ftToType (FTApp x y) = ftToType x `AppT` ftToType y
ftToType (FTList x) = ListT `AppT` ftToType x
infixr 5 ++
(++) :: Text -> Text -> Text
(++) = append
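-- Note: this module-local (++) appends 'Text' values, so the Text-building
-- helpers above (e.g. 'keyText') can use the familiar operator without
-- converting to String.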
mkJSON :: MkPersistSettings -> EntityDef -> Q [Dec]
mkJSON _ def | ("json" `notElem` entityAttrs def) = return []
mkJSON mps def = do
pureE <- [|pure|]
apE' <- [|(<*>)|]
packE <- [|pack|]
dotEqualE <- [|(.=)|]
dotColonE <- [|(.:)|]
dotColonQE <- [|(.:?)|]
objectE <- [|object|]
obj <- newName "obj"
mzeroE <- [|mzero|]
xs <- mapM (newName . unpack . unHaskellNameForJSON . fieldHaskell)
$ entityFields def
let conName = mkName $ unpack $ unHaskellName $ entityHaskell def
typ = genericDataType mps (entityHaskell def) backendT
toJSONI = typeInstanceD ''ToJSON (mpsGeneric mps) typ [toJSON']
toJSON' = FunD 'toJSON $ return $ normalClause
[ConP conName $ map VarP xs]
(objectE `AppE` ListE pairs)
pairs = zipWith toPair (entityFields def) xs
toPair f x = InfixE
(Just (packE `AppE` LitE (StringL $ unpack $ unHaskellName $ fieldHaskell f)))
dotEqualE
(Just $ VarE x)
fromJSONI = typeInstanceD ''FromJSON (mpsGeneric mps) typ [parseJSON']
parseJSON' = FunD 'parseJSON
[ normalClause [ConP 'Object [VarP obj]]
(foldl'
(\x y -> InfixE (Just x) apE' (Just y))
(pureE `AppE` ConE conName)
pulls
)
, normalClause [WildP] mzeroE
]
pulls = map toPull $ entityFields def
toPull f = InfixE
(Just $ VarE obj)
(if maybeNullable f then dotColonQE else dotColonE)
(Just $ AppE packE $ LitE $ StringL $ unpack $ unHaskellName $ fieldHaskell f)
case mpsEntityJSON mps of
Nothing -> return [toJSONI, fromJSONI]
Just entityJSON -> do
entityJSONIs <- if mpsGeneric mps
then [d|
instance PersistStore $(pure backendT) => ToJSON (Entity $(pure typ)) where
toJSON = $(varE (entityToJSON entityJSON))
instance PersistStore $(pure backendT) => FromJSON (Entity $(pure typ)) where
parseJSON = $(varE (entityFromJSON entityJSON))
|]
else [d|
instance ToJSON (Entity $(pure typ)) where
toJSON = $(varE (entityToJSON entityJSON))
instance FromJSON (Entity $(pure typ)) where
parseJSON = $(varE (entityFromJSON entityJSON))
|]
return $ toJSONI : fromJSONI : entityJSONIs
mkClassP :: Name -> [Type] -> Pred
mkClassP cla tys = foldl AppT (ConT cla) tys
mkEqualP :: Type -> Type -> Pred
mkEqualP tleft tright = foldl AppT EqualityT [tleft, tright]
notStrict :: Bang
notStrict = Bang NoSourceUnpackedness NoSourceStrictness
isStrict :: Bang
isStrict = Bang NoSourceUnpackedness SourceStrict
instanceD :: Cxt -> Type -> [Dec] -> Dec
instanceD = InstanceD Nothing
-- entityUpdates :: EntityDef -> [(HaskellName, FieldType, IsNullable, PersistUpdate)]
-- entityUpdates =
-- concatMap go . entityFields
-- where
-- go FieldDef {..} = map (\a -> (fieldHaskell, fieldType, nullable fieldAttrs, a)) [minBound..maxBound]
-- mkToUpdate :: String -> [(String, PersistUpdate)] -> Q Dec
-- mkToUpdate name pairs = do
-- pairs' <- mapM go pairs
-- return $ FunD (mkName name) $ degen pairs'
-- where
-- go (constr, pu) = do
-- pu' <- lift pu
-- return $ normalClause [RecP (mkName constr) []] pu'
-- mkToFieldName :: String -> [(String, String)] -> Dec
-- mkToFieldName func pairs =
-- FunD (mkName func) $ degen $ map go pairs
-- where
-- go (constr, name) =
-- normalClause [RecP (mkName constr) []] (LitE $ StringL name)
-- mkToValue :: String -> [String] -> Dec
-- mkToValue func = FunD (mkName func) . degen . map go
-- where
-- go constr =
-- let x = mkName "x"
-- in normalClause [ConP (mkName constr) [VarP x]]
-- (VarE 'toPersistValue `AppE` VarE x)
|
gbwey/persistent
|
persistent-template/Database/Persist/TH.hs
|
mit
| 69,073
| 0
| 23
| 20,156
| 16,423
| 8,628
| 7,795
| -1
| -1
|
module Y2018.M11.D05.Solution where
import Control.Arrow ((***))
import Data.Array
import Data.Set (Set)
import qualified Data.Set as Set
{--
This is from @fermatslibrary tweet on calculator-rectangular numbers:
https://twitter.com/fermatslibrary/status/1052544515632959488
It is put forward that every 'rectangular number' from the number-grid:
7 8 9
4 5 6
1 2 3
is divisible by 11.
Let's prove that intuitionistically.
First off, we have to enumerate the rectangular numbers. If we posit that these
numbers lie on some Cartesian plane, then we have a set of four numbers
from the same four digits of the upper left quadrant:
7 -> 8 -> 5 -> 4 ->> 7854
8 -> 5 -> 4 -> 7 ->> 8547
5 -> 4 -> 7 -> 8 ->> 5478 and
4 -> 7 -> 8 -> 5 ->> 4785
but, importantly, not the numbers 7548 nor its variants.
So it's rectangles, not cross-stitch.
So, first off: come up with a representation of the domain from which you can
construct these rectangular numbers.
--}
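{--
A quick sanity check of the claim (not part of the original puzzle):
7854 = 11 * 714, 8547 = 11 * 777, 5478 = 11 * 498, and 4785 = 11 * 435,
so the four rotations of the upper-left square are all multiples of 11.
--}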
type Digit = Int
type Index = Digit
type Domain = Array (Index,Index) Digit
calculator :: Domain
calculator = listArray ((1,1),(3,3)) [7,8,9,4,5,6,1,2,3]
-- okay, that was too easy. I love Haskell!
-- now from that domain, enumerate all the rectangular numbers
type RectangularNumber = (Digit, Digit, Digit, Digit)
allRectangularNumbers :: Domain -> Set RectangularNumber
allRectangularNumbers calc =
let idx = [1,2]
s = succ
adder = listArray (1,2) [[s, s . s], [s]] in
Set.fromList [rect | x <- idx, y <- idx, ax <- adder ! x, ay <- adder ! y,
rect <- chooseFrom calc x y ax ay]
chooseFrom :: Domain -> Index -> Index -> (Index -> Index) -> (Index -> Index)
-> [RectangularNumber]
chooseFrom calc x y sx sy =
[(calc ! (x,y), calc ! (sx x,y), calc ! (sx x, sy y), calc ! (x,sy y)),
(calc ! (sx x,y), calc ! (sx x, sy y), calc ! (x,sy y), calc ! (x,y)),
(calc ! (sx x, sy y), calc ! (x,sy y), calc ! (x,y), calc ! (sx x,y)),
(calc ! (x,sy y), calc ! (x,y), calc ! (sx x,y), calc ! (sx x, sy y)),
-- that's one direction, counter-clockwise, now let's go the other direction
(calc ! (x,y),calc ! (x, sy y), calc ! (sx x, sy y), calc ! (sx x, y)),
(calc ! (x, sy y), calc ! (sx x, sy y), calc ! (sx x, y), calc ! (x,y)),
(calc ! (sx x, sy y), calc ! (sx x, y), calc ! (x,y), calc ! (x,sy y)),
(calc ! (sx x, y), calc ! (x,y), calc ! (x,sy y), calc ! (sx x, sy y))]
-- How many rectangular numbers are there?
{--
>>> calc = allRectangularNumbers calculator
>>> length calc
72
--}
-- are the following rectangular numbers in that set?
inRectangularSet :: Set RectangularNumber -> [RectangularNumber] -> Bool
inRectangularSet rects = all (`Set.member` rects)
-- use the following rectangular numbers as a sample set:
samples :: [RectangularNumber]
samples = [(5,6,3,2),(2,8,7,1)]
{--
>>> inRectangularSet calc samples
True
--}
-- Next, partition the rectangular numbers into two heaps: one that has numbers
-- divisible by 11 and the other that has numbers that are not divisible by 11.
partitionBy11 :: Set RectangularNumber -> (Set RectangularNumber, Set RectangularNumber)
partitionBy11 = Set.partition ((== 0) . (`mod` 11) . rect2Int)
-- What are the sizes of the two sets?
-- You may wish to have a function that converts a RectangularNumber to an Int
rect2Int :: RectangularNumber -> Int
rect2Int (a,b,c,d) = a * 1000 + b * 100 + c * 10 + d
{--
>>> (length *** length) (partitionBy11 calc)
(72,0)
All the rectangular numbers are divisible by 11. Q.E.D.
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2018/M11/D05/Solution.hs
|
mit
| 3,567
| 0
| 12
| 774
| 1,091
| 631
| 460
| 37
| 1
|
module Vm.Ecma where
import Ecma.Prims
import Data.Word
import Data.Int
import Vm.Def
import qualified Data.HashTable.IO as H
import qualified MonadLib as ML
{-
data VmRt = VmRt_Undefined
| VmRt_Null
| VmRt_Boolean Bool
| VmRt_Int Int32
| VmRt_Uint Word32
| VmRt_Number Double
| VmRt_String String
| VmRt_Object VmObject
deriving (Show)-}
-- ECMA-262, section 9 (type conversions)
{-instance Coerce VmRt where
to_boolean VmRt_Undefined = False
to_boolean VmRt_Null = False
to_boolean (VmRt_Boolean a) = a
to_boolean (VmRt_Int a) = a /= 0
to_boolean (VmRt_Uint a) = a /= 0
to_boolean (VmRt_Number a)
| a == 0 = False
| isNaN a = False
| otherwise = True
to_boolean (VmRt_String a) = length a > 0
to_boolean (VmRt_Object _) = True
to_number VmRt_Undefined = nan
to_number VmRt_Null = 0
to_number (VmRt_Boolean True) = 1
to_number (VmRt_Boolean False) = 0
to_number (VmRt_Int a) = fromIntegral a
to_number (VmRt_Uint a) = fromIntegral a
to_number (VmRt_Number a) = a
to_number (VmRt_String a) = read a
to_number (VmRt_Object _) = undefined
to_string VmRt_Undefined = "undefined"
to_string VmRt_Null = "null"
to_string (VmRt_Boolean True) = "true"
to_string (VmRt_Boolean False) = "false"
to_string (VmRt_Int a) = show a
to_string (VmRt_Uint a) = show a
to_string (VmRt_Number a) = show a
to_string (VmRt_String a) = a
to_string (VmRt_Object _) = "[Object]"-}
|
phylake/avm3
|
vm/ecma.hs
|
mit
| 1,508
| 0
| 4
| 393
| 44
| 31
| 13
| 7
| 0
|
module Text.Enhask.CoreSpec where
import Test.Hspec
-- `main` is here so that this module can be run from GHCi on its own. It is
-- not needed for automatic spec discovery.
main :: IO ()
main = hspec spec
spec :: Spec
spec =
describe "foo"
$ it "42 == 42"
$ 42 `shouldBe` 42
|
astynax/enhask
|
test/Text/Enhask/CoreSpec.hs
|
mit
| 285
| 0
| 8
| 64
| 62
| 35
| 27
| 9
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift Compiler (0.9.0) --
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE YOU KNOW WHAT YOU ARE DOING --
-----------------------------------------------------------------
module Database.HBase.Internal.Thrift2.THBaseService where
import Prelude ( Bool(..), Enum, Double, String, Maybe(..),
Eq, Show, Ord,
return, length, IO, fromIntegral, fromEnum, toEnum,
(.), (&&), (||), (==), (++), ($), (-) )
import Control.Exception
import Data.ByteString.Lazy
import Data.Hashable
import Data.Int
import Data.Text.Lazy ( Text )
import qualified Data.Text.Lazy as TL
import Data.Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import Thrift
import Thrift.Types ()
import Database.HBase.Internal.Thrift2.HBase_Types
import qualified Database.HBase.Internal.Thrift2.THBaseService_Iface as Iface
-- HELPER FUNCTIONS AND STRUCTURES --
data Exists_args = Exists_args{f_Exists_args_table :: Maybe ByteString,f_Exists_args_get :: Maybe TGet} deriving (Show,Eq,Typeable)
instance Hashable Exists_args where
hashWithSalt salt record = salt `hashWithSalt` f_Exists_args_table record `hashWithSalt` f_Exists_args_get record
write_Exists_args oprot record = do
writeStructBegin oprot "Exists_args"
case f_Exists_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_Exists_args_get record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("get",T_STRUCT,2)
write_TGet oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_Exists_args_fields iprot record = do
(_,_t150,_id151) <- readFieldBegin iprot
if _t150 == T_STOP then return record else
case _id151 of
1 -> if _t150 == T_STRING then do
s <- readBinary iprot
read_Exists_args_fields iprot record{f_Exists_args_table=Just s}
else do
skip iprot _t150
read_Exists_args_fields iprot record
2 -> if _t150 == T_STRUCT then do
s <- (read_TGet iprot)
read_Exists_args_fields iprot record{f_Exists_args_get=Just s}
else do
skip iprot _t150
read_Exists_args_fields iprot record
_ -> do
skip iprot _t150
readFieldEnd iprot
read_Exists_args_fields iprot record
read_Exists_args iprot = do
_ <- readStructBegin iprot
record <- read_Exists_args_fields iprot (Exists_args{f_Exists_args_table=Nothing,f_Exists_args_get=Nothing})
readStructEnd iprot
return record
data Exists_result = Exists_result{f_Exists_result_success :: Maybe Bool,f_Exists_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable Exists_result where
hashWithSalt salt record = salt `hashWithSalt` f_Exists_result_success record `hashWithSalt` f_Exists_result_io record
write_Exists_result oprot record = do
writeStructBegin oprot "Exists_result"
case f_Exists_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_BOOL,0)
writeBool oprot _v
writeFieldEnd oprot}
case f_Exists_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_Exists_result_fields iprot record = do
(_,_t155,_id156) <- readFieldBegin iprot
if _t155 == T_STOP then return record else
case _id156 of
0 -> if _t155 == T_BOOL then do
s <- readBool iprot
read_Exists_result_fields iprot record{f_Exists_result_success=Just s}
else do
skip iprot _t155
read_Exists_result_fields iprot record
1 -> if _t155 == T_STRUCT then do
s <- (read_TIOError iprot)
read_Exists_result_fields iprot record{f_Exists_result_io=Just s}
else do
skip iprot _t155
read_Exists_result_fields iprot record
_ -> do
skip iprot _t155
readFieldEnd iprot
read_Exists_result_fields iprot record
read_Exists_result iprot = do
_ <- readStructBegin iprot
record <- read_Exists_result_fields iprot (Exists_result{f_Exists_result_success=Nothing,f_Exists_result_io=Nothing})
readStructEnd iprot
return record
data Get_args = Get_args{f_Get_args_table :: Maybe ByteString,f_Get_args_get :: Maybe TGet} deriving (Show,Eq,Typeable)
instance Hashable Get_args where
hashWithSalt salt record = salt `hashWithSalt` f_Get_args_table record `hashWithSalt` f_Get_args_get record
write_Get_args oprot record = do
writeStructBegin oprot "Get_args"
case f_Get_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_Get_args_get record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("get",T_STRUCT,2)
write_TGet oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_Get_args_fields iprot record = do
(_,_t160,_id161) <- readFieldBegin iprot
if _t160 == T_STOP then return record else
case _id161 of
1 -> if _t160 == T_STRING then do
s <- readBinary iprot
read_Get_args_fields iprot record{f_Get_args_table=Just s}
else do
skip iprot _t160
read_Get_args_fields iprot record
2 -> if _t160 == T_STRUCT then do
s <- (read_TGet iprot)
read_Get_args_fields iprot record{f_Get_args_get=Just s}
else do
skip iprot _t160
read_Get_args_fields iprot record
_ -> do
skip iprot _t160
readFieldEnd iprot
read_Get_args_fields iprot record
read_Get_args iprot = do
_ <- readStructBegin iprot
record <- read_Get_args_fields iprot (Get_args{f_Get_args_table=Nothing,f_Get_args_get=Nothing})
readStructEnd iprot
return record
data Get_result = Get_result{f_Get_result_success :: Maybe TResult,f_Get_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable Get_result where
hashWithSalt salt record = salt `hashWithSalt` f_Get_result_success record `hashWithSalt` f_Get_result_io record
write_Get_result oprot record = do
writeStructBegin oprot "Get_result"
case f_Get_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_STRUCT,0)
write_TResult oprot _v
writeFieldEnd oprot}
case f_Get_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_Get_result_fields iprot record = do
(_,_t165,_id166) <- readFieldBegin iprot
if _t165 == T_STOP then return record else
case _id166 of
0 -> if _t165 == T_STRUCT then do
s <- (read_TResult iprot)
read_Get_result_fields iprot record{f_Get_result_success=Just s}
else do
skip iprot _t165
read_Get_result_fields iprot record
1 -> if _t165 == T_STRUCT then do
s <- (read_TIOError iprot)
read_Get_result_fields iprot record{f_Get_result_io=Just s}
else do
skip iprot _t165
read_Get_result_fields iprot record
_ -> do
skip iprot _t165
readFieldEnd iprot
read_Get_result_fields iprot record
read_Get_result iprot = do
_ <- readStructBegin iprot
record <- read_Get_result_fields iprot (Get_result{f_Get_result_success=Nothing,f_Get_result_io=Nothing})
readStructEnd iprot
return record
data GetMultiple_args = GetMultiple_args{f_GetMultiple_args_table :: Maybe ByteString,f_GetMultiple_args_gets :: Maybe (Vector.Vector TGet)} deriving (Show,Eq,Typeable)
instance Hashable GetMultiple_args where
hashWithSalt salt record = salt `hashWithSalt` f_GetMultiple_args_table record `hashWithSalt` f_GetMultiple_args_gets record
write_GetMultiple_args oprot record = do
writeStructBegin oprot "GetMultiple_args"
case f_GetMultiple_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_GetMultiple_args_gets record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("gets",T_LIST,2)
(let f = Vector.mapM_ (\_viter169 -> write_TGet oprot _viter169) in do {writeListBegin oprot (T_STRUCT,fromIntegral $ Vector.length _v); f _v;writeListEnd oprot})
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_GetMultiple_args_fields iprot record = do
(_,_t171,_id172) <- readFieldBegin iprot
if _t171 == T_STOP then return record else
case _id172 of
1 -> if _t171 == T_STRING then do
s <- readBinary iprot
read_GetMultiple_args_fields iprot record{f_GetMultiple_args_table=Just s}
else do
skip iprot _t171
read_GetMultiple_args_fields iprot record
2 -> if _t171 == T_LIST then do
s <- (let f n = Vector.replicateM (fromIntegral n) ((read_TGet iprot)) in do {(_etype176,_size173) <- readListBegin iprot; f _size173})
read_GetMultiple_args_fields iprot record{f_GetMultiple_args_gets=Just s}
else do
skip iprot _t171
read_GetMultiple_args_fields iprot record
_ -> do
skip iprot _t171
readFieldEnd iprot
read_GetMultiple_args_fields iprot record
read_GetMultiple_args iprot = do
_ <- readStructBegin iprot
record <- read_GetMultiple_args_fields iprot (GetMultiple_args{f_GetMultiple_args_table=Nothing,f_GetMultiple_args_gets=Nothing})
readStructEnd iprot
return record
data GetMultiple_result = GetMultiple_result{f_GetMultiple_result_success :: Maybe (Vector.Vector TResult),f_GetMultiple_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable GetMultiple_result where
hashWithSalt salt record = salt `hashWithSalt` f_GetMultiple_result_success record `hashWithSalt` f_GetMultiple_result_io record
write_GetMultiple_result oprot record = do
writeStructBegin oprot "GetMultiple_result"
case f_GetMultiple_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_LIST,0)
(let f = Vector.mapM_ (\_viter180 -> write_TResult oprot _viter180) in do {writeListBegin oprot (T_STRUCT,fromIntegral $ Vector.length _v); f _v;writeListEnd oprot})
writeFieldEnd oprot}
case f_GetMultiple_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_GetMultiple_result_fields iprot record = do
(_,_t182,_id183) <- readFieldBegin iprot
if _t182 == T_STOP then return record else
case _id183 of
0 -> if _t182 == T_LIST then do
s <- (let f n = Vector.replicateM (fromIntegral n) ((read_TResult iprot)) in do {(_etype187,_size184) <- readListBegin iprot; f _size184})
read_GetMultiple_result_fields iprot record{f_GetMultiple_result_success=Just s}
else do
skip iprot _t182
read_GetMultiple_result_fields iprot record
1 -> if _t182 == T_STRUCT then do
s <- (read_TIOError iprot)
read_GetMultiple_result_fields iprot record{f_GetMultiple_result_io=Just s}
else do
skip iprot _t182
read_GetMultiple_result_fields iprot record
_ -> do
skip iprot _t182
readFieldEnd iprot
read_GetMultiple_result_fields iprot record
read_GetMultiple_result iprot = do
_ <- readStructBegin iprot
record <- read_GetMultiple_result_fields iprot (GetMultiple_result{f_GetMultiple_result_success=Nothing,f_GetMultiple_result_io=Nothing})
readStructEnd iprot
return record
data Put_args = Put_args{f_Put_args_table :: Maybe ByteString,f_Put_args_put :: Maybe TPut} deriving (Show,Eq,Typeable)
instance Hashable Put_args where
hashWithSalt salt record = salt `hashWithSalt` f_Put_args_table record `hashWithSalt` f_Put_args_put record
write_Put_args oprot record = do
writeStructBegin oprot "Put_args"
case f_Put_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_Put_args_put record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("put",T_STRUCT,2)
write_TPut oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_Put_args_fields iprot record = do
(_,_t192,_id193) <- readFieldBegin iprot
if _t192 == T_STOP then return record else
case _id193 of
1 -> if _t192 == T_STRING then do
s <- readBinary iprot
read_Put_args_fields iprot record{f_Put_args_table=Just s}
else do
skip iprot _t192
read_Put_args_fields iprot record
2 -> if _t192 == T_STRUCT then do
s <- (read_TPut iprot)
read_Put_args_fields iprot record{f_Put_args_put=Just s}
else do
skip iprot _t192
read_Put_args_fields iprot record
_ -> do
skip iprot _t192
readFieldEnd iprot
read_Put_args_fields iprot record
read_Put_args iprot = do
_ <- readStructBegin iprot
record <- read_Put_args_fields iprot (Put_args{f_Put_args_table=Nothing,f_Put_args_put=Nothing})
readStructEnd iprot
return record
data Put_result = Put_result{f_Put_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable Put_result where
hashWithSalt salt record = salt `hashWithSalt` f_Put_result_io record
write_Put_result oprot record = do
writeStructBegin oprot "Put_result"
case f_Put_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_Put_result_fields iprot record = do
(_,_t197,_id198) <- readFieldBegin iprot
if _t197 == T_STOP then return record else
case _id198 of
1 -> if _t197 == T_STRUCT then do
s <- (read_TIOError iprot)
read_Put_result_fields iprot record{f_Put_result_io=Just s}
else do
skip iprot _t197
read_Put_result_fields iprot record
_ -> do
skip iprot _t197
readFieldEnd iprot
read_Put_result_fields iprot record
read_Put_result iprot = do
_ <- readStructBegin iprot
record <- read_Put_result_fields iprot (Put_result{f_Put_result_io=Nothing})
readStructEnd iprot
return record
data CheckAndPut_args = CheckAndPut_args{f_CheckAndPut_args_table :: Maybe ByteString,f_CheckAndPut_args_row :: Maybe ByteString,f_CheckAndPut_args_family :: Maybe ByteString,f_CheckAndPut_args_qualifier :: Maybe ByteString,f_CheckAndPut_args_value :: Maybe ByteString,f_CheckAndPut_args_put :: Maybe TPut} deriving (Show,Eq,Typeable)
instance Hashable CheckAndPut_args where
hashWithSalt salt record = salt `hashWithSalt` f_CheckAndPut_args_table record `hashWithSalt` f_CheckAndPut_args_row record `hashWithSalt` f_CheckAndPut_args_family record `hashWithSalt` f_CheckAndPut_args_qualifier record `hashWithSalt` f_CheckAndPut_args_value record `hashWithSalt` f_CheckAndPut_args_put record
write_CheckAndPut_args oprot record = do
writeStructBegin oprot "CheckAndPut_args"
case f_CheckAndPut_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndPut_args_row record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("row",T_STRING,2)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndPut_args_family record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("family",T_STRING,3)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndPut_args_qualifier record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("qualifier",T_STRING,4)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndPut_args_value record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("value",T_STRING,5)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndPut_args_put record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("put",T_STRUCT,6)
write_TPut oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_CheckAndPut_args_fields iprot record = do
(_,_t202,_id203) <- readFieldBegin iprot
if _t202 == T_STOP then return record else
case _id203 of
1 -> if _t202 == T_STRING then do
s <- readBinary iprot
read_CheckAndPut_args_fields iprot record{f_CheckAndPut_args_table=Just s}
else do
skip iprot _t202
read_CheckAndPut_args_fields iprot record
2 -> if _t202 == T_STRING then do
s <- readBinary iprot
read_CheckAndPut_args_fields iprot record{f_CheckAndPut_args_row=Just s}
else do
skip iprot _t202
read_CheckAndPut_args_fields iprot record
3 -> if _t202 == T_STRING then do
s <- readBinary iprot
read_CheckAndPut_args_fields iprot record{f_CheckAndPut_args_family=Just s}
else do
skip iprot _t202
read_CheckAndPut_args_fields iprot record
4 -> if _t202 == T_STRING then do
s <- readBinary iprot
read_CheckAndPut_args_fields iprot record{f_CheckAndPut_args_qualifier=Just s}
else do
skip iprot _t202
read_CheckAndPut_args_fields iprot record
5 -> if _t202 == T_STRING then do
s <- readBinary iprot
read_CheckAndPut_args_fields iprot record{f_CheckAndPut_args_value=Just s}
else do
skip iprot _t202
read_CheckAndPut_args_fields iprot record
6 -> if _t202 == T_STRUCT then do
s <- (read_TPut iprot)
read_CheckAndPut_args_fields iprot record{f_CheckAndPut_args_put=Just s}
else do
skip iprot _t202
read_CheckAndPut_args_fields iprot record
_ -> do
skip iprot _t202
readFieldEnd iprot
read_CheckAndPut_args_fields iprot record
read_CheckAndPut_args iprot = do
_ <- readStructBegin iprot
record <- read_CheckAndPut_args_fields iprot (CheckAndPut_args{f_CheckAndPut_args_table=Nothing,f_CheckAndPut_args_row=Nothing,f_CheckAndPut_args_family=Nothing,f_CheckAndPut_args_qualifier=Nothing,f_CheckAndPut_args_value=Nothing,f_CheckAndPut_args_put=Nothing})
readStructEnd iprot
return record
data CheckAndPut_result = CheckAndPut_result{f_CheckAndPut_result_success :: Maybe Bool,f_CheckAndPut_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable CheckAndPut_result where
hashWithSalt salt record = salt `hashWithSalt` f_CheckAndPut_result_success record `hashWithSalt` f_CheckAndPut_result_io record
write_CheckAndPut_result oprot record = do
writeStructBegin oprot "CheckAndPut_result"
case f_CheckAndPut_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_BOOL,0)
writeBool oprot _v
writeFieldEnd oprot}
case f_CheckAndPut_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_CheckAndPut_result_fields iprot record = do
(_,_t207,_id208) <- readFieldBegin iprot
if _t207 == T_STOP then return record else
case _id208 of
0 -> if _t207 == T_BOOL then do
s <- readBool iprot
read_CheckAndPut_result_fields iprot record{f_CheckAndPut_result_success=Just s}
else do
skip iprot _t207
read_CheckAndPut_result_fields iprot record
1 -> if _t207 == T_STRUCT then do
s <- (read_TIOError iprot)
read_CheckAndPut_result_fields iprot record{f_CheckAndPut_result_io=Just s}
else do
skip iprot _t207
read_CheckAndPut_result_fields iprot record
_ -> do
skip iprot _t207
readFieldEnd iprot
read_CheckAndPut_result_fields iprot record
read_CheckAndPut_result iprot = do
_ <- readStructBegin iprot
record <- read_CheckAndPut_result_fields iprot (CheckAndPut_result{f_CheckAndPut_result_success=Nothing,f_CheckAndPut_result_io=Nothing})
readStructEnd iprot
return record
data PutMultiple_args = PutMultiple_args{f_PutMultiple_args_table :: Maybe ByteString,f_PutMultiple_args_puts :: Maybe (Vector.Vector TPut)} deriving (Show,Eq,Typeable)
instance Hashable PutMultiple_args where
hashWithSalt salt record = salt `hashWithSalt` f_PutMultiple_args_table record `hashWithSalt` f_PutMultiple_args_puts record
write_PutMultiple_args oprot record = do
writeStructBegin oprot "PutMultiple_args"
case f_PutMultiple_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_PutMultiple_args_puts record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("puts",T_LIST,2)
(let f = Vector.mapM_ (\_viter211 -> write_TPut oprot _viter211) in do {writeListBegin oprot (T_STRUCT,fromIntegral $ Vector.length _v); f _v;writeListEnd oprot})
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_PutMultiple_args_fields iprot record = do
(_,_t213,_id214) <- readFieldBegin iprot
if _t213 == T_STOP then return record else
case _id214 of
1 -> if _t213 == T_STRING then do
s <- readBinary iprot
read_PutMultiple_args_fields iprot record{f_PutMultiple_args_table=Just s}
else do
skip iprot _t213
read_PutMultiple_args_fields iprot record
2 -> if _t213 == T_LIST then do
s <- (let f n = Vector.replicateM (fromIntegral n) ((read_TPut iprot)) in do {(_etype218,_size215) <- readListBegin iprot; f _size215})
read_PutMultiple_args_fields iprot record{f_PutMultiple_args_puts=Just s}
else do
skip iprot _t213
read_PutMultiple_args_fields iprot record
_ -> do
skip iprot _t213
readFieldEnd iprot
read_PutMultiple_args_fields iprot record
read_PutMultiple_args iprot = do
_ <- readStructBegin iprot
record <- read_PutMultiple_args_fields iprot (PutMultiple_args{f_PutMultiple_args_table=Nothing,f_PutMultiple_args_puts=Nothing})
readStructEnd iprot
return record
data PutMultiple_result = PutMultiple_result{f_PutMultiple_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable PutMultiple_result where
hashWithSalt salt record = salt `hashWithSalt` f_PutMultiple_result_io record
write_PutMultiple_result oprot record = do
writeStructBegin oprot "PutMultiple_result"
case f_PutMultiple_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_PutMultiple_result_fields iprot record = do
(_,_t223,_id224) <- readFieldBegin iprot
if _t223 == T_STOP then return record else
case _id224 of
1 -> if _t223 == T_STRUCT then do
s <- (read_TIOError iprot)
read_PutMultiple_result_fields iprot record{f_PutMultiple_result_io=Just s}
else do
skip iprot _t223
read_PutMultiple_result_fields iprot record
_ -> do
skip iprot _t223
readFieldEnd iprot
read_PutMultiple_result_fields iprot record
read_PutMultiple_result iprot = do
_ <- readStructBegin iprot
record <- read_PutMultiple_result_fields iprot (PutMultiple_result{f_PutMultiple_result_io=Nothing})
readStructEnd iprot
return record
data DeleteSingle_args = DeleteSingle_args{f_DeleteSingle_args_table :: Maybe ByteString,f_DeleteSingle_args_deleteSingle :: Maybe TDelete} deriving (Show,Eq,Typeable)
instance Hashable DeleteSingle_args where
hashWithSalt salt record = salt `hashWithSalt` f_DeleteSingle_args_table record `hashWithSalt` f_DeleteSingle_args_deleteSingle record
write_DeleteSingle_args oprot record = do
writeStructBegin oprot "DeleteSingle_args"
case f_DeleteSingle_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_DeleteSingle_args_deleteSingle record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("deleteSingle",T_STRUCT,2)
write_TDelete oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_DeleteSingle_args_fields iprot record = do
(_,_t228,_id229) <- readFieldBegin iprot
if _t228 == T_STOP then return record else
case _id229 of
1 -> if _t228 == T_STRING then do
s <- readBinary iprot
read_DeleteSingle_args_fields iprot record{f_DeleteSingle_args_table=Just s}
else do
skip iprot _t228
read_DeleteSingle_args_fields iprot record
2 -> if _t228 == T_STRUCT then do
s <- (read_TDelete iprot)
read_DeleteSingle_args_fields iprot record{f_DeleteSingle_args_deleteSingle=Just s}
else do
skip iprot _t228
read_DeleteSingle_args_fields iprot record
_ -> do
skip iprot _t228
readFieldEnd iprot
read_DeleteSingle_args_fields iprot record
read_DeleteSingle_args iprot = do
_ <- readStructBegin iprot
record <- read_DeleteSingle_args_fields iprot (DeleteSingle_args{f_DeleteSingle_args_table=Nothing,f_DeleteSingle_args_deleteSingle=Nothing})
readStructEnd iprot
return record
data DeleteSingle_result = DeleteSingle_result{f_DeleteSingle_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable DeleteSingle_result where
hashWithSalt salt record = salt `hashWithSalt` f_DeleteSingle_result_io record
write_DeleteSingle_result oprot record = do
writeStructBegin oprot "DeleteSingle_result"
case f_DeleteSingle_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_DeleteSingle_result_fields iprot record = do
(_,_t233,_id234) <- readFieldBegin iprot
if _t233 == T_STOP then return record else
case _id234 of
1 -> if _t233 == T_STRUCT then do
s <- (read_TIOError iprot)
read_DeleteSingle_result_fields iprot record{f_DeleteSingle_result_io=Just s}
else do
skip iprot _t233
read_DeleteSingle_result_fields iprot record
_ -> do
skip iprot _t233
readFieldEnd iprot
read_DeleteSingle_result_fields iprot record
read_DeleteSingle_result iprot = do
_ <- readStructBegin iprot
record <- read_DeleteSingle_result_fields iprot (DeleteSingle_result{f_DeleteSingle_result_io=Nothing})
readStructEnd iprot
return record
data DeleteMultiple_args = DeleteMultiple_args{f_DeleteMultiple_args_table :: Maybe ByteString,f_DeleteMultiple_args_deletes :: Maybe (Vector.Vector TDelete)} deriving (Show,Eq,Typeable)
instance Hashable DeleteMultiple_args where
hashWithSalt salt record = salt `hashWithSalt` f_DeleteMultiple_args_table record `hashWithSalt` f_DeleteMultiple_args_deletes record
write_DeleteMultiple_args oprot record = do
writeStructBegin oprot "DeleteMultiple_args"
case f_DeleteMultiple_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_DeleteMultiple_args_deletes record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("deletes",T_LIST,2)
(let f = Vector.mapM_ (\_viter237 -> write_TDelete oprot _viter237) in do {writeListBegin oprot (T_STRUCT,fromIntegral $ Vector.length _v); f _v;writeListEnd oprot})
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_DeleteMultiple_args_fields iprot record = do
(_,_t239,_id240) <- readFieldBegin iprot
if _t239 == T_STOP then return record else
case _id240 of
1 -> if _t239 == T_STRING then do
s <- readBinary iprot
read_DeleteMultiple_args_fields iprot record{f_DeleteMultiple_args_table=Just s}
else do
skip iprot _t239
read_DeleteMultiple_args_fields iprot record
2 -> if _t239 == T_LIST then do
s <- (let f n = Vector.replicateM (fromIntegral n) ((read_TDelete iprot)) in do {(_etype244,_size241) <- readListBegin iprot; f _size241})
read_DeleteMultiple_args_fields iprot record{f_DeleteMultiple_args_deletes=Just s}
else do
skip iprot _t239
read_DeleteMultiple_args_fields iprot record
_ -> do
skip iprot _t239
readFieldEnd iprot
read_DeleteMultiple_args_fields iprot record
read_DeleteMultiple_args iprot = do
_ <- readStructBegin iprot
record <- read_DeleteMultiple_args_fields iprot (DeleteMultiple_args{f_DeleteMultiple_args_table=Nothing,f_DeleteMultiple_args_deletes=Nothing})
readStructEnd iprot
return record
data DeleteMultiple_result = DeleteMultiple_result{f_DeleteMultiple_result_success :: Maybe (Vector.Vector TDelete),f_DeleteMultiple_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable DeleteMultiple_result where
hashWithSalt salt record = salt `hashWithSalt` f_DeleteMultiple_result_success record `hashWithSalt` f_DeleteMultiple_result_io record
write_DeleteMultiple_result oprot record = do
writeStructBegin oprot "DeleteMultiple_result"
case f_DeleteMultiple_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_LIST,0)
(let f = Vector.mapM_ (\_viter248 -> write_TDelete oprot _viter248) in do {writeListBegin oprot (T_STRUCT,fromIntegral $ Vector.length _v); f _v;writeListEnd oprot})
writeFieldEnd oprot}
case f_DeleteMultiple_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_DeleteMultiple_result_fields iprot record = do
(_,_t250,_id251) <- readFieldBegin iprot
if _t250 == T_STOP then return record else
case _id251 of
0 -> if _t250 == T_LIST then do
s <- (let f n = Vector.replicateM (fromIntegral n) ((read_TDelete iprot)) in do {(_etype255,_size252) <- readListBegin iprot; f _size252})
read_DeleteMultiple_result_fields iprot record{f_DeleteMultiple_result_success=Just s}
else do
skip iprot _t250
read_DeleteMultiple_result_fields iprot record
1 -> if _t250 == T_STRUCT then do
s <- (read_TIOError iprot)
read_DeleteMultiple_result_fields iprot record{f_DeleteMultiple_result_io=Just s}
else do
skip iprot _t250
read_DeleteMultiple_result_fields iprot record
_ -> do
skip iprot _t250
readFieldEnd iprot
read_DeleteMultiple_result_fields iprot record
read_DeleteMultiple_result iprot = do
_ <- readStructBegin iprot
record <- read_DeleteMultiple_result_fields iprot (DeleteMultiple_result{f_DeleteMultiple_result_success=Nothing,f_DeleteMultiple_result_io=Nothing})
readStructEnd iprot
return record
data CheckAndDelete_args = CheckAndDelete_args{f_CheckAndDelete_args_table :: Maybe ByteString,f_CheckAndDelete_args_row :: Maybe ByteString,f_CheckAndDelete_args_family :: Maybe ByteString,f_CheckAndDelete_args_qualifier :: Maybe ByteString,f_CheckAndDelete_args_value :: Maybe ByteString,f_CheckAndDelete_args_deleteSingle :: Maybe TDelete} deriving (Show,Eq,Typeable)
instance Hashable CheckAndDelete_args where
hashWithSalt salt record = salt `hashWithSalt` f_CheckAndDelete_args_table record `hashWithSalt` f_CheckAndDelete_args_row record `hashWithSalt` f_CheckAndDelete_args_family record `hashWithSalt` f_CheckAndDelete_args_qualifier record `hashWithSalt` f_CheckAndDelete_args_value record `hashWithSalt` f_CheckAndDelete_args_deleteSingle record
write_CheckAndDelete_args oprot record = do
writeStructBegin oprot "CheckAndDelete_args"
case f_CheckAndDelete_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndDelete_args_row record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("row",T_STRING,2)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndDelete_args_family record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("family",T_STRING,3)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndDelete_args_qualifier record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("qualifier",T_STRING,4)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndDelete_args_value record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("value",T_STRING,5)
writeBinary oprot _v
writeFieldEnd oprot}
case f_CheckAndDelete_args_deleteSingle record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("deleteSingle",T_STRUCT,6)
write_TDelete oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_CheckAndDelete_args_fields iprot record = do
(_,_t260,_id261) <- readFieldBegin iprot
if _t260 == T_STOP then return record else
case _id261 of
1 -> if _t260 == T_STRING then do
s <- readBinary iprot
read_CheckAndDelete_args_fields iprot record{f_CheckAndDelete_args_table=Just s}
else do
skip iprot _t260
read_CheckAndDelete_args_fields iprot record
2 -> if _t260 == T_STRING then do
s <- readBinary iprot
read_CheckAndDelete_args_fields iprot record{f_CheckAndDelete_args_row=Just s}
else do
skip iprot _t260
read_CheckAndDelete_args_fields iprot record
3 -> if _t260 == T_STRING then do
s <- readBinary iprot
read_CheckAndDelete_args_fields iprot record{f_CheckAndDelete_args_family=Just s}
else do
skip iprot _t260
read_CheckAndDelete_args_fields iprot record
4 -> if _t260 == T_STRING then do
s <- readBinary iprot
read_CheckAndDelete_args_fields iprot record{f_CheckAndDelete_args_qualifier=Just s}
else do
skip iprot _t260
read_CheckAndDelete_args_fields iprot record
5 -> if _t260 == T_STRING then do
s <- readBinary iprot
read_CheckAndDelete_args_fields iprot record{f_CheckAndDelete_args_value=Just s}
else do
skip iprot _t260
read_CheckAndDelete_args_fields iprot record
6 -> if _t260 == T_STRUCT then do
s <- (read_TDelete iprot)
read_CheckAndDelete_args_fields iprot record{f_CheckAndDelete_args_deleteSingle=Just s}
else do
skip iprot _t260
read_CheckAndDelete_args_fields iprot record
_ -> do
skip iprot _t260
readFieldEnd iprot
read_CheckAndDelete_args_fields iprot record
read_CheckAndDelete_args iprot = do
_ <- readStructBegin iprot
record <- read_CheckAndDelete_args_fields iprot (CheckAndDelete_args{f_CheckAndDelete_args_table=Nothing,f_CheckAndDelete_args_row=Nothing,f_CheckAndDelete_args_family=Nothing,f_CheckAndDelete_args_qualifier=Nothing,f_CheckAndDelete_args_value=Nothing,f_CheckAndDelete_args_deleteSingle=Nothing})
readStructEnd iprot
return record
data CheckAndDelete_result = CheckAndDelete_result{f_CheckAndDelete_result_success :: Maybe Bool,f_CheckAndDelete_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable CheckAndDelete_result where
hashWithSalt salt record = salt `hashWithSalt` f_CheckAndDelete_result_success record `hashWithSalt` f_CheckAndDelete_result_io record
write_CheckAndDelete_result oprot record = do
writeStructBegin oprot "CheckAndDelete_result"
case f_CheckAndDelete_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_BOOL,0)
writeBool oprot _v
writeFieldEnd oprot}
case f_CheckAndDelete_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_CheckAndDelete_result_fields iprot record = do
(_,_t265,_id266) <- readFieldBegin iprot
if _t265 == T_STOP then return record else
case _id266 of
0 -> if _t265 == T_BOOL then do
s <- readBool iprot
read_CheckAndDelete_result_fields iprot record{f_CheckAndDelete_result_success=Just s}
else do
skip iprot _t265
read_CheckAndDelete_result_fields iprot record
1 -> if _t265 == T_STRUCT then do
s <- (read_TIOError iprot)
read_CheckAndDelete_result_fields iprot record{f_CheckAndDelete_result_io=Just s}
else do
skip iprot _t265
read_CheckAndDelete_result_fields iprot record
_ -> do
skip iprot _t265
readFieldEnd iprot
read_CheckAndDelete_result_fields iprot record
read_CheckAndDelete_result iprot = do
_ <- readStructBegin iprot
record <- read_CheckAndDelete_result_fields iprot (CheckAndDelete_result{f_CheckAndDelete_result_success=Nothing,f_CheckAndDelete_result_io=Nothing})
readStructEnd iprot
return record
data Increment_args = Increment_args{f_Increment_args_table :: Maybe ByteString,f_Increment_args_increment :: Maybe TIncrement} deriving (Show,Eq,Typeable)
instance Hashable Increment_args where
hashWithSalt salt record = salt `hashWithSalt` f_Increment_args_table record `hashWithSalt` f_Increment_args_increment record
write_Increment_args oprot record = do
writeStructBegin oprot "Increment_args"
case f_Increment_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_Increment_args_increment record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("increment",T_STRUCT,2)
write_TIncrement oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_Increment_args_fields iprot record = do
(_,_t270,_id271) <- readFieldBegin iprot
if _t270 == T_STOP then return record else
case _id271 of
1 -> if _t270 == T_STRING then do
s <- readBinary iprot
read_Increment_args_fields iprot record{f_Increment_args_table=Just s}
else do
skip iprot _t270
read_Increment_args_fields iprot record
2 -> if _t270 == T_STRUCT then do
s <- (read_TIncrement iprot)
read_Increment_args_fields iprot record{f_Increment_args_increment=Just s}
else do
skip iprot _t270
read_Increment_args_fields iprot record
_ -> do
skip iprot _t270
readFieldEnd iprot
read_Increment_args_fields iprot record
read_Increment_args iprot = do
_ <- readStructBegin iprot
record <- read_Increment_args_fields iprot (Increment_args{f_Increment_args_table=Nothing,f_Increment_args_increment=Nothing})
readStructEnd iprot
return record
data Increment_result = Increment_result{f_Increment_result_success :: Maybe TResult,f_Increment_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable Increment_result where
hashWithSalt salt record = salt `hashWithSalt` f_Increment_result_success record `hashWithSalt` f_Increment_result_io record
write_Increment_result oprot record = do
writeStructBegin oprot "Increment_result"
case f_Increment_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_STRUCT,0)
write_TResult oprot _v
writeFieldEnd oprot}
case f_Increment_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_Increment_result_fields iprot record = do
(_,_t275,_id276) <- readFieldBegin iprot
if _t275 == T_STOP then return record else
case _id276 of
0 -> if _t275 == T_STRUCT then do
s <- (read_TResult iprot)
read_Increment_result_fields iprot record{f_Increment_result_success=Just s}
else do
skip iprot _t275
read_Increment_result_fields iprot record
1 -> if _t275 == T_STRUCT then do
s <- (read_TIOError iprot)
read_Increment_result_fields iprot record{f_Increment_result_io=Just s}
else do
skip iprot _t275
read_Increment_result_fields iprot record
_ -> do
skip iprot _t275
readFieldEnd iprot
read_Increment_result_fields iprot record
read_Increment_result iprot = do
_ <- readStructBegin iprot
record <- read_Increment_result_fields iprot (Increment_result{f_Increment_result_success=Nothing,f_Increment_result_io=Nothing})
readStructEnd iprot
return record
data OpenScanner_args = OpenScanner_args{f_OpenScanner_args_table :: Maybe ByteString,f_OpenScanner_args_scan :: Maybe TScan} deriving (Show,Eq,Typeable)
instance Hashable OpenScanner_args where
hashWithSalt salt record = salt `hashWithSalt` f_OpenScanner_args_table record `hashWithSalt` f_OpenScanner_args_scan record
write_OpenScanner_args oprot record = do
writeStructBegin oprot "OpenScanner_args"
case f_OpenScanner_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_OpenScanner_args_scan record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("scan",T_STRUCT,2)
write_TScan oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_OpenScanner_args_fields iprot record = do
(_,_t280,_id281) <- readFieldBegin iprot
if _t280 == T_STOP then return record else
case _id281 of
1 -> if _t280 == T_STRING then do
s <- readBinary iprot
read_OpenScanner_args_fields iprot record{f_OpenScanner_args_table=Just s}
else do
skip iprot _t280
read_OpenScanner_args_fields iprot record
2 -> if _t280 == T_STRUCT then do
s <- (read_TScan iprot)
read_OpenScanner_args_fields iprot record{f_OpenScanner_args_scan=Just s}
else do
skip iprot _t280
read_OpenScanner_args_fields iprot record
_ -> do
skip iprot _t280
readFieldEnd iprot
read_OpenScanner_args_fields iprot record
read_OpenScanner_args iprot = do
_ <- readStructBegin iprot
record <- read_OpenScanner_args_fields iprot (OpenScanner_args{f_OpenScanner_args_table=Nothing,f_OpenScanner_args_scan=Nothing})
readStructEnd iprot
return record
data OpenScanner_result = OpenScanner_result{f_OpenScanner_result_success :: Maybe Int32,f_OpenScanner_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable OpenScanner_result where
hashWithSalt salt record = salt `hashWithSalt` f_OpenScanner_result_success record `hashWithSalt` f_OpenScanner_result_io record
write_OpenScanner_result oprot record = do
writeStructBegin oprot "OpenScanner_result"
case f_OpenScanner_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_I32,0)
writeI32 oprot _v
writeFieldEnd oprot}
case f_OpenScanner_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_OpenScanner_result_fields iprot record = do
(_,_t285,_id286) <- readFieldBegin iprot
if _t285 == T_STOP then return record else
case _id286 of
0 -> if _t285 == T_I32 then do
s <- readI32 iprot
read_OpenScanner_result_fields iprot record{f_OpenScanner_result_success=Just s}
else do
skip iprot _t285
read_OpenScanner_result_fields iprot record
1 -> if _t285 == T_STRUCT then do
s <- (read_TIOError iprot)
read_OpenScanner_result_fields iprot record{f_OpenScanner_result_io=Just s}
else do
skip iprot _t285
read_OpenScanner_result_fields iprot record
_ -> do
skip iprot _t285
readFieldEnd iprot
read_OpenScanner_result_fields iprot record
read_OpenScanner_result iprot = do
_ <- readStructBegin iprot
record <- read_OpenScanner_result_fields iprot (OpenScanner_result{f_OpenScanner_result_success=Nothing,f_OpenScanner_result_io=Nothing})
readStructEnd iprot
return record
data GetScannerRows_args = GetScannerRows_args{f_GetScannerRows_args_scannerId :: Maybe Int32,f_GetScannerRows_args_numRows :: Maybe Int32} deriving (Show,Eq,Typeable)
instance Hashable GetScannerRows_args where
hashWithSalt salt record = salt `hashWithSalt` f_GetScannerRows_args_scannerId record `hashWithSalt` f_GetScannerRows_args_numRows record
write_GetScannerRows_args oprot record = do
writeStructBegin oprot "GetScannerRows_args"
case f_GetScannerRows_args_scannerId record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("scannerId",T_I32,1)
writeI32 oprot _v
writeFieldEnd oprot}
case f_GetScannerRows_args_numRows record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("numRows",T_I32,2)
writeI32 oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_GetScannerRows_args_fields iprot record = do
(_,_t290,_id291) <- readFieldBegin iprot
if _t290 == T_STOP then return record else
case _id291 of
1 -> if _t290 == T_I32 then do
s <- readI32 iprot
read_GetScannerRows_args_fields iprot record{f_GetScannerRows_args_scannerId=Just s}
else do
skip iprot _t290
read_GetScannerRows_args_fields iprot record
2 -> if _t290 == T_I32 then do
s <- readI32 iprot
read_GetScannerRows_args_fields iprot record{f_GetScannerRows_args_numRows=Just s}
else do
skip iprot _t290
read_GetScannerRows_args_fields iprot record
_ -> do
skip iprot _t290
readFieldEnd iprot
read_GetScannerRows_args_fields iprot record
read_GetScannerRows_args iprot = do
_ <- readStructBegin iprot
record <- read_GetScannerRows_args_fields iprot (GetScannerRows_args{f_GetScannerRows_args_scannerId=Nothing,f_GetScannerRows_args_numRows=Nothing})
readStructEnd iprot
return record
data GetScannerRows_result = GetScannerRows_result{f_GetScannerRows_result_success :: Maybe (Vector.Vector TResult),f_GetScannerRows_result_io :: Maybe TIOError,f_GetScannerRows_result_ia :: Maybe TIllegalArgument} deriving (Show,Eq,Typeable)
instance Hashable GetScannerRows_result where
hashWithSalt salt record = salt `hashWithSalt` f_GetScannerRows_result_success record `hashWithSalt` f_GetScannerRows_result_io record `hashWithSalt` f_GetScannerRows_result_ia record
write_GetScannerRows_result oprot record = do
writeStructBegin oprot "GetScannerRows_result"
case f_GetScannerRows_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_LIST,0)
(let f = Vector.mapM_ (\_viter294 -> write_TResult oprot _viter294) in do {writeListBegin oprot (T_STRUCT,fromIntegral $ Vector.length _v); f _v;writeListEnd oprot})
writeFieldEnd oprot}
case f_GetScannerRows_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
case f_GetScannerRows_result_ia record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("ia",T_STRUCT,2)
write_TIllegalArgument oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_GetScannerRows_result_fields iprot record = do
(_,_t296,_id297) <- readFieldBegin iprot
if _t296 == T_STOP then return record else
case _id297 of
0 -> if _t296 == T_LIST then do
s <- (let f n = Vector.replicateM (fromIntegral n) ((read_TResult iprot)) in do {(_etype301,_size298) <- readListBegin iprot; f _size298})
read_GetScannerRows_result_fields iprot record{f_GetScannerRows_result_success=Just s}
else do
skip iprot _t296
read_GetScannerRows_result_fields iprot record
1 -> if _t296 == T_STRUCT then do
s <- (read_TIOError iprot)
read_GetScannerRows_result_fields iprot record{f_GetScannerRows_result_io=Just s}
else do
skip iprot _t296
read_GetScannerRows_result_fields iprot record
2 -> if _t296 == T_STRUCT then do
s <- (read_TIllegalArgument iprot)
read_GetScannerRows_result_fields iprot record{f_GetScannerRows_result_ia=Just s}
else do
skip iprot _t296
read_GetScannerRows_result_fields iprot record
_ -> do
skip iprot _t296
readFieldEnd iprot
read_GetScannerRows_result_fields iprot record
read_GetScannerRows_result iprot = do
_ <- readStructBegin iprot
record <- read_GetScannerRows_result_fields iprot (GetScannerRows_result{f_GetScannerRows_result_success=Nothing,f_GetScannerRows_result_io=Nothing,f_GetScannerRows_result_ia=Nothing})
readStructEnd iprot
return record
data CloseScanner_args = CloseScanner_args{f_CloseScanner_args_scannerId :: Maybe Int32} deriving (Show,Eq,Typeable)
instance Hashable CloseScanner_args where
hashWithSalt salt record = salt `hashWithSalt` f_CloseScanner_args_scannerId record
write_CloseScanner_args oprot record = do
writeStructBegin oprot "CloseScanner_args"
case f_CloseScanner_args_scannerId record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("scannerId",T_I32,1)
writeI32 oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_CloseScanner_args_fields iprot record = do
(_,_t306,_id307) <- readFieldBegin iprot
if _t306 == T_STOP then return record else
case _id307 of
1 -> if _t306 == T_I32 then do
s <- readI32 iprot
read_CloseScanner_args_fields iprot record{f_CloseScanner_args_scannerId=Just s}
else do
skip iprot _t306
read_CloseScanner_args_fields iprot record
_ -> do
skip iprot _t306
readFieldEnd iprot
read_CloseScanner_args_fields iprot record
read_CloseScanner_args iprot = do
_ <- readStructBegin iprot
record <- read_CloseScanner_args_fields iprot (CloseScanner_args{f_CloseScanner_args_scannerId=Nothing})
readStructEnd iprot
return record
data CloseScanner_result = CloseScanner_result{f_CloseScanner_result_io :: Maybe TIOError,f_CloseScanner_result_ia :: Maybe TIllegalArgument} deriving (Show,Eq,Typeable)
instance Hashable CloseScanner_result where
hashWithSalt salt record = salt `hashWithSalt` f_CloseScanner_result_io record `hashWithSalt` f_CloseScanner_result_ia record
write_CloseScanner_result oprot record = do
writeStructBegin oprot "CloseScanner_result"
case f_CloseScanner_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
case f_CloseScanner_result_ia record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("ia",T_STRUCT,2)
write_TIllegalArgument oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_CloseScanner_result_fields iprot record = do
(_,_t311,_id312) <- readFieldBegin iprot
if _t311 == T_STOP then return record else
case _id312 of
1 -> if _t311 == T_STRUCT then do
s <- (read_TIOError iprot)
read_CloseScanner_result_fields iprot record{f_CloseScanner_result_io=Just s}
else do
skip iprot _t311
read_CloseScanner_result_fields iprot record
2 -> if _t311 == T_STRUCT then do
s <- (read_TIllegalArgument iprot)
read_CloseScanner_result_fields iprot record{f_CloseScanner_result_ia=Just s}
else do
skip iprot _t311
read_CloseScanner_result_fields iprot record
_ -> do
skip iprot _t311
readFieldEnd iprot
read_CloseScanner_result_fields iprot record
read_CloseScanner_result iprot = do
_ <- readStructBegin iprot
record <- read_CloseScanner_result_fields iprot (CloseScanner_result{f_CloseScanner_result_io=Nothing,f_CloseScanner_result_ia=Nothing})
readStructEnd iprot
return record
data MutateRow_args = MutateRow_args{f_MutateRow_args_table :: Maybe ByteString,f_MutateRow_args_rowMutations :: Maybe TRowMutations} deriving (Show,Eq,Typeable)
instance Hashable MutateRow_args where
hashWithSalt salt record = salt `hashWithSalt` f_MutateRow_args_table record `hashWithSalt` f_MutateRow_args_rowMutations record
write_MutateRow_args oprot record = do
writeStructBegin oprot "MutateRow_args"
case f_MutateRow_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_MutateRow_args_rowMutations record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("rowMutations",T_STRUCT,2)
write_TRowMutations oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_MutateRow_args_fields iprot record = do
(_,_t316,_id317) <- readFieldBegin iprot
if _t316 == T_STOP then return record else
case _id317 of
1 -> if _t316 == T_STRING then do
s <- readBinary iprot
read_MutateRow_args_fields iprot record{f_MutateRow_args_table=Just s}
else do
skip iprot _t316
read_MutateRow_args_fields iprot record
2 -> if _t316 == T_STRUCT then do
s <- (read_TRowMutations iprot)
read_MutateRow_args_fields iprot record{f_MutateRow_args_rowMutations=Just s}
else do
skip iprot _t316
read_MutateRow_args_fields iprot record
_ -> do
skip iprot _t316
readFieldEnd iprot
read_MutateRow_args_fields iprot record
read_MutateRow_args iprot = do
_ <- readStructBegin iprot
record <- read_MutateRow_args_fields iprot (MutateRow_args{f_MutateRow_args_table=Nothing,f_MutateRow_args_rowMutations=Nothing})
readStructEnd iprot
return record
data MutateRow_result = MutateRow_result{f_MutateRow_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable MutateRow_result where
hashWithSalt salt record = salt `hashWithSalt` f_MutateRow_result_io record
write_MutateRow_result oprot record = do
writeStructBegin oprot "MutateRow_result"
case f_MutateRow_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_MutateRow_result_fields iprot record = do
(_,_t321,_id322) <- readFieldBegin iprot
if _t321 == T_STOP then return record else
case _id322 of
1 -> if _t321 == T_STRUCT then do
s <- (read_TIOError iprot)
read_MutateRow_result_fields iprot record{f_MutateRow_result_io=Just s}
else do
skip iprot _t321
read_MutateRow_result_fields iprot record
_ -> do
skip iprot _t321
readFieldEnd iprot
read_MutateRow_result_fields iprot record
read_MutateRow_result iprot = do
_ <- readStructBegin iprot
record <- read_MutateRow_result_fields iprot (MutateRow_result{f_MutateRow_result_io=Nothing})
readStructEnd iprot
return record
data GetScannerResults_args = GetScannerResults_args{f_GetScannerResults_args_table :: Maybe ByteString,f_GetScannerResults_args_scan :: Maybe TScan,f_GetScannerResults_args_numRows :: Maybe Int32} deriving (Show,Eq,Typeable)
instance Hashable GetScannerResults_args where
hashWithSalt salt record = salt `hashWithSalt` f_GetScannerResults_args_table record `hashWithSalt` f_GetScannerResults_args_scan record `hashWithSalt` f_GetScannerResults_args_numRows record
write_GetScannerResults_args oprot record = do
writeStructBegin oprot "GetScannerResults_args"
case f_GetScannerResults_args_table record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("table",T_STRING,1)
writeBinary oprot _v
writeFieldEnd oprot}
case f_GetScannerResults_args_scan record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("scan",T_STRUCT,2)
write_TScan oprot _v
writeFieldEnd oprot}
case f_GetScannerResults_args_numRows record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("numRows",T_I32,3)
writeI32 oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_GetScannerResults_args_fields iprot record = do
(_,_t326,_id327) <- readFieldBegin iprot
if _t326 == T_STOP then return record else
case _id327 of
1 -> if _t326 == T_STRING then do
s <- readBinary iprot
read_GetScannerResults_args_fields iprot record{f_GetScannerResults_args_table=Just s}
else do
skip iprot _t326
read_GetScannerResults_args_fields iprot record
2 -> if _t326 == T_STRUCT then do
s <- (read_TScan iprot)
read_GetScannerResults_args_fields iprot record{f_GetScannerResults_args_scan=Just s}
else do
skip iprot _t326
read_GetScannerResults_args_fields iprot record
3 -> if _t326 == T_I32 then do
s <- readI32 iprot
read_GetScannerResults_args_fields iprot record{f_GetScannerResults_args_numRows=Just s}
else do
skip iprot _t326
read_GetScannerResults_args_fields iprot record
_ -> do
skip iprot _t326
readFieldEnd iprot
read_GetScannerResults_args_fields iprot record
read_GetScannerResults_args iprot = do
_ <- readStructBegin iprot
record <- read_GetScannerResults_args_fields iprot (GetScannerResults_args{f_GetScannerResults_args_table=Nothing,f_GetScannerResults_args_scan=Nothing,f_GetScannerResults_args_numRows=Nothing})
readStructEnd iprot
return record
data GetScannerResults_result = GetScannerResults_result{f_GetScannerResults_result_success :: Maybe (Vector.Vector TResult),f_GetScannerResults_result_io :: Maybe TIOError} deriving (Show,Eq,Typeable)
instance Hashable GetScannerResults_result where
hashWithSalt salt record = salt `hashWithSalt` f_GetScannerResults_result_success record `hashWithSalt` f_GetScannerResults_result_io record
write_GetScannerResults_result oprot record = do
writeStructBegin oprot "GetScannerResults_result"
case f_GetScannerResults_result_success record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("success",T_LIST,0)
(let f = Vector.mapM_ (\_viter330 -> write_TResult oprot _viter330) in do {writeListBegin oprot (T_STRUCT,fromIntegral $ Vector.length _v); f _v;writeListEnd oprot})
writeFieldEnd oprot}
case f_GetScannerResults_result_io record of {Nothing -> return (); Just _v -> do
writeFieldBegin oprot ("io",T_STRUCT,1)
write_TIOError oprot _v
writeFieldEnd oprot}
writeFieldStop oprot
writeStructEnd oprot
read_GetScannerResults_result_fields iprot record = do
(_,_t332,_id333) <- readFieldBegin iprot
if _t332 == T_STOP then return record else
case _id333 of
0 -> if _t332 == T_LIST then do
s <- (let f n = Vector.replicateM (fromIntegral n) ((read_TResult iprot)) in do {(_etype337,_size334) <- readListBegin iprot; f _size334})
read_GetScannerResults_result_fields iprot record{f_GetScannerResults_result_success=Just s}
else do
skip iprot _t332
read_GetScannerResults_result_fields iprot record
1 -> if _t332 == T_STRUCT then do
s <- (read_TIOError iprot)
read_GetScannerResults_result_fields iprot record{f_GetScannerResults_result_io=Just s}
else do
skip iprot _t332
read_GetScannerResults_result_fields iprot record
_ -> do
skip iprot _t332
readFieldEnd iprot
read_GetScannerResults_result_fields iprot record
read_GetScannerResults_result iprot = do
_ <- readStructBegin iprot
record <- read_GetScannerResults_result_fields iprot (GetScannerResults_result{f_GetScannerResults_result_success=Nothing,f_GetScannerResults_result_io=Nothing})
readStructEnd iprot
return record
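-- Server-side handlers: each process_* function reads the call's argument
-- struct, invokes the corresponding Iface method, catches the declared Thrift
-- exception(s) into the result struct, and writes the reply back before
-- flushing the transport.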
process_exists (seqid, iprot, oprot, handler) = do
args <- read_Exists_args iprot
readMessageEnd iprot
rs <- return (Exists_result Nothing Nothing)
res <- (Control.Exception.catch
(do
res <- Iface.exists handler (f_Exists_args_table args) (f_Exists_args_get args)
return rs{f_Exists_result_success= Just res})
(\e ->
return rs{f_Exists_result_io =Just e}))
writeMessageBegin oprot ("exists", M_REPLY, seqid);
write_Exists_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_get (seqid, iprot, oprot, handler) = do
args <- read_Get_args iprot
readMessageEnd iprot
rs <- return (Get_result Nothing Nothing)
res <- (Control.Exception.catch
(do
res <- Iface.get handler (f_Get_args_table args) (f_Get_args_get args)
return rs{f_Get_result_success= Just res})
(\e ->
return rs{f_Get_result_io =Just e}))
writeMessageBegin oprot ("get", M_REPLY, seqid);
write_Get_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_getMultiple (seqid, iprot, oprot, handler) = do
args <- read_GetMultiple_args iprot
readMessageEnd iprot
rs <- return (GetMultiple_result Nothing Nothing)
res <- (Control.Exception.catch
(do
res <- Iface.getMultiple handler (f_GetMultiple_args_table args) (f_GetMultiple_args_gets args)
return rs{f_GetMultiple_result_success= Just res})
(\e ->
return rs{f_GetMultiple_result_io =Just e}))
writeMessageBegin oprot ("getMultiple", M_REPLY, seqid);
write_GetMultiple_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_put (seqid, iprot, oprot, handler) = do
args <- read_Put_args iprot
readMessageEnd iprot
rs <- return (Put_result Nothing)
res <- (Control.Exception.catch
(do
Iface.put handler (f_Put_args_table args) (f_Put_args_put args)
return rs)
(\e ->
return rs{f_Put_result_io =Just e}))
writeMessageBegin oprot ("put", M_REPLY, seqid);
write_Put_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_checkAndPut (seqid, iprot, oprot, handler) = do
args <- read_CheckAndPut_args iprot
readMessageEnd iprot
rs <- return (CheckAndPut_result Nothing Nothing)
res <- (Control.Exception.catch
(do
res <- Iface.checkAndPut handler (f_CheckAndPut_args_table args) (f_CheckAndPut_args_row args) (f_CheckAndPut_args_family args) (f_CheckAndPut_args_qualifier args) (f_CheckAndPut_args_value args) (f_CheckAndPut_args_put args)
return rs{f_CheckAndPut_result_success= Just res})
(\e ->
return rs{f_CheckAndPut_result_io =Just e}))
writeMessageBegin oprot ("checkAndPut", M_REPLY, seqid);
write_CheckAndPut_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_putMultiple (seqid, iprot, oprot, handler) = do
args <- read_PutMultiple_args iprot
readMessageEnd iprot
rs <- return (PutMultiple_result Nothing)
res <- (Control.Exception.catch
(do
Iface.putMultiple handler (f_PutMultiple_args_table args) (f_PutMultiple_args_puts args)
return rs)
(\e ->
return rs{f_PutMultiple_result_io =Just e}))
writeMessageBegin oprot ("putMultiple", M_REPLY, seqid);
write_PutMultiple_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_deleteSingle (seqid, iprot, oprot, handler) = do
args <- read_DeleteSingle_args iprot
readMessageEnd iprot
rs <- return (DeleteSingle_result Nothing)
res <- (Control.Exception.catch
(do
Iface.deleteSingle handler (f_DeleteSingle_args_table args) (f_DeleteSingle_args_deleteSingle args)
return rs)
(\e ->
return rs{f_DeleteSingle_result_io =Just e}))
writeMessageBegin oprot ("deleteSingle", M_REPLY, seqid);
write_DeleteSingle_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_deleteMultiple (seqid, iprot, oprot, handler) = do
args <- read_DeleteMultiple_args iprot
readMessageEnd iprot
rs <- return (DeleteMultiple_result Nothing Nothing)
res <- (Control.Exception.catch
(do
res <- Iface.deleteMultiple handler (f_DeleteMultiple_args_table args) (f_DeleteMultiple_args_deletes args)
return rs{f_DeleteMultiple_result_success= Just res})
(\e ->
return rs{f_DeleteMultiple_result_io =Just e}))
writeMessageBegin oprot ("deleteMultiple", M_REPLY, seqid);
write_DeleteMultiple_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_checkAndDelete (seqid, iprot, oprot, handler) = do
args <- read_CheckAndDelete_args iprot
readMessageEnd iprot
rs <- return (CheckAndDelete_result Nothing Nothing)
res <- (Control.Exception.catch
(do
res <- Iface.checkAndDelete handler (f_CheckAndDelete_args_table args) (f_CheckAndDelete_args_row args) (f_CheckAndDelete_args_family args) (f_CheckAndDelete_args_qualifier args) (f_CheckAndDelete_args_value args) (f_CheckAndDelete_args_deleteSingle args)
return rs{f_CheckAndDelete_result_success= Just res})
(\e ->
return rs{f_CheckAndDelete_result_io =Just e}))
writeMessageBegin oprot ("checkAndDelete", M_REPLY, seqid);
write_CheckAndDelete_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_increment (seqid, iprot, oprot, handler) = do
args <- read_Increment_args iprot
readMessageEnd iprot
rs <- return (Increment_result Nothing Nothing)
res <- (Control.Exception.catch
(do
res <- Iface.increment handler (f_Increment_args_table args) (f_Increment_args_increment args)
return rs{f_Increment_result_success= Just res})
(\e ->
return rs{f_Increment_result_io =Just e}))
writeMessageBegin oprot ("increment", M_REPLY, seqid);
write_Increment_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_openScanner (seqid, iprot, oprot, handler) = do
args <- read_OpenScanner_args iprot
readMessageEnd iprot
rs <- return (OpenScanner_result Nothing Nothing)
res <- (Control.Exception.catch
(do
res <- Iface.openScanner handler (f_OpenScanner_args_table args) (f_OpenScanner_args_scan args)
return rs{f_OpenScanner_result_success= Just res})
(\e ->
return rs{f_OpenScanner_result_io =Just e}))
writeMessageBegin oprot ("openScanner", M_REPLY, seqid);
write_OpenScanner_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_getScannerRows (seqid, iprot, oprot, handler) = do
args <- read_GetScannerRows_args iprot
readMessageEnd iprot
rs <- return (GetScannerRows_result Nothing Nothing Nothing)
res <- (Control.Exception.catch
(Control.Exception.catch
(do
res <- Iface.getScannerRows handler (f_GetScannerRows_args_scannerId args) (f_GetScannerRows_args_numRows args)
return rs{f_GetScannerRows_result_success= Just res})
(\e ->
return rs{f_GetScannerRows_result_io =Just e}))
(\e ->
return rs{f_GetScannerRows_result_ia =Just e}))
writeMessageBegin oprot ("getScannerRows", M_REPLY, seqid);
write_GetScannerRows_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_closeScanner (seqid, iprot, oprot, handler) = do
args <- read_CloseScanner_args iprot
readMessageEnd iprot
rs <- return (CloseScanner_result Nothing Nothing)
res <- (Control.Exception.catch
(Control.Exception.catch
(do
Iface.closeScanner handler (f_CloseScanner_args_scannerId args)
return rs)
(\e ->
return rs{f_CloseScanner_result_io =Just e}))
(\e ->
return rs{f_CloseScanner_result_ia =Just e}))
writeMessageBegin oprot ("closeScanner", M_REPLY, seqid);
write_CloseScanner_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_mutateRow (seqid, iprot, oprot, handler) = do
args <- read_MutateRow_args iprot
readMessageEnd iprot
rs <- return (MutateRow_result Nothing)
res <- (Control.Exception.catch
(do
Iface.mutateRow handler (f_MutateRow_args_table args) (f_MutateRow_args_rowMutations args)
return rs)
(\e ->
return rs{f_MutateRow_result_io =Just e}))
writeMessageBegin oprot ("mutateRow", M_REPLY, seqid);
write_MutateRow_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
process_getScannerResults (seqid, iprot, oprot, handler) = do
args <- read_GetScannerResults_args iprot
readMessageEnd iprot
rs <- return (GetScannerResults_result Nothing Nothing)
res <- (Control.Exception.catch
(do
res <- Iface.getScannerResults handler (f_GetScannerResults_args_table args) (f_GetScannerResults_args_scan args) (f_GetScannerResults_args_numRows args)
return rs{f_GetScannerResults_result_success= Just res})
(\e ->
return rs{f_GetScannerResults_result_io =Just e}))
writeMessageBegin oprot ("getScannerResults", M_REPLY, seqid);
write_GetScannerResults_result oprot res
writeMessageEnd oprot
tFlush (getTransport oprot)
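-- proc_ routes an incoming call to the matching process_* handler by method
-- name; unknown method names are skipped and answered with an
-- AE_UNKNOWN_METHOD application exception.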
proc_ handler (iprot,oprot) (name,typ,seqid) = case name of
"exists" -> process_exists (seqid,iprot,oprot,handler)
"get" -> process_get (seqid,iprot,oprot,handler)
"getMultiple" -> process_getMultiple (seqid,iprot,oprot,handler)
"put" -> process_put (seqid,iprot,oprot,handler)
"checkAndPut" -> process_checkAndPut (seqid,iprot,oprot,handler)
"putMultiple" -> process_putMultiple (seqid,iprot,oprot,handler)
"deleteSingle" -> process_deleteSingle (seqid,iprot,oprot,handler)
"deleteMultiple" -> process_deleteMultiple (seqid,iprot,oprot,handler)
"checkAndDelete" -> process_checkAndDelete (seqid,iprot,oprot,handler)
"increment" -> process_increment (seqid,iprot,oprot,handler)
"openScanner" -> process_openScanner (seqid,iprot,oprot,handler)
"getScannerRows" -> process_getScannerRows (seqid,iprot,oprot,handler)
"closeScanner" -> process_closeScanner (seqid,iprot,oprot,handler)
"mutateRow" -> process_mutateRow (seqid,iprot,oprot,handler)
"getScannerResults" -> process_getScannerResults (seqid,iprot,oprot,handler)
_ -> do
skip iprot T_STRUCT
readMessageEnd iprot
writeMessageBegin oprot (name,M_EXCEPTION,seqid)
writeAppExn oprot (AppExn AE_UNKNOWN_METHOD ("Unknown function " ++ TL.unpack name))
writeMessageEnd oprot
tFlush (getTransport oprot)
process handler (iprot, oprot) = do
(name, typ, seqid) <- readMessageBegin iprot
proc_ handler (iprot,oprot) (name,typ,seqid)
return True
|
danplubell/hbase-haskell
|
src/Database/HBase/Internal/Thrift2/THBaseService.hs
|
mit
| 71,566
| 327
| 29
| 13,711
| 19,396
| 9,586
| 9,810
| 1,519
| 16
|
module Syntax where
type Name = String
type Path = String
data Expr
= Float Double
| BinaryOp Name Expr Expr
| Var Name
| VarDef Name Name
| Call Name [Expr]
| MethodCall Name [Expr]
| Function Name [Expr] Expr
| Module Name [Expr]
| Class Name [Expr]
| Import Name Path
| Extern Name [Expr]
| When [Expr]
| Else
| Clause Expr Expr
| UnaryOp Name Expr
deriving (Eq, Ord, Show)
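-- Illustrative only (not part of the original module): an expression such as
-- 1.0 + x would be represented here as
--   BinaryOp "+" (Float 1.0) (Var "x")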
|
NoxHarmonium/lithium-lang
|
src/Syntax.hs
|
mit
| 410
| 0
| 7
| 107
| 155
| 92
| 63
| 20
| 0
|
{-# LANGUAGE TemplateHaskell #-}
import System.Exit
import Data.List
import Data.Maybe
import Control.Monad
import Control.Applicative
import Test.QuickCheck
import Test.QuickCheck.All
import Test.QuickCheck.Test
import PathFinder.Graph as G
import PathFinder.PathFinder
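-- The Arbitrary instance below builds undirected test graphs: it draws a
-- non-empty list of distinct node labels, then a bounded number of random
-- edges, inserting each edge in both directions with the same small positive
-- weight.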
instance (Arbitrary a, Eq a) => Arbitrary (Graph a) where
arbitrary = do
nodes <- nub <$> listOf1 arbitrary
esize <- arbitrary `suchThat` (< 2 * length nodes)
edges <- concat <$> vectorOf esize
(do f <- elements nodes
t <- elements nodes `suchThat` (/= f)
w <- arbitrary `suchThat` neitherTooBigNorTooSmall
return [Edge f t w, Edge t f w])
return $ Graph (map Node nodes) edges
where neitherTooBigNorTooSmall x = x > 0 && x < 100
prop_nodesWorks :: Eq a => [a] -> a -> Bool
prop_nodesWorks ls l = (l `elem` ls) == isJust (node g l)
where ns = fmap Node ls
g = Graph ns []
prop_connectedPaths :: Ord a => Graph a -> Bool
prop_connectedPaths g = isConnected g == all isJust [path g x y | (Node x) <- nodes g, (Node y) <- nodes g]
prop_triangularInequality :: Ord a => Graph a -> Property
prop_triangularInequality g = length (nodes g) >= 3
&& isJust pab
&& isJust pbc ==> fromMaybe False $ do
lac <- length <$> pac
lab <- length <$> pab
lbc <- length <$> pbc
return $ lac <= lab + lbc
where a:b:c:_ = take 3 (nodes g)
pab = path g (G.label a) (G.label b)
pbc = path g (G.label b) (G.label c)
pac = path g (G.label a) (G.label c)
main = do
result <- $quickCheckAll
unless result exitFailure
|
MonadNomads/PathFinder
|
tests/tests.hs
|
mit
| 1,821
| 0
| 16
| 621
| 671
| 334
| 337
| 43
| 1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
module Diagrams.Plots.Basic.Types
(
-- * General plot options
PlotOpt
, x
, y
, height
, width
, title
, file
, xlab
, ylab
, xNames
, yNames
, xLabelOpt
, yLabelOpt
, extra
, pads
-- * Line Options
, LinePlotOpt
, showPoint
) where
import Control.Lens
import Data.Default
import Diagrams.Plots
data PlotOpt datX datY opt = PlotOpt
{ _plotOptX :: [datX]
, _plotOptY :: [datY]
, _plotOptHeight :: Double
, _plotOptWidth :: Double
, _plotOptXlab :: String
, _plotOptYlab :: String
, _plotOptXNames :: [String]
, _plotOptYNames :: [String]
, _plotOptXLabelOpt :: LabelOpt
, _plotOptYLabelOpt :: LabelOpt
, _plotOptTitle :: String
, _plotOptFile :: String
, _plotOptExtra :: opt
, _plotOptPads :: (Double, Double) -- (x,y)
}
makeFields ''PlotOpt
instance Default opt => Default (PlotOpt datX datY opt) where
def = PlotOpt
{ _plotOptX = []
, _plotOptY = []
, _plotOptHeight = 480
, _plotOptWidth = 480
, _plotOptXlab = ""
, _plotOptYlab = ""
, _plotOptXNames = []
, _plotOptYNames = []
, _plotOptXLabelOpt = def
, _plotOptYLabelOpt = def
, _plotOptTitle = ""
, _plotOptFile = "plot.png"
, _plotOptExtra = def
, _plotOptPads = (0.1,0.1)
}
data LinePlotOpt = LinePlotOpt
{ _linePlotOptShowPoint :: Bool
}
makeFields ''LinePlotOpt
instance Default LinePlotOpt where
def = LinePlotOpt
{ _linePlotOptShowPoint = False
}
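-- A minimal usage sketch (illustrative, assuming only the lenses generated by
-- makeFields and exported above, e.g. 'x', 'y', 'title'):
--
-- > def & x .~ [1,2,3] & y .~ [2,4,6] & title .~ "demo"
-- >   :: PlotOpt Double Double LinePlotOpt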
|
kaizhang/haskell-plot
|
src/Diagrams/Plots/Basic/Types.hs
|
mit
| 1,729
| 0
| 9
| 538
| 378
| 239
| 139
| 63
| 0
|
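-- Project Euler problem 2: sum the even-valued Fibonacci numbers that do not
-- exceed four million, using a lazily defined Fibonacci stream.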
fib = 1 : 1 : zipWith (+) fib (tail fib)
main = print $ sum $ takeWhile (<4000001) $ filter even fib
|
dpieroux/euler
|
0/0002.hs
|
mit
| 101
| 0
| 8
| 23
| 58
| 30
| 28
| 2
| 1
|
1 + 2 -- 3
3 * 4 -- 12
5 / 2 -- 2.5
div 5 2 -- 2
5 `div` 2 -- 2
True && True -- True
False || False -- False
not True -- False
5 == 5 -- True
5 /= 5 -- False
"hello" == "hello" -- True
succ 8 -- 9
succ 8.5 -- 9.5
succ 'h' -- 'i'
succ "h" -- Throws exception
min 9 10 -- 9
min 10 0.1 -- 0.1
min 1 2 3 -- Type error (min takes only two arguments)
max 1 2 -- 2
-- function application has higher precedence than operators
succ 9 * 8 -- 80
succ (9 * 8) -- 73
[] -- Empty List
[1, 2, 3] -- list of numbers
[1, 2] ++ [3, 4] -- [1, 2, 3, 4]
" hello " ++ " " ++ " world " -- " hello world "
[ ’w ’ , ’o ’] ++ [ ’o ’ , ’t ’] -- " woot "
-- : is the cons operator
5 : [1, 2, 3] -- [5, 1, 2, 3]
-- !! is the index dereference operator
[1, 2, 3, 4] !! 2 -- 3
-- All the following are true. Elements compared one by one using
-- comparison operator
[3,2,1] > [2,1,0]
[3,2,1] > [2,10,100]
[3,4,2] > [3,4]
[3,4,2] > [2,4]
[3,4,2] == [3,4,2]
head [1, 2, 3] -- 1
tail [1, 2, 3] -- [2, 3]
init [1, 2, 3] -- [1, 2]
last [1, 2, 3] -- 3
length [1, 2, 3] -- 3
length [] -- 0
-- null checks if a list is empty
null [] -- True
null [1] -- False
null [[]] -- False
reverse [1, 2, 3] -- [3, 2, 1]
take 3 [1..5] -- [1, 2, 3]
take 10 [1..5] -- [1..5]
drop 3 [1..5] -- [4, 5]
drop 10 [1..5] -- []
maximum [1..5] -- 5
minimum [1..5] -- 1
sum [1..5] -- 15
product [1..5] -- 120
-- elem function is like python's in operator
1 `elem` [1..5] -- True
6 `elem` [1..5] -- False
'h' `elem` [2..5] -- Type error
-- Ranges
[2, 4..20] -- [2,4,6,8,10,12,14,16,18,20]
[3, 6..20] -- [3,6,9,12,15,18]
[5, 4..1] -- [5, 4, 3, 2, 1]
-- Cycle makes an infinite list by repeating its argument's elements over
-- and over
take 10 (cycle [1,2,3]) -- [1,2,3,1,2,3,1,2,3,1]
take 10 (repeat 5) -- [5,5,5,5,5,5,5,5,5,5]
-- List comprehensions
[x*x | x <- [1..10]] -- [1,4,9,16,25,36,49,64,81,100]
-- With filter:
[x * 2 | x <- [1..10], x * 2 >= 12] -- [12,14,16,18,20]
-- With multiple predicates:
[x | x <- [10..20], x /= 13, x /= 15, x /= 19] -- [10,11,12,14,16,17,18,20]
-- With multiple iterators:
[x * y | x <- [2,5,10], y <- [8,10,11], x * y > 50] -- [55,80,100,110]
-- Tuples
-- * Can mix types
-- * Size is fixed
-- * No unary tuple
(1, 2)
('a', 3.0, 5)
zip [1, 2, 3] ['a', 'b', 'c'] -- [(1,'a'),(2,'b'),(3,'c')]
zip [1, 2, 3] ['a', 'b', 'c', 'd'] -- [(1,'a'),(2,'b'),(3,'c')]
fst (1, 2) -- 1
snd (1, 2) -- 2
fst (1, 2, 3) -- Error
-- Types
--
-- GHCI command :t shows type of argument
--
-- Types are always capitalized, names/labels are always
-- lowercase/camelCase
-- :: is "has type of" operator
1 :: Int
1 :: Integer
1 :: Double -- 1.0
1 :: Float -- 1.0
'c' :: Char
removeNonUppercase :: [Char] -> [Char]
removeNonUppercase st = [ c | c <- st, c `elem` ['A'..'Z'] ]
addThree :: Int -> Int -> Int -> Int
addThree x y z = x + y + z
-- Int - Machine integer type
-- Integer - Bignum integer type
-- Double
-- Float
-- Char
-- [ Char ] - String
-- Bool
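-- A hedged aside (added, not in the original notes): on a typical 64-bit GHC,
-- Int is fixed-width and can overflow, while Integer is arbitrary precision.
2^63 :: Int -- -9223372036854775808 (overflows)
2^63 :: Integer -- 9223372036854775808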
-- Type variables
:t head -- head :: [a] -> a
1 `compare` 2 -- LT
1 `compare` 1 -- EQ
2 `compare` 1 -- GT
:t 1 `compare` 2 -- Ordering
-- show typeclass - Anything with a toString function
show 3 -- "3"
show True -- "True"
-- read typeclass - Anything with a parse function
read "True" :: Bool -- True
read "3.2" :: Double -- 3.2
-- Bounded - typeclass defining minBound and maxBound for bounded types
maxBound :: Int -- 2^63 - 1 on 64 bit arch
-- Enum - typeclass defining pred and succ for enumerable types
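-- A small hedged example (added): pred is the counterpart of succ for Enum types.
pred 9 -- 8
['a'..'e'] -- "abcde"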
-- Integral - Subclass of Num, applies to integers (Int and Integer)
-- fromIntegral - Promotes Integral to Num type so it can be combined with
-- other nums (essentially a casting operator)
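-- A hedged illustration (added): length returns an Int, so combining it with a
-- Double requires fromIntegral.
fromIntegral (length [1,2,3]) + 0.5 -- 3.5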
--
|
bhuber/Software-Foundations
|
haskell/notes.hs
|
mit
| 4,074
| 59
| 8
| 1,227
| 1,318
| 732
| 586
| -1
| -1
|
module LMonad.Label.PowerSet where
import Data.Set (Set)
import qualified Data.Set as Set
import Prelude
import LMonad
-- | Power set label made of all combinations of the principals.
data Ord p => PSLabel p = PSLabel {
psLabelConfidentiality :: Set p
, psLabelIntegrity :: Set p
}
deriving Show
-- | Convenience function to convert a principal to confidentiality and integrity PSLabel.
psSingleton :: Ord p => p -> PSLabel p
psSingleton p =
let p' = Set.singleton p in
PSLabel p' p'
-- | Convenience function to convert a principal to confidentiality PSLabel.
psConfidentialitySingleton :: Ord p => p -> PSLabel p
psConfidentialitySingleton p =
let p' = Set.singleton p in
PSLabel p' Set.empty
-- | Convenience function to convert a principal to integrity PSLabel.
psIntegritySingleton :: Ord p => p -> PSLabel p
psIntegritySingleton p =
let p' = Set.singleton p in
PSLabel Set.empty p'
instance Ord p => Label (PSLabel p) where
-- Meet
glb (PSLabel c1 i1) (PSLabel c2 i2) =
let c = Set.intersection c1 c2 in
let i = Set.intersection i1 i2 in
PSLabel c i
-- Join
lub (PSLabel c1 i1) (PSLabel c2 i2) =
let c = Set.union c1 c2 in
let i = Set.union i1 i2 in
PSLabel c i
-- Flow to
canFlowTo (PSLabel c1 i1) (PSLabel c2 i2) =
(Set.isSubsetOf c1 c2) && (Set.isSubsetOf i1 i2)
-- Bottom
bottom =
PSLabel Set.empty Set.empty
-- | Type alias for labeled power sets.
type PSLabeled p = Labeled (PSLabel p)
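-- A hedged usage sketch (added for illustration; "alice" and "bob" are
-- hypothetical principals, not part of this module): a label confidential to
-- "alice" may flow to a label confidential to both principals.
--
-- >>> psConfidentialitySingleton "alice" `canFlowTo` PSLabel (Set.fromList ["alice", "bob"]) Set.empty
-- True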
|
jprider63/LMonad
|
src/LMonad/Label/PowerSet.hs
|
mit
| 1,565
| 0
| 13
| 415
| 461
| 232
| 229
| 35
| 1
|
module Language.SystemF.Parser (
parseExpr,
parseType
) where
import Control.Monad
import Data.Functor
import Prelude hiding (abs)
import Text.Parsec
import Text.Parsec.String
import Language.SystemF.Expression
parseExpr :: String -> Either ParseError (SystemFExpr String String)
parseExpr = parse (whitespace *> expr <* eof) ""
parseType :: String -> Either ParseError (Ty String)
parseType = parse (whitespace *> ty <* eof) ""
-- Parse expressions
expr :: Parser (SystemFExpr String String)
expr = try tyapp <|> try app <|> term
app :: Parser (SystemFExpr String String)
app = chainl1 term (return App)
tyapp :: Parser (SystemFExpr String String)
tyapp = TyApp
<$> term
<*> ty'
where ty' = symbol '[' *> ty <* symbol ']'
term :: Parser (SystemFExpr String String)
term = try abs <|> tyabs <|> var <|> parens expr
var :: Parser (SystemFExpr String String)
var = Var <$> exprId
abs :: Parser (SystemFExpr String String)
abs = curry
<$> (symbol '\\' *> many1 args <* symbol '.')
<*> expr
where args = (,) <$> (exprId <* symbol ':') <*> ty
curry = flip . foldr . uncurry $ Abs
tyabs :: Parser (SystemFExpr String String)
tyabs = curry <$> args <*> expr
where args = symbol '\\' *> many1 typeId <* symbol '.'
curry = flip (foldr TyAbs)
-- Parse type expressions
ty :: Parser (Ty String)
ty = try arrow
arrow :: Parser (Ty String)
arrow = chainr1 tyterm (symbol' "->" $> TyArrow)
tyterm :: Parser (Ty String)
tyterm = tyvar <|> parens ty
tyvar :: Parser (Ty String)
tyvar = TyVar <$> typeId
parens :: Parser a -> Parser a
parens p = symbol '(' *> p <* symbol ')'
identifier :: Parser Char -> Parser String
identifier firstChar = lexeme ((:) <$> first <*> many rest)
where first = firstChar <|> char '_'
rest = first <|> digit
typeId, exprId :: Parser String
typeId = identifier upper
exprId = identifier lower
whitespace :: Parser ()
whitespace = void . many . oneOf $ " \t"
symbol :: Char -> Parser ()
symbol = void . lexeme . char
symbol' :: String -> Parser ()
symbol' = void . lexeme . string
lexeme :: Parser a -> Parser a
lexeme p = p <* whitespace
|
sgillespie/lambda-calculus
|
src/Language/SystemF/Parser.hs
|
mit
| 2,134
| 0
| 11
| 448
| 820
| 421
| 399
| 61
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Bfparser (
Program (..)
, Command (..)
, parseProgram
) where
import Data.Attoparsec.Text (Parser, endOfInput, char, many')
import Control.Applicative ((<|>), (<*), (*>), (<$>))
{-import Data.Text-}
{-m1 = "[+++..>>>>--[,,..++--]++--]"-}
{-m2 = "+++..>>>>--,,..++--++--"-}
{-s1 = parseTest parseProgram "<>b><"-}
{-s2 = parse parseProgram "<>b><"-}
{-s3 = parseOnly parseProgram m1-}
{-s4 = parseOnly parseProgram m2-}
newtype Program = Program [Command] deriving (Show)
data Command = IncDataPtr
| DecDataPtr
| IncByteAtPtr
| DecByteAtPtr
| OpByteAtPtr
| IpByteAtPtr
| Loop [Command] deriving (Show, Eq)
parseCommands :: Parser Command
parseCommands = parseIncDataPtr
<|> parseDecDataPtr
<|> parseIncByteAtPtr
<|> parseDecByteAtPtr
<|> parseOpByteAtPtr
<|> parseIpByteAtPtr
<|> parseLoop
parseIncDataPtr :: Parser Command
parseIncDataPtr = const IncDataPtr <$> char '>'
parseDecDataPtr :: Parser Command
parseDecDataPtr = const DecDataPtr <$> char '<'
parseIncByteAtPtr :: Parser Command
parseIncByteAtPtr = const IncByteAtPtr <$> char '+'
parseDecByteAtPtr :: Parser Command
parseDecByteAtPtr = const DecByteAtPtr <$> char '-'
parseOpByteAtPtr :: Parser Command
parseOpByteAtPtr = const OpByteAtPtr <$> char '.'
parseIpByteAtPtr :: Parser Command
parseIpByteAtPtr = const IpByteAtPtr <$> char ','
parseLoop :: Parser Command
parseLoop = Loop <$> (char '[' *> many' parseCommands <* char ']')
parseProgram :: Parser Program
parseProgram = Program <$> many' parseCommands <* endOfInput
|
ajjaic/hs-bfk
|
Bfparser.hs
|
mit
| 1,703
| 0
| 10
| 371
| 373
| 208
| 165
| 39
| 1
|
import Control.Monad
import Data.List.Extra
import Data.Maybe
import qualified Data.Char as C
import qualified Data.Map as Map
import qualified Data.Set as Set
------
iread :: String -> Int
iread = read
do2 f g x = (f x, g x)
answer :: (Show a) => (String -> a) -> IO ()
answer f = interact $ (++"\n") . show . f
ord0 c = C.ord c - C.ord 'a'
chr0 i = C.chr (i + C.ord 'a')
incletter c i = chr0 ((ord0 c + i) `mod` 26)
splitOn1 a b = fromJust $ stripInfix a b
rsplitOn1 a b = fromJust $ stripInfixEnd a b
-- pull out every part of a String that can be read in
-- for some Read a and ignore the rest
readOut :: Read a => String -> [a]
readOut "" = []
readOut s = case reads s of
[] -> readOut $ tail s
[(x, s')] -> x : readOut s'
_ -> error "ambiguous parse"
ireadOut :: String -> [Int]
ireadOut = readOut
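-- A hedged usage example (added; not part of the original solution): readOut
-- skips characters that cannot be parsed, so
-- ireadOut "a1b-2c30" -- [1,-2,30]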
--------
part1 ls = length (filter (\l -> length l == length (nub l)) ls)
part2 ls = length (filter (\l -> length l == length (nub (map sort l))) ls)
main = answer $ do2 part1 part2 . map words . lines
|
msullivan/advent-of-code
|
2017/A4.hs
|
mit
| 1,021
| 0
| 15
| 230
| 480
| 251
| 229
| 27
| 3
|
module Main where
import Lexer
import Parser
import AST
import Pretty
getAST src = case scan src of
Left err -> error err
Right toks -> parseCool toks
main :: IO ()
main = do
src <- getContents
print (pretty $ getAST src)
|
gnuvince/vfb-coolc
|
src/Main.hs
|
mit
| 265
| 0
| 10
| 86
| 93
| 46
| 47
| 12
| 2
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-instance-creditspecification.html
module Stratosphere.ResourceProperties.EC2InstanceCreditSpecification where
import Stratosphere.ResourceImports
-- | Full data type definition for EC2InstanceCreditSpecification. See
-- 'ec2InstanceCreditSpecification' for a more convenient constructor.
data EC2InstanceCreditSpecification =
EC2InstanceCreditSpecification
{ _eC2InstanceCreditSpecificationCPUCredits :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON EC2InstanceCreditSpecification where
toJSON EC2InstanceCreditSpecification{..} =
object $
catMaybes
[ fmap (("CPUCredits",) . toJSON) _eC2InstanceCreditSpecificationCPUCredits
]
-- | Constructor for 'EC2InstanceCreditSpecification' containing required
-- fields as arguments.
ec2InstanceCreditSpecification
:: EC2InstanceCreditSpecification
ec2InstanceCreditSpecification =
EC2InstanceCreditSpecification
{ _eC2InstanceCreditSpecificationCPUCredits = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-instance-creditspecification.html#cfn-ec2-instance-creditspecification-cpucredits
ecicsCPUCredits :: Lens' EC2InstanceCreditSpecification (Maybe (Val Text))
ecicsCPUCredits = lens _eC2InstanceCreditSpecificationCPUCredits (\s a -> s { _eC2InstanceCreditSpecificationCPUCredits = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/EC2InstanceCreditSpecification.hs
|
mit
| 1,544
| 0
| 12
| 157
| 173
| 100
| 73
| 22
| 1
|
#!/usr/bin/runghc
{-# Language TemplateHaskell, QuasiQuotes, FlexibleContexts,
TypeOperators, TupleSections, LambdaCase, OverloadedStrings,
NoMonomorphismRestriction, RelaxedPolyRec, ScopedTypeVariables #-}
import Text.Groom
import Prelude hiding ((.), id, foldl)
import Control.Category ((.), id)
import Control.Monad
import Data.Char
import Control.Isomorphism.Partial
import Control.Isomorphism.Partial.TH
import Control.Isomorphism.Partial.Unsafe (Iso (Iso))
import Text.Syntax
import qualified Text.Syntax.Parser.Naive as Parser
import qualified Text.Syntax.Printer.Naive as Printer
import Data.Maybe
import qualified Data.List as Ls
import Data.Set.CharSet
import Text.Groom
type Quote = String
type Named = String
data ExprR
= ExRPostfix [Expr] String
| ExRNamed String
deriving (Eq,Read,Show,Ord)
data Expr
= ExSelector Char String
| ExRef String
| ExSlot
| ExBlock [Expr]
| ExBranch [Expr]
| ExInfixBinary Expr String Expr
| ExPrefix String [Expr]
| ExLR Expr ExprR
| ExRegex
-- | ExDumb
deriving (Eq,Read,Show,Ord)
$(defineIsomorphisms ''ExprR)
$(defineIsomorphisms ''Expr)
operatorSymbols = stringCS "-+" --Read from the operator def table
alpha = subset isAlpha <$> token
num = subset isNumber <$> token
symbol = subset (`elemCS` operatorSymbols) <$> token
operator = many1 symbol <|> many1 alpha
--identifier = cons <$> alpha <*> many (alpha <|> num)
identifier = many (alpha <|> num)
mkSelector delim =
exSelector <$> pure d <*> between (text [d]) (text [d])
(many $ subset (/=d) <$> token <|> (text [d,d] *> pure d))
where
d = delim
selectors = Ls.foldl1 (<|>) $ map mkSelector "/¶\\█○"
arityMark n = text $ replicate n '`'
preOp x = operator <*> text x
nList::(Syntax f, Eq a) => Int -> f a -> f [a]
nList n x = f n where
f 0 = pure []
f i = cons <$> x <*> f (i-1)
alts :: Alternative f => [f x] -> f x
alts = Ls.foldl1 (<|>)
expr::Syntax f => f Expr
expr = e 0
where
postfixNRest x n = exRPostfix <$> nList (n-1) x <*> arityMark n *> operator
e::Syntax f => Int -> f Expr
e = \case
i@0 -> chainl1 (e (i+1)) operator exInfixBinary
i@1 -> let x = e $ i+1 in
(\r->foldl exLR <$> x <*> many r)
$ exRNamed <$> text "@" *> identifier
<|> alts (map (postfixNRest x) [1,2,3])
_ -> expr'
expr'::Syntax f => f Expr
expr' = exRef <$> text "$" *> identifier
<|> exSlot <$> text "_"
<|> selectors
-- <|> exPrefix <$> text ":." <*> nlist 3 expr
-- <|> exPrefix <$> text ":" <*> nlist 2 expr
<|> alts (map prefixN [1,2,3])
<|> exBlock <$> between (text "{") (text "}") (many expr)
where
prefixN n = exPrefix <$> operator <*> arityMark n *> nList n expr
test p p' x = do
putStrLn "========================================="
putStrLn x
--let p0 = Parser.Parser p
--let p1 = Printer.Printer p
let a = Parser.parse p x::[Expr]
putStrLn $ groom a
let b = map (Printer.print p') a
putStrLn $ unlines $ catMaybes b
main = do
let t = test expr expr
t "$abcf@kkk+++/.whatever//.kkk/@abc@def+++○div○@123@@"
t "$aaa@kkk-_"
t "--`$abcf"
t "--``/.ww/@$"
t "$a$b``+$``+"
t "$1`+@5"
t "$--$@$@$@```x/a/@``y/b/@``z-/fff/@5"
t "--``$abcd"
t ""
t "$$"
return ()
|
ducis/scraper-dsl-open-snapshot
|
scrapoo2.hs
|
gpl-2.0
| 3,157
| 18
| 20
| 590
| 1,193
| 618
| 575
| 94
| 3
|
{- |
Module : $Header$
Description : Morphisms in Propositional logic
Copyright : (c) Dominik Luecke, Uni Bremen 2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : luecke@informatik.uni-bremen.de
Stability : experimental
Portability : portable
Definition of morphisms for propositional logic
copied to "Temporal.Morphism"
-}
{-
Ref.
Till Mossakowski, Joseph Goguen, Razvan Diaconescu, Andrzej Tarlecki.
What is a Logic?.
In Jean-Yves Beziau (Ed.), Logica Universalis, pp. 113-133. Birkhaeuser.
2005.
-}
module Propositional.Morphism
( Morphism (..) -- datatype for Morphisms
, pretty -- pretty printing
, idMor -- identity morphism
, isLegalMorphism -- check if morphism is ok
, composeMor -- composition
, inclusionMap -- inclusion map
, mapSentence -- map of sentences
, mapSentenceH -- map of sentences, without Result type
, applyMap -- application function for maps
, applyMorphism -- application function for morphism
, morphismUnion
) where
import qualified Data.Map as Map
import qualified Data.Set as Set
import Propositional.Sign as Sign
import qualified Common.Result as Result
import qualified Propositional.AS_BASIC_Propositional as AS_BASIC
import Common.Id as Id
import Common.Result
import Common.Doc
import Common.DocUtils
-- | The datatype for morphisms in propositional logic as
-- maps of sets
data Morphism = Morphism
{ source :: Sign
, target :: Sign
, propMap :: Map.Map Id Id
} deriving (Eq, Ord, Show)
instance Pretty Morphism where
pretty = printMorphism
-- | Constructs an id-morphism
idMor :: Sign -> Morphism
idMor a = inclusionMap a a
-- | Determines whether a morphism is valid
isLegalMorphism :: Morphism -> Result ()
isLegalMorphism pmor =
let psource = items $ source pmor
ptarget = items $ target pmor
pdom = Map.keysSet $ propMap pmor
pcodom = Set.map (applyMorphism pmor) psource
in if Set.isSubsetOf pcodom ptarget && Set.isSubsetOf pdom psource
then return () else fail "illegal Propositional morphism"
-- | Application function for morphisms
applyMorphism :: Morphism -> Id -> Id
applyMorphism mor idt = Map.findWithDefault idt idt $ propMap mor
-- | Application function for propMaps
applyMap :: Map.Map Id Id -> Id -> Id
applyMap pmap idt = Map.findWithDefault idt idt pmap
-- | Composition of morphisms in propositional Logic
composeMor :: Morphism -> Morphism -> Result Morphism
composeMor f g =
let fSource = source f
gTarget = target g
fMap = propMap f
gMap = propMap g
in return Morphism
{ source = fSource
, target = gTarget
, propMap = if Map.null gMap then fMap else
Set.fold ( \ i -> let j = applyMap gMap (applyMap fMap i) in
if i == j then id else Map.insert i j)
Map.empty $ items fSource }
-- | Pretty printing for Morphisms
printMorphism :: Morphism -> Doc
printMorphism m = pretty (source m) <> text "-->" <> pretty (target m)
<> vcat (map ( \ (x, y) -> lparen <> pretty x <> text ","
<> pretty y <> rparen) $ Map.assocs $ propMap m)
-- | Inclusion map of a subsig into a supersig
inclusionMap :: Sign.Sign -> Sign.Sign -> Morphism
inclusionMap s1 s2 = Morphism
{ source = s1
, target = s2
, propMap = Map.empty }
-- | sentence translation along signature morphism
-- here just the renaming of formulae
mapSentence :: Morphism -> AS_BASIC.FORMULA -> Result.Result AS_BASIC.FORMULA
mapSentence mor = return . mapSentenceH mor
mapSentenceH :: Morphism -> AS_BASIC.FORMULA -> AS_BASIC.FORMULA
mapSentenceH mor frm = case frm of
AS_BASIC.Negation form rn -> AS_BASIC.Negation (mapSentenceH mor form) rn
AS_BASIC.Conjunction form rn ->
AS_BASIC.Conjunction (map (mapSentenceH mor) form) rn
AS_BASIC.Disjunction form rn ->
AS_BASIC.Disjunction (map (mapSentenceH mor) form) rn
AS_BASIC.Implication form1 form2 rn -> AS_BASIC.Implication
(mapSentenceH mor form1) (mapSentenceH mor form2) rn
AS_BASIC.Equivalence form1 form2 rn -> AS_BASIC.Equivalence
(mapSentenceH mor form1) (mapSentenceH mor form2) rn
AS_BASIC.True_atom rn -> AS_BASIC.True_atom rn
AS_BASIC.False_atom rn -> AS_BASIC.False_atom rn
AS_BASIC.Predication predH -> AS_BASIC.Predication
$ id2SimpleId $ applyMorphism mor $ Id.simpleIdToId predH
morphismUnion :: Morphism -> Morphism -> Result.Result Morphism
morphismUnion mor1 mor2 =
let pmap1 = propMap mor1
pmap2 = propMap mor2
p1 = source mor1
p2 = source mor2
up1 = Set.difference (items p1) $ Map.keysSet pmap1
up2 = Set.difference (items p2) $ Map.keysSet pmap2
(pds, pmap) = foldr ( \ (i, j) (ds, m) -> case Map.lookup i m of
Nothing -> (ds, Map.insert i j m)
Just k -> if j == k then (ds, m) else
(Diag Error
("incompatible mapping of prop " ++ showId i " to "
++ showId j " and " ++ showId k "")
nullRange : ds, m)) ([], pmap1)
(Map.toList pmap2 ++ map (\ a -> (a, a))
(Set.toList $ Set.union up1 up2))
in if null pds then return Morphism
{ source = unite p1 p2
, target = unite (target mor1) $ target mor2
, propMap = pmap } else Result pds Nothing
|
nevrenato/Hets_Fork
|
Propositional/Morphism.hs
|
gpl-2.0
| 5,453
| 0
| 23
| 1,400
| 1,421
| 744
| 677
| 102
| 8
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module GeoLocation where
import Data.Aeson
import Data.Aeson.Types
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import Text.Regex.Posix
import Data.List.Split
import GHC.Generics
import qualified Data.ByteString.Lazy.Char8 as B
data GeoReq = GeoReq { wifiAPs :: [AccessPoint] } deriving (Show, Generic)
data AccessPoint = AccessPoint { address :: String } deriving (Show, Generic)
data GeoResp = GeoResp { location :: Location
, accuracy :: Float
} deriving (Show, Generic)
data Location = Location { lat :: Float
, lng :: Float
} deriving (Show, Generic)
-- let the compiler do these
instance FromJSON Location
instance FromJSON GeoReq
instance FromJSON GeoResp
instance FromJSON AccessPoint
instance ToJSON Location
instance ToJSON GeoReq
instance ToJSON GeoResp
instance ToJSON AccessPoint
testReq :: IO (Maybe Location)
testReq = do
manager <- newManager tlsManagerSettings
-- get Google api key to get latitude/longitude
latlongKey <- readFile "/home/frank/Documents/Haskell/WeatherApplication/GoogleLocationKey.txt"
-- Create the request
initialRequest <- parseUrl $ "https://www.googleapis.com/geolocation/v1/geolocate?key=" ++ latlongKey
let request = initialRequest { method = "POST", requestBody = RequestBodyLBS $ encode req }
response <- httpLbs request manager
-- putStrLn $ "The status code was: " ++ (show $ statusCode $ responseStatus response)
let b = responseBody response
-- print $ responseBody response
let gsp = decode b :: Maybe GeoResp
case gsp of
Nothing -> return Nothing
Just r -> return $ Just $ location r
where body = B.unpack $ encode $ req
req = GeoReq [AccessPoint addr]
addr = "" -- "00:00:0c:07:ac:2c"
lookupLatLong :: (Maybe Location) -> IO (String, String)
lookupLatLong l = case l of
Nothing -> print "what" >> return ("","")
Just loc -> do
let lt = lat loc
lg = lng loc
manager <- newManager tlsManagerSettings
locationKey <- readFile "/home/frank/Documents/Haskell/WeatherApplication/GoogleLatLongToCityKey.txt"
request <- parseUrl $
"https://maps.googleapis.com/maps/api/geocode/json?latlng=" ++ show lt ++ "," ++ show lg ++ "&location_type=approximate&result_type=locality&key" ++ locationKey
response <- httpLbs request manager
let res = B.unpack $ responseBody response
-- formatting mess. will be cleaned up
let res' = getAllTextMatches $ res =~ ("\"formatted_address\" : .*$" :: String) :: [String]
res'' = head $ tail $ splitOn ":" $ head res'
res''' = reverse $ dropWhile (\c -> c == '"' || c == ' ' || c == ',') $ reverse $ dropWhile (\c -> c == '"' || c == ' ' || c == ',') $ res''
res'''' = (\(x:y:[]) -> (x,y)) $ take 2 $ splitOn ", " res'''
-- print res''''
return $ res''''
|
frankhucek/WeatherApplication
|
src/GeoLocation.hs
|
gpl-3.0
| 3,040
| 0
| 24
| 732
| 802
| 416
| 386
| 59
| 2
|
import Network.Transport.TCP (createTransport, defaultTCPParameters)
import Control.Distributed.Process
import Control.Distributed.Process.Node
main :: IO ()
main = do
print "test1"
Right t <- createTransport "127.0.0.1" "10501" defaultTCPParameters
node <- newLocalNode t initRemoteTable
return ()
|
adarqui/ToyBox
|
haskell/cloud-haskell/src/test1.hs
|
gpl-3.0
| 304
| 0
| 8
| 36
| 86
| 43
| 43
| 9
| 1
|
module Wiretap.Analysis.DataRace
( raceCandidates
, sharedLocations
, DataRace (..)
) where
import Prelude hiding (reads)
import Data.Function (on)
import qualified Data.List as L
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Unique
import Control.Monad (liftM2)
import Wiretap.Data.Event
import Wiretap.Data.History
import Wiretap.Analysis.Permute
import Wiretap.Utils
sharedLocations :: PartialHistory h
=> h
-> [(Location, [(Unique Event, Unique Event)])]
sharedLocations h =
filter (not . L.null . snd) . map combineLocation $ byLocation writes
where
combineLocation (l, ws) =
(l, filter (uncurry ((/=) `on` threadOf)) pairs)
where
pairs =
combinations ws ++ readwriteconflicts
readwriteconflicts =
crossproduct ws (concat $ M.lookup l readsByLocation)
readsByLocation =
M.fromDistinctAscList $ byLocation reads
byLocation =
groupOnFst . L.sortOn fst
reads =
onReads (\u (l, _) -> (l, u)) h
writes =
onWrites (\u (l, _) -> (l, u)) h
data DataRace = DataRace
{ location :: Location
, eventA :: UE
, eventB :: UE
} deriving (Show, Eq)
instance Ord DataRace where
compare = compare `on` (liftM2 (,) eventA eventB)
instance Candidate DataRace where
candidateSet (DataRace _ a b) =
S.fromList [a, b]
raceCandidates :: PartialHistory h
=> h
-> [DataRace]
raceCandidates =
concatMap toDataRaces . sharedLocations
where
toDataRaces (l, events) =
map (uncurry $ DataRace l) events
|
ucla-pls/wiretap-tools
|
src/Wiretap/Analysis/DataRace.hs
|
gpl-3.0
| 1,708
| 0
| 13
| 507
| 524
| 298
| 226
| 51
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
module Bamboo.Type.Theme where
import Bamboo.Helper.PreludeEnv
import Data.Default
data ThemeConfig = ThemeConfig
{ name :: String
, css :: [String]
, js :: [String]
} deriving (Show, Read)
data ThemeConfigData =
Name
| Css
| Js
deriving (Eq, Show, Read)
instance Default ThemeConfig where
def = ThemeConfig def def def
|
nfjinjing/bamboo
|
src/Bamboo/Type/Theme.hs
|
gpl-3.0
| 404
| 0
| 9
| 102
| 111
| 66
| 45
| 16
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Config.Class where
import Prelude hiding ((||))
import Helper.MaybeHelper((||))
import Data.Text (Text, pack, unpack)
import Class.Castable(Castable(from))
import Data.List.Extra (lower) -- extra
import Data.Yaml(decodeFile, FromJSON, Object, Value(Object, Number))
import qualified Data.Yaml as Y
import Data.HashMap.Strict as M
-- import qualified DB -- (AdapterType(..), Config(Config), adapter, database, pool, timeout) as
import Data.Scientific (Scientific(..), coefficient)
import Data.Maybe(fromMaybe)
import Data.List(elem)
import Config.Env(Env(..))
-- Returns the object stored under the key given as the argument
getObject :: Text -> Value -> Maybe (Object)
getObject env v = do
envs <- fromObject v
case M.lookup env envs of
Just (Object o) -> Just o
_ -> Nothing
fromObject :: Value -> Maybe (Object)
fromObject m = do
case m of
Object o -> Just o
_ -> Nothing
class ConfigClass a where
defaultConfig :: Env -> IO a
objectToConfig :: Object -> a -> a
mor :: a -> Object -> (a -> Maybe b) -> (Object -> Maybe b) -> Maybe b
mor dflt obj dfltf objf = (objf obj) || (dfltf dflt)
or :: a -> Object -> (a -> b) -> (Object -> Maybe b) -> b
or dflt obj dfltf objf = fromMaybe (dfltf dflt) (objf obj)
readYaml :: FilePath -> Env -> IO (Maybe a)
readYaml path env = do
-- putStrLn $ "filepath: " ++ path
mallconfigs <- decodeFile path
-- putStrLn $ show mallconfigs
defaultConfig' <- defaultConfig env
return $ (mallconfigs >>= (\all -> Just $ initImportOtherConfig' all defaultConfig'))
where
envstr' = Data.Text.pack $ lower $ show env
initImportOtherConfig' allconfigs' dflt' = importOtherConfig allconfigs' [] envstr' (getObject envstr' allconfigs') dflt'
importOtherConfig :: ConfigClass a => Value -> [Text] -> Text -> Maybe Object -> a -> a
importOtherConfig _ _ _ Nothing dflt = dflt
importOtherConfig allconfigs imported selfname (Just confobj) dflt =
if elem selfname imported
then dflt
else onlySelfOrWithInc' $ readInclude confobj
where
imported' = imported ++ [selfname]
onlySelfOrWithInc' Nothing = objectToConfig confobj dflt
onlySelfOrWithInc' (Just incText') = objectToConfig confobj $ importOtherConfig allconfigs imported' incText' (getObject incText' allconfigs) dflt
{-
importOtherConfig :: ConfigClass a => Y.Value -> [String] -> Y.Value -> a -> a
importOtherConfig allconfigs imported config dflt =
(include config
>>= (\str -> bool (Just str) Nothing (L.elem str imported))
>>= (\str ->
getObject (Data.Text.pack str) allconfigs
>>= return . (config <<<) . importOtherConfig allconfigs (imported ++ [str]) . objectToPreDBConfig)) ||| config
-}
instance Castable String Env where
from "production" = Production
from "test" = Test
from _ = Development
instance Castable Env String where
from Production = "production"
from Test = "test"
from Development = "development"
readInclude :: Object -> Maybe Text
readInclude config =
case M.lookup "<<" config of
Just (Y.String s) -> Just s -- $ Data.Text.unpack s
Nothing -> Nothing
_ -> fail "Invalid type for: <<"
lookupInt :: Text -> Object -> Maybe (Int)
lookupInt key config = lookupInteger key config >>= return . fromInteger
lookupInteger :: Text -> Object -> Maybe Integer
lookupInteger k config =
case M.lookup k config of
Just (Number t) -> Just (coefficient t)
Nothing -> Nothing
_ -> fail $ "Invalid type (not integer) for: " ++ (Data.Text.unpack k)
lookupText :: Text -> Object -> Maybe Text
lookupText k config =
case M.lookup k config of
Just (Y.String t) -> Just t
Nothing -> Nothing
_ -> fail $ "Invalid type (not string) for: " ++ (Data.Text.unpack k)
lookupString :: Text -> Object -> Maybe String
lookupString k config = lookupText k config >>= return . Data.Text.unpack
lookupWord :: Text -> Object -> Maybe (Word)
lookupWord key config = lookupInteger key config >>= return . fromInteger
|
shinjiro-itagaki/shinjirecs
|
shinjirecs-api/src/Config/Class.hs
|
gpl-3.0
| 4,249
| 0
| 15
| 946
| 1,208
| 626
| 582
| 82
| 3
|
module XMonad.Hooks.DynamicLog.PrettyPrinter where
import XMonad hiding (workspaces)
import XMonad.Core hiding (workspaces)
import qualified XMonad.StackSet as S
import XMonad.Util.NamedWindows
import XMonad.Hooks.UrgencyHook
import XMonad.Hooks.DynamicLog.Status.DZen2.Universal
import XMonad.Hooks.DynamicLog.Status.StatusText
import XMonad.Hooks.DynamicLog.Status.StatusText.Dynamic
import qualified Data.Text as T
import Control.Monad.IO.Class
import Control.Monad
import Data.Maybe
import Data.List (intersperse, init)
|
Fizzixnerd/xmonad-config
|
site-haskell/src/XMonad/Hooks/DynamicLog/PrettyPrinter.hs
|
gpl-3.0
| 532
| 0
| 5
| 49
| 112
| 78
| 34
| 14
| 0
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.WebApps.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the details of all web apps for a given enterprise.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.webapps.list@.
module Network.Google.Resource.AndroidEnterprise.WebApps.List
(
-- * REST Resource
WebAppsListResource
-- * Creating a Request
, webAppsList
, WebAppsList
-- * Request Lenses
, walXgafv
, walUploadProtocol
, walEnterpriseId
, walAccessToken
, walUploadType
, walCallback
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.webapps.list@ method which the
-- 'WebAppsList' request conforms to.
type WebAppsListResource =
"androidenterprise" :>
"v1" :>
"enterprises" :>
Capture "enterpriseId" Text :>
"webApps" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] WebAppsListResponse
-- | Retrieves the details of all web apps for a given enterprise.
--
-- /See:/ 'webAppsList' smart constructor.
data WebAppsList =
WebAppsList'
{ _walXgafv :: !(Maybe Xgafv)
, _walUploadProtocol :: !(Maybe Text)
, _walEnterpriseId :: !Text
, _walAccessToken :: !(Maybe Text)
, _walUploadType :: !(Maybe Text)
, _walCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'WebAppsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'walXgafv'
--
-- * 'walUploadProtocol'
--
-- * 'walEnterpriseId'
--
-- * 'walAccessToken'
--
-- * 'walUploadType'
--
-- * 'walCallback'
webAppsList
:: Text -- ^ 'walEnterpriseId'
-> WebAppsList
webAppsList pWalEnterpriseId_ =
WebAppsList'
{ _walXgafv = Nothing
, _walUploadProtocol = Nothing
, _walEnterpriseId = pWalEnterpriseId_
, _walAccessToken = Nothing
, _walUploadType = Nothing
, _walCallback = Nothing
}
-- | V1 error format.
walXgafv :: Lens' WebAppsList (Maybe Xgafv)
walXgafv = lens _walXgafv (\ s a -> s{_walXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
walUploadProtocol :: Lens' WebAppsList (Maybe Text)
walUploadProtocol
= lens _walUploadProtocol
(\ s a -> s{_walUploadProtocol = a})
-- | The ID of the enterprise.
walEnterpriseId :: Lens' WebAppsList Text
walEnterpriseId
= lens _walEnterpriseId
(\ s a -> s{_walEnterpriseId = a})
-- | OAuth access token.
walAccessToken :: Lens' WebAppsList (Maybe Text)
walAccessToken
= lens _walAccessToken
(\ s a -> s{_walAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
walUploadType :: Lens' WebAppsList (Maybe Text)
walUploadType
= lens _walUploadType
(\ s a -> s{_walUploadType = a})
-- | JSONP
walCallback :: Lens' WebAppsList (Maybe Text)
walCallback
= lens _walCallback (\ s a -> s{_walCallback = a})
instance GoogleRequest WebAppsList where
type Rs WebAppsList = WebAppsListResponse
type Scopes WebAppsList =
'["https://www.googleapis.com/auth/androidenterprise"]
requestClient WebAppsList'{..}
= go _walEnterpriseId _walXgafv _walUploadProtocol
_walAccessToken
_walUploadType
_walCallback
(Just AltJSON)
androidEnterpriseService
where go
= buildClient (Proxy :: Proxy WebAppsListResource)
mempty
|
brendanhay/gogol
|
gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/WebApps/List.hs
|
mpl-2.0
| 4,614
| 0
| 18
| 1,122
| 706
| 411
| 295
| 105
| 1
|
<h3>Example 62 (categorical bars #3)</h3>
<palette name="irispal" type="discrete">
"I. setosa" red
"I. versicolor" green
"I. virginica" blue
</palette>
<plot height=600 aspect=1 axis-x-label="Species" axis-y-label="Petal length">
<boxes x="[[iris.species]]" y="[[iris.petal_length]]"
stroke="[[irispal(iris.species)]]">
</boxes>
</plot>
<plot-data name="iris" format="csv"
cols="sepal_length,sepal_width,petal_length,petal_width,species">
<metadata name="species"
category-order="I. setosa;I. versicolor;I. virginica"></metadata>
5.1,3.5,1.4,0.2,I. setosa
4.9,3.0,1.4,0.2,I. setosa
4.7,3.2,1.3,0.2,I. setosa
4.6,3.1,1.5,0.2,I. setosa
5.0,3.6,1.4,0.2,I. setosa
5.4,3.9,1.7,0.4,I. setosa
4.6,3.4,1.4,0.3,I. setosa
5.0,3.4,1.5,0.2,I. setosa
4.4,2.9,1.4,0.2,I. setosa
4.9,3.1,1.5,0.1,I. setosa
5.4,3.7,1.5,0.2,I. setosa
4.8,3.4,1.6,0.2,I. setosa
4.8,3.0,1.4,0.1,I. setosa
4.3,3.0,1.1,0.1,I. setosa
5.8,4.0,1.2,0.2,I. setosa
5.7,4.4,1.5,0.4,I. setosa
5.4,3.9,1.3,0.4,I. setosa
5.1,3.5,1.4,0.3,I. setosa
5.7,3.8,1.7,0.3,I. setosa
5.1,3.8,1.5,0.3,I. setosa
5.4,3.4,1.7,0.2,I. setosa
5.1,3.7,1.5,0.4,I. setosa
4.6,3.6,1.0,0.2,I. setosa
5.1,3.3,1.7,0.5,I. setosa
4.8,3.4,1.9,0.2,I. setosa
5.0,3.0,1.6,0.2,I. setosa
5.0,3.4,1.6,0.4,I. setosa
5.2,3.5,1.5,0.2,I. setosa
5.2,3.4,1.4,0.2,I. setosa
4.7,3.2,1.6,0.2,I. setosa
4.8,3.1,1.6,0.2,I. setosa
5.4,3.4,1.5,0.4,I. setosa
5.2,4.1,1.5,0.1,I. setosa
5.5,4.2,1.4,0.2,I. setosa
4.9,3.1,1.5,0.2,I. setosa
5.0,3.2,1.2,0.2,I. setosa
5.5,3.5,1.3,0.2,I. setosa
4.9,3.6,1.4,0.1,I. setosa
4.4,3.0,1.3,0.2,I. setosa
5.1,3.4,1.5,0.2,I. setosa
5.0,3.5,1.3,0.3,I. setosa
4.5,2.3,1.3,0.3,I. setosa
4.4,3.2,1.3,0.2,I. setosa
5.0,3.5,1.6,0.6,I. setosa
5.1,3.8,1.9,0.4,I. setosa
4.8,3.0,1.4,0.3,I. setosa
5.1,3.8,1.6,0.2,I. setosa
4.6,3.2,1.4,0.2,I. setosa
5.3,3.7,1.5,0.2,I. setosa
5.0,3.3,1.4,0.2,I. setosa
7.0,3.2,4.7,1.4,I. versicolor
6.4,3.2,4.5,1.5,I. versicolor
6.9,3.1,4.9,1.5,I. versicolor
5.5,2.3,4.0,1.3,I. versicolor
6.5,2.8,4.6,1.5,I. versicolor
5.7,2.8,4.5,1.3,I. versicolor
6.3,3.3,4.7,1.6,I. versicolor
4.9,2.4,3.3,1.0,I. versicolor
6.6,2.9,4.6,1.3,I. versicolor
5.2,2.7,3.9,1.4,I. versicolor
5.0,2.0,3.5,1.0,I. versicolor
5.9,3.0,4.2,1.5,I. versicolor
6.0,2.2,4.0,1.0,I. versicolor
6.1,2.9,4.7,1.4,I. versicolor
5.6,2.9,3.6,1.3,I. versicolor
6.7,3.1,4.4,1.4,I. versicolor
5.6,3.0,4.5,1.5,I. versicolor
5.8,2.7,4.1,1.0,I. versicolor
6.2,2.2,4.5,1.5,I. versicolor
5.6,2.5,3.9,1.1,I. versicolor
5.9,3.2,4.8,1.8,I. versicolor
6.1,2.8,4.0,1.3,I. versicolor
6.3,2.5,4.9,1.5,I. versicolor
6.1,2.8,4.7,1.2,I. versicolor
6.4,2.9,4.3,1.3,I. versicolor
6.6,3.0,4.4,1.4,I. versicolor
6.8,2.8,4.8,1.4,I. versicolor
6.7,3.0,5.0,1.7,I. versicolor
6.0,2.9,4.5,1.5,I. versicolor
5.7,2.6,3.5,1.0,I. versicolor
5.5,2.4,3.8,1.1,I. versicolor
5.5,2.4,3.7,1.0,I. versicolor
5.8,2.7,3.9,1.2,I. versicolor
6.0,2.7,5.1,1.6,I. versicolor
5.4,3.0,4.5,1.5,I. versicolor
6.0,3.4,4.5,1.6,I. versicolor
6.7,3.1,4.7,1.5,I. versicolor
6.3,2.3,4.4,1.3,I. versicolor
5.6,3.0,4.1,1.3,I. versicolor
5.5,2.5,4.0,1.3,I. versicolor
5.5,2.6,4.4,1.2,I. versicolor
6.1,3.0,4.6,1.4,I. versicolor
5.8,2.6,4.0,1.2,I. versicolor
5.0,2.3,3.3,1.0,I. versicolor
5.6,2.7,4.2,1.3,I. versicolor
5.7,3.0,4.2,1.2,I. versicolor
5.7,2.9,4.2,1.3,I. versicolor
6.2,2.9,4.3,1.3,I. versicolor
5.1,2.5,3.0,1.1,I. versicolor
5.7,2.8,4.1,1.3,I. versicolor
6.3,3.3,6.0,2.5,I. virginica
5.8,2.7,5.1,1.9,I. virginica
7.1,3.0,5.9,2.1,I. virginica
6.3,2.9,5.6,1.8,I. virginica
6.5,3.0,5.8,2.2,I. virginica
7.6,3.0,6.6,2.1,I. virginica
4.9,2.5,4.5,1.7,I. virginica
7.3,2.9,6.3,1.8,I. virginica
6.7,2.5,5.8,1.8,I. virginica
7.2,3.6,6.1,2.5,I. virginica
6.5,3.2,5.1,2.0,I. virginica
6.4,2.7,5.3,1.9,I. virginica
6.8,3.0,5.5,2.1,I. virginica
5.7,2.5,5.0,2.0,I. virginica
5.8,2.8,5.1,2.4,I. virginica
6.4,3.2,5.3,2.3,I. virginica
6.5,3.0,5.5,1.8,I. virginica
7.7,3.8,6.7,2.2,I. virginica
7.7,2.6,6.9,2.3,I. virginica
6.0,2.2,5.0,1.5,I. virginica
6.9,3.2,5.7,2.3,I. virginica
5.6,2.8,4.9,2.0,I. virginica
7.7,2.8,6.7,2.0,I. virginica
6.3,2.7,4.9,1.8,I. virginica
6.7,3.3,5.7,2.1,I. virginica
7.2,3.2,6.0,1.8,I. virginica
6.2,2.8,4.8,1.8,I. virginica
6.1,3.0,4.9,1.8,I. virginica
6.4,2.8,5.6,2.1,I. virginica
7.2,3.0,5.8,1.6,I. virginica
7.4,2.8,6.1,1.9,I. virginica
7.9,3.8,6.4,2.0,I. virginica
6.4,2.8,5.6,2.2,I. virginica
6.3,2.8,5.1,1.5,I. virginica
6.1,2.6,5.6,1.4,I. virginica
7.7,3.0,6.1,2.3,I. virginica
6.3,3.4,5.6,2.4,I. virginica
6.4,3.1,5.5,1.8,I. virginica
6.0,3.0,4.8,1.8,I. virginica
6.9,3.1,5.4,2.1,I. virginica
6.7,3.1,5.6,2.4,I. virginica
6.9,3.1,5.1,2.3,I. virginica
5.8,2.7,5.1,1.9,I. virginica
6.8,3.2,5.9,2.3,I. virginica
6.7,3.3,5.7,2.5,I. virginica
6.7,3.0,5.2,2.3,I. virginica
6.3,2.5,5.0,1.9,I. virginica
6.5,3.0,5.2,2.0,I. virginica
6.2,3.4,5.4,2.3,I. virginica
5.9,3.0,5.1,1.8,I. virginica
</plot-data>
|
openbrainsrc/hRadian
|
examples/Example/defunct/Eg62.hs
|
mpl-2.0
| 4,839
| 1,233
| 19
| 387
| 3,825
| 1,756
| 2,069
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.PlayLists.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a playlist.
--
-- /See:/ <https://developers.google.com/youtube/v3 YouTube Data API Reference> for @youtube.playlists.delete@.
module Network.Google.Resource.YouTube.PlayLists.Delete
(
-- * REST Resource
PlayListsDeleteResource
-- * Creating a Request
, playListsDelete
, PlayListsDelete
-- * Request Lenses
, pldOnBehalfOfContentOwner
, pldId
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.playlists.delete@ method which the
-- 'PlayListsDelete' request conforms to.
type PlayListsDeleteResource =
"youtube" :>
"v3" :>
"playlists" :>
QueryParam "id" Text :>
QueryParam "onBehalfOfContentOwner" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes a playlist.
--
-- /See:/ 'playListsDelete' smart constructor.
data PlayListsDelete = PlayListsDelete'
{ _pldOnBehalfOfContentOwner :: !(Maybe Text)
, _pldId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PlayListsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldOnBehalfOfContentOwner'
--
-- * 'pldId'
playListsDelete
:: Text -- ^ 'pldId'
-> PlayListsDelete
playListsDelete pPldId_ =
PlayListsDelete'
{ _pldOnBehalfOfContentOwner = Nothing
, _pldId = pPldId_
}
-- | Note: This parameter is intended exclusively for YouTube content
-- partners. The onBehalfOfContentOwner parameter indicates that the
-- request\'s authorization credentials identify a YouTube CMS user who is
-- acting on behalf of the content owner specified in the parameter value.
-- This parameter is intended for YouTube content partners that own and
-- manage many different YouTube channels. It allows content owners to
-- authenticate once and get access to all their video and channel data,
-- without having to provide authentication credentials for each individual
-- channel. The CMS account that the user authenticates with must be linked
-- to the specified YouTube content owner.
pldOnBehalfOfContentOwner :: Lens' PlayListsDelete (Maybe Text)
pldOnBehalfOfContentOwner
= lens _pldOnBehalfOfContentOwner
(\ s a -> s{_pldOnBehalfOfContentOwner = a})
-- | The id parameter specifies the YouTube playlist ID for the playlist that
-- is being deleted. In a playlist resource, the id property specifies the
-- playlist\'s ID.
pldId :: Lens' PlayListsDelete Text
pldId = lens _pldId (\ s a -> s{_pldId = a})
instance GoogleRequest PlayListsDelete where
type Rs PlayListsDelete = ()
type Scopes PlayListsDelete =
'["https://www.googleapis.com/auth/youtube",
"https://www.googleapis.com/auth/youtube.force-ssl",
"https://www.googleapis.com/auth/youtubepartner"]
requestClient PlayListsDelete'{..}
= go (Just _pldId) _pldOnBehalfOfContentOwner
(Just AltJSON)
youTubeService
where go
= buildClient
(Proxy :: Proxy PlayListsDeleteResource)
mempty
|
rueshyna/gogol
|
gogol-youtube/gen/Network/Google/Resource/YouTube/PlayLists/Delete.hs
|
mpl-2.0
| 4,020
| 0
| 13
| 890
| 409
| 249
| 160
| 63
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Config.PutConfigurationRecorder
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates a new configuration recorder to record the resource configurations.
--
-- You can use this action to change the role ('roleARN') of an existing
-- recorder. To change the role, call the action on the existing configuration
-- recorder and specify a role.
--
-- <http://docs.aws.amazon.com/config/latest/APIReference/API_PutConfigurationRecorder.html>
module Network.AWS.Config.PutConfigurationRecorder
(
-- * Request
PutConfigurationRecorder
-- ** Request constructor
, putConfigurationRecorder
-- ** Request lenses
, pcrConfigurationRecorder
-- * Response
, PutConfigurationRecorderResponse
-- ** Response constructor
, putConfigurationRecorderResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.Config.Types
import qualified GHC.Exts
newtype PutConfigurationRecorder = PutConfigurationRecorder
{ _pcrConfigurationRecorder :: ConfigurationRecorder
} deriving (Eq, Read, Show)
-- | 'PutConfigurationRecorder' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'pcrConfigurationRecorder' @::@ 'ConfigurationRecorder'
--
putConfigurationRecorder :: ConfigurationRecorder -- ^ 'pcrConfigurationRecorder'
-> PutConfigurationRecorder
putConfigurationRecorder p1 = PutConfigurationRecorder
{ _pcrConfigurationRecorder = p1
}
-- | The configuration recorder object that records each configuration change made
-- to the resources.
pcrConfigurationRecorder :: Lens' PutConfigurationRecorder ConfigurationRecorder
pcrConfigurationRecorder =
lens _pcrConfigurationRecorder
(\s a -> s { _pcrConfigurationRecorder = a })
data PutConfigurationRecorderResponse = PutConfigurationRecorderResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'PutConfigurationRecorderResponse' constructor.
putConfigurationRecorderResponse :: PutConfigurationRecorderResponse
putConfigurationRecorderResponse = PutConfigurationRecorderResponse
instance ToPath PutConfigurationRecorder where
toPath = const "/"
instance ToQuery PutConfigurationRecorder where
toQuery = const mempty
instance ToHeaders PutConfigurationRecorder
instance ToJSON PutConfigurationRecorder where
toJSON PutConfigurationRecorder{..} = object
[ "ConfigurationRecorder" .= _pcrConfigurationRecorder
]
instance AWSRequest PutConfigurationRecorder where
type Sv PutConfigurationRecorder = Config
type Rs PutConfigurationRecorder = PutConfigurationRecorderResponse
request = post "PutConfigurationRecorder"
response = nullResponse PutConfigurationRecorderResponse
|
kim/amazonka
|
amazonka-config/gen/Network/AWS/Config/PutConfigurationRecorder.hs
|
mpl-2.0
| 3,731
| 0
| 9
| 702
| 349
| 216
| 133
| 50
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.AccountPermissions.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of account permissions.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ DCM/DFA Reporting And Trafficking API Reference> for @dfareporting.accountPermissions.list@.
module Network.Google.Resource.DFAReporting.AccountPermissions.List
(
-- * REST Resource
AccountPermissionsListResource
-- * Creating a Request
, accountPermissionsList
, AccountPermissionsList
-- * Request Lenses
, aplProFileId
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.accountPermissions.list@ method which the
-- 'AccountPermissionsList' request conforms to.
type AccountPermissionsListResource =
"dfareporting" :>
"v2.7" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"accountPermissions" :>
QueryParam "alt" AltJSON :>
Get '[JSON] AccountPermissionsListResponse
-- | Retrieves the list of account permissions.
--
-- /See:/ 'accountPermissionsList' smart constructor.
newtype AccountPermissionsList = AccountPermissionsList'
{ _aplProFileId :: Textual Int64
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AccountPermissionsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aplProFileId'
accountPermissionsList
:: Int64 -- ^ 'aplProFileId'
-> AccountPermissionsList
accountPermissionsList pAplProFileId_ =
AccountPermissionsList'
{ _aplProFileId = _Coerce # pAplProFileId_
}
-- | User profile ID associated with this request.
aplProFileId :: Lens' AccountPermissionsList Int64
aplProFileId
= lens _aplProFileId (\ s a -> s{_aplProFileId = a})
. _Coerce
instance GoogleRequest AccountPermissionsList where
type Rs AccountPermissionsList =
AccountPermissionsListResponse
type Scopes AccountPermissionsList =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient AccountPermissionsList'{..}
= go _aplProFileId (Just AltJSON) dFAReportingService
where go
= buildClient
(Proxy :: Proxy AccountPermissionsListResource)
mempty
|
rueshyna/gogol
|
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/AccountPermissions/List.hs
|
mpl-2.0
| 3,137
| 0
| 13
| 675
| 320
| 194
| 126
| 52
| 1
|
-------------------------------------------------------------------
-- |
-- Module : Data.Boolean.BF
-- Copyright : (C) 2002-2005, 2009 University of New South Wales, (C) 2009-2011 Peter Gammie
-- License : LGPL (see COPYING.LIB for details)
-------------------------------------------------------------------
module Data.Boolean.BF
(
-- * A data-structure based instance of 'Boolean'.
BF(..)
) where
-------------------------------------------------------------------
-- Dependencies.
-------------------------------------------------------------------
import Data.Boolean
-------------------------------------------------------------------
-- | An abstract syntax tree-ish instance of the 'Boolean' interface,
-- sometimes useful for debugging.
--
-- Note the 'Eq' instance is /not/ semantic equality.
data BF = BFtrue
| BFfalse
| BFvar String
| BF `BFand` BF
| BF `BFor` BF
| BF `BFxor` BF
-- | BFite BF BF BF
| BF `BFimplies` BF
| BF `BFiff` BF
| BFneg BF
| BFexists [BF] BF
| BFforall [BF] BF
| BFsubst [(BF, BF)] BF
deriving (Eq, Show)
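-- A hedged illustration (added): the derived Eq above is structural, not
-- semantic, so logically equivalent formulas need not compare equal, e.g.
-- (BFvar "x" `BFand` BFtrue) == BFvar "x" evaluates to False.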
instance BooleanVariable BF where
bvar = BFvar
unbvar (BFvar v) = v
unbvar _ = error $ "BF.unbvar: not a variable."
instance Boolean BF where
false = BFfalse
true = BFtrue
(/\) = BFand
(\/) = BFor
xor = BFxor
-- ite = BFite
(-->) = BFimplies
(<->) = BFiff
neg = BFneg
instance QBF BF where
data Group BF = MkGroup { unMkGroup :: [BF] }
mkGroup = MkGroup
exists = BFexists . unMkGroup
forall = BFforall . unMkGroup
instance Substitution BF where
data Subst BF = MkBFpair [(BF, BF)]
mkSubst = MkBFpair
substitute (MkBFpair s) = BFsubst s
|
m4lvin/hBDD
|
Data/Boolean/BF.hs
|
lgpl-2.1
| 1,797
| 2
| 10
| 451
| 352
| 214
| 138
| -1
| -1
|
{-# LANGUAGE StandaloneDeriving, GADTs, TypeFamilies, MultiParamTypeClasses, GeneralizedNewtypeDeriving, OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module NumberDataSource where
import Data.Hashable
import Haxl.Core
import Data.IORef
import Control.Monad (forM_)
import Text.Printf
import GraphQLHelpers
import qualified Data.HashMap.Strict as HashMap
data NumberRequest a where
AddToNumber :: Int -> NumberRequest ()
FetchCurrentNumber :: NumberRequest NumberObject
deriving instance Eq (NumberRequest a)
deriving instance Show (NumberRequest a)
instance Hashable (NumberRequest a) where
hashWithSalt salt (AddToNumber i) = hashWithSalt salt (0 :: Int, i)
hashWithSalt salt (FetchCurrentNumber) = hashWithSalt salt (1 :: Int)
data NumberObject = NumberObject
{ theNumber :: Int
}
deriving (Show)
instance GraphQLObject NumberObject where
resolveObject (NumberObject v) = HashMap.fromList
[ ("theNumber", knownValue v)
]
instance StateKey NumberRequest where
data State NumberRequest = NumberRequestState (IORef Int)
runNumberRequest :: IORef Int -> NumberRequest a -> ResultVar a -> IO ()
runNumberRequest numberRef (AddToNumber i) var = do
modifyIORef numberRef (+ i)
putSuccess var ()
runNumberRequest numberRef FetchCurrentNumber var = do
NumberObject <$> readIORef numberRef >>= putSuccess var
instance DataSourceName NumberRequest where
dataSourceName _ = "NumberRequestDataSource"
instance Show1 NumberRequest where
show1 (AddToNumber i) = printf "AddToNumber(%i)" i
show1 (FetchCurrentNumber) = "FetchCurrentNumber"
instance DataSource () NumberRequest where
fetch (NumberRequestState numberRef) _ _ reqs = SyncFetch $ do
putStrLn $ "do some number requests: " ++ show [show1 req | BlockedFetch req _ <- reqs]
forM_ reqs $ \(BlockedFetch req var) -> do
runNumberRequest numberRef req var
initializeNumberDataSource :: Int -> IO (State NumberRequest)
initializeNumberDataSource i = NumberRequestState <$> newIORef i
|
dropbox/datagraph
|
src/NumberDataSource.hs
|
apache-2.0
| 2,007
| 0
| 14
| 307
| 548
| 280
| 268
| 44
| 1
|
-- http://www.codewars.com/kata/54c27a33fb7da0db0100040e
module Codewars.Kata.Square where
import Control.Arrow
isSquare :: Integral n => n -> Bool
isSquare = uncurry (==) . ((^2) . round . sqrt . fromIntegral &&& id)
|
Bodigrim/katas
|
src/haskell/7-Youre-a-square.hs
|
bsd-2-clause
| 219
| 0
| 11
| 30
| 66
| 38
| 28
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Ermine.Syntax.ModuleName
( ModuleName(ModuleName)
, mkModuleName
, mkModuleName_
, HasModuleName(..)
) where
import Control.Applicative
import Control.Lens
import Crypto.Hash.MD5 as MD5
import Data.Binary
import Data.Bytes.Serial
import Data.ByteString
import Data.Data
import Data.Function
import Data.Hashable
import Data.Serialize
import Data.Text
import Ermine.Syntax.Digest
import Ermine.Syntax.Name
import GHC.Generics
data ModuleName = ModuleName
{ _digest :: !ByteString
, _package :: !Text
, _name :: !Text
} deriving (Data, Typeable, Generic)
mkModuleName :: Text -> Text -> ModuleName
mkModuleName p m = ModuleName d p m where
d = MD5.finalize $ digest MD5.init p `digest` m
mkModuleName_ :: String -> ModuleName
mkModuleName_ nam = mkModuleName (Data.Text.pack "ermine") (Data.Text.pack nam)
instance Show ModuleName where
showsPrec d (ModuleName _ p n) = showParen (d > 10) $
showString "mkModuleName " . showsPrec 11 p .
showChar ' ' . showsPrec 11 n
instance Read ModuleName where
readsPrec d = readParen (d > 10) $ \r -> do
("mkModuleName", r') <- lex r
(p, r'') <- readsPrec 11 r'
(n, r''') <- readsPrec 11 r''
return (mkModuleName p n, r''')
instance Eq ModuleName where
(==) = (==) `on` _digest
instance Ord ModuleName where
compare = compare `on` _digest
instance Hashable ModuleName where
hashWithSalt s c = hashWithSalt s (_digest c)
instance HasName ModuleName
where name f (ModuleName _ pkg nm) = mkModuleName pkg <$> f nm
class HasModuleName t where
module_ :: Lens' t ModuleName
package :: Lens' t Text
package f = module_ $ \(ModuleName _ pkg nm) -> f pkg <&> \pkg' -> mkModuleName pkg' nm
instance HasModuleName ModuleName where module_ = id
instance Digestable ModuleName where
digest c ModuleName{_digest = d} = update c d
instance Serial ModuleName where
serialize mn = serialize (_digest mn) >> serialize (mn^.package) >> serialize (mn^.name)
deserialize = ModuleName <$> deserialize <*> deserialize <*> deserialize
instance Binary ModuleName where
get = deserialize; put = serialize
instance Serialize ModuleName where
get = deserialize; put = serialize
|
ekmett/ermine
|
src/Ermine/Syntax/ModuleName.hs
|
bsd-2-clause
| 2,285
| 0
| 12
| 444
| 757
| 404
| 353
| 72
| 1
|
{-# LANGUAGE FlexibleContexts, TypeOperators, FlexibleInstances, ScopedTypeVariables #-}
module Data.Persist.Interface
(
-- * Operations on relationships
addRelation
, findAllRelated
, findAllRelated'
, findRelation
, findRelation'
-- * Operations on entities
, find
, update
, findAll
-- * Functions exposed for auto-generated code
, createSchemaEntity_
, createSchemaRelationship_
, Ref (..)
, create_
, Relation (..)
, Persistent (..)
) where
import Data.Maybe (catMaybes)
import Control.Monad (join)
import Control.Applicative
import Generics.Regular
import Data.Persist.Backend.Interface
-- | A reference to an entity of type @a@. Never construct these values manually: the implementation is exposed so that new backends can make use of it.
data Ref a = Ref {refKey :: Int}
deriving Show
-- | Builds an `undefined` value of type @a@. Necessary for the generic functions.
getUndefined :: (Ref a) -> a
getUndefined _ = error "Trying to access getUndefined's value"
-- | Describes a relation between @a@ and @b@. Never construct these values manually.
data Relation a b = Relation { relTableName :: String }
-- | Creates a new value. To assure your database stays correct, never use this function directly.
create_ :: (Regular a, DatabaseRepr (PF a), Persistent p) => a -> p (Ref a)
create_ x = fmap Ref $ createImpl (tableName genX) (toDatabaseValue genX)
where genX = from x
-- | Add a relation between entities of type @a@ and @b@.
addRelation :: Persistent p => Ref a -> Ref b -> Relation a b -> p ()
addRelation (Ref a) (Ref b) (Relation r) = addRelationImpl a b r
-- | Find all entities in the relationship
findAllRelated :: (Regular b, DatabaseRepr (PF b), Persistent p) => Ref a -> Relation a b -> p [b]
findAllRelated ref relation = findRelation ref relation >>= fmap catMaybes . mapM find
-- | Find all entities in the relationship
findAllRelated' :: (Regular a, DatabaseRepr (PF a), Persistent p) => Ref b -> Relation a b -> p [a]
findAllRelated' ref relation = findRelation' ref relation >>= fmap catMaybes . mapM find
findRelation :: Persistent p => Ref a -> Relation a b -> p [Ref b]
findRelation (Ref x) relation = fmap (map Ref) $ findRelationImpl (Left x) (relTableName relation)
findRelation' :: Persistent p => Ref b -> Relation a b -> p [Ref a]
findRelation' (Ref y) relation = fmap (map Ref) $ findRelationImpl (Right y) (relTableName relation)
-- | Find all entities
findAll :: forall p a . (Regular a, DatabaseRepr (PF a), Persistent p) => p [(Ref a, a)]
findAll = do
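  -- The 'if False' branch below is never executed; it only serves to pin the
  -- element type of the result to the type of 'x' from the where clause.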
if False then (return [(Ref 0, x)]) else do
fmap (catMaybes . map convert) $ findAllImpl (tableName genX) (keys x)
where x = (undefined :: a)
genX = from x
convert :: (Regular a, DatabaseRepr (PF a)) => [DBValue] -> Maybe (Ref a, a)
convert (y:ys) = do ref <- Ref <$> dbValueAsInt y
res <- fromDatabaseValue ys
return (ref, to res)
convert _ = Nothing
-- | Create the schema for entities of type @a@. The argument may be undefined (it's only necessary for the type)
createSchemaEntity_ :: (Regular a, DatabaseRepr (PF a), Persistent p) => a -> p ()
createSchemaEntity_ undefinedValue = createSchemaForEntity (tableName $ from undefinedValue) (keys undefinedValue)
-- | Create the schema for the relationship
createSchemaRelationship_ :: Persistent p => Relation a b -> p ()
createSchemaRelationship_ rel = createSchemaForRelationship (relTableName rel)
update :: (Regular a, DatabaseRepr (PF a), Persistent p) => Ref a -> a -> p ()
update (Ref x) value = updateImpl x (tableName genericValue) (toDatabaseValue genericValue)
where genericValue = from value
-- | Finds an entity by reference.
find :: (Regular a, DatabaseRepr (PF a), Persistent p) => Ref a -> p (Maybe a)
find r@(Ref x) = fmap (join . fmap (fmap to . fromDatabaseValue)) $ findImpl x (tableName $ from undefinedValue) (keys undefinedValue)
where undefinedValue = getUndefined r
-- todo rewrite, join should be unnecessary
-- Conversion code
keys :: (Regular a, DatabaseValue (PF a)) => a -> [String]
keys = map fst . toDatabaseValue . from
class DatabaseValue f => DatabaseRepr f where
tableName :: f a -> String
class DatabaseField f where
toField :: f a -> DBValue
fromField :: DBValue -> Maybe (f a)
class DatabaseValue f where
toDatabaseValue :: f a -> [(String, DBValue)]
fromDatabaseValue :: [DBValue] -> Maybe (f a)
instance (DatabaseValue f) => DatabaseValue (C c f) where
toDatabaseValue ~(C x) = toDatabaseValue x
fromDatabaseValue x = C <$> fromDatabaseValue x
instance (DatabaseValue f, DatabaseValue g) => DatabaseValue (f :*: g) where
toDatabaseValue ~(x :*: y) = toDatabaseValue x ++ toDatabaseValue y
fromDatabaseValue (x:xs) = (:*:) <$> fromDatabaseValue [x] <*> fromDatabaseValue xs
fromDatabaseValue _ = Nothing
instance (Selector s, DatabaseField f) => DatabaseValue (S s f) where
toDatabaseValue ~s@(S x) = [(selName s, toField x)]
fromDatabaseValue [x] = S <$> fromField x
fromDatabaseValue _ = Nothing
instance DatabaseField (K String) where
toField ~(K x) = DBString x
fromField (DBString x) = Just (K x)
fromField _ = Nothing
instance DatabaseField (K [Int]) where
toField ~(K x) = DBString $ show x
fromField (DBString x) = Just (K $ read x)
fromField _ = Nothing
instance (Constructor c, DatabaseValue f) => DatabaseRepr (C c f) where
tableName x = conName x
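-- A hypothetical usage sketch, not part of the original module.  It assumes
-- two entity types @User@ and @Post@ with Regular/DatabaseRepr instances
-- (normally supplied by auto-generated code) and a relation @userPosts@;
-- 'create_' is called directly here only for illustration, since the
-- documentation above says the generated wrappers should normally do that:
--
-- > linkAndFetch :: ( Regular User, DatabaseRepr (PF User)
-- >                 , Regular Post, DatabaseRepr (PF Post)
-- >                 , Persistent p )
-- >              => Relation User Post -> User -> Post -> p [Post]
-- > linkAndFetch userPosts u pst = do
-- >   uRef <- create_ u
-- >   pRef <- create_ pst
-- >   addRelation uRef pRef userPosts
-- >   findAllRelated uRef userPosts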
|
chriseidhof/persist
|
src/Data/Persist/Interface.hs
|
bsd-3-clause
| 5,542
| 0
| 14
| 1,160
| 1,850
| 945
| 905
| 92
| 3
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Main where
import Control.Concurrent (threadDelay)
import Control.Exception (Exception)
import Control.Monad (void)
import Control.Monad.Catch (catch, throwM)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Logger (runStderrLoggingT)
import Database.Persist (getBy, insert)
import Database.Persist.TH (mkMigrate, mkPersist, persistLowerCase, share, sqlSettings)
import Database.Persist.Sqlite (runMigration, runSqlConn, withSqliteConn)
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Foo
bar Int
UniqueBar bar
deriving Show
|]
data MyException = MyException
deriving Show
instance Exception MyException
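-- The steps below exercise reusing one connection after an exception: run the
-- migration, insert a row, wait, throw (and catch) an exception from inside
-- 'runSqlConn', wait again, and finally re-run a query on the same backend.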
main :: IO ()
main =
runStderrLoggingT $ withSqliteConn ":memory:" $ \sqlbackend -> do
runSqlConn (runMigration migrateAll) sqlbackend
liftIO $ putStrLn ""
void $ runSqlConn (insert $ Foo 1) sqlbackend
liftIO $ putStrLn ""
liftIO $ threadDelay (60 * 1000000)
runSqlConn (getBy (UniqueBar 1) >> throwM MyException) sqlbackend
`catch` \(e::MyException) -> liftIO $ putStrLn "Caught exception"
liftIO $ putStrLn ""
liftIO $ threadDelay (60 * 1000000)
ret <- runSqlConn (getBy (UniqueBar 1)) sqlbackend
liftIO $ print ret
|
cdepillabout/haskell-sqlitetest-error
|
Main.hs
|
bsd-3-clause
| 1,553
| 0
| 15
| 258
| 391
| 211
| 180
| 37
| 1
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE DataKinds #-}
--
-- Peripheral.hs --- ADC peripheral driver for the STM32F4.
--
-- Copyright (C) 2015, Galois, Inc.
-- All Rights Reserved.
--
module Ivory.BSP.STM32.Peripheral.ADC.Peripheral where
import Ivory.BSP.STM32.Interrupt
import Ivory.BSP.STM32.Peripheral.ADC.Regs
import Ivory.BSP.STM32.Peripheral.ADC.RegTypes
import Ivory.HW
import Ivory.Language
data ADCPeriph = ADCPeriph
{ adcRegSR :: BitDataReg ADC_SR
, adcRegCR1 :: BitDataReg ADC_CR1
, adcRegCR2 :: BitDataReg ADC_CR2
, adcRegSQR1 :: BitDataReg ADC_SQR1
, adcRegSQR2 :: BitDataReg ADC_SQR2
, adcRegSQR3 :: BitDataReg ADC_SQR3
, adcRegDR :: BitDataReg ADC_DR
, adcRCCEnable :: forall eff . Ivory eff ()
, adcRCCDisable :: forall eff . Ivory eff ()
, adcInt :: HasSTM32Interrupt
, adcName :: String
}
mkADCPeriph :: (STM32Interrupt i)
=> Integer -- ^ Base
-> (forall eff . Ivory eff ()) -- ^ RCC Enable
-> (forall eff . Ivory eff ()) -- ^ RCC Disable
-> i -- ^ global adc interrupt. NB: shared with other adc periphs!
-> String -- ^ Name
-> ADCPeriph
mkADCPeriph base rccen rccdis int n =
ADCPeriph
{ adcRegSR = reg 0x00 "sr"
, adcRegCR1 = reg 0x04 "cr1"
, adcRegCR2 = reg 0x08 "cr2"
-- TODO: remaining registers
, adcRegSQR1 = reg 0x2C "sqr1"
, adcRegSQR2 = reg 0x30 "sqr2"
, adcRegSQR3 = reg 0x34 "sqr3"
-- TODO: remaining registers
, adcRegDR = reg 0x4C "dr"
, adcRCCEnable = rccen
, adcRCCDisable = rccdis
, adcInt = HasSTM32Interrupt int
, adcName = n
}
where
reg :: (IvoryIOReg (BitDataRep d)) => Integer -> String -> BitDataReg d
reg offs name = mkBitDataRegNamed (base + offs) (n ++ "->" ++ name)
adcInit :: ADCPeriph
-> ADCResolution -- ^ how many bits of precision to use in conversion?
-> IBool -- ^ left-align converted bits in 16-bit data register?
-> Ivory eff ()
adcInit periph res align = do
adcRCCEnable periph
modifyReg (adcRegCR1 periph) $ do
setField adc_cr1_res res
modifyReg (adcRegCR2 periph) $ do
setField adc_cr2_align $ boolToBit align
setBit adc_cr2_adon
adcStartConversion :: ADCPeriph -> Int -> Ivory eff ()
adcStartConversion periph chan = do
setReg (adcRegSQR3 periph) $ do
setField adc_sqr3_sq1 $ fromRep $ fromIntegral chan
setReg (adcRegSQR1 periph) $ do
setField adc_sqr1_l $ fromRep 1
modifyReg (adcRegCR2 periph) $ do
setBit adc_cr2_swstart
clearBit adc_cr2_eocs
clearBit adc_cr2_dma
clearBit adc_cr2_cont
adcGetConversion :: ADCPeriph -> Ivory eff Uint16
adcGetConversion periph = do
dr <- getReg (adcRegDR periph)
return (toRep (dr #. adc_dr_data))
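-- A hypothetical usage sketch, not part of the original file: given an
-- @ADCPeriph@ built with 'mkADCPeriph', a resolution value from RegTypes and
-- an alignment flag, a single software-triggered conversion on channel 3
-- might look like the following (a real driver must also poll the
-- end-of-conversion flag in ADC_SR between starting and reading):
--
-- > sampleOnce :: ADCPeriph -> ADCResolution -> IBool -> Ivory eff Uint16
-- > sampleOnce periph res align = do
-- >   adcInit periph res align
-- >   adcStartConversion periph 3
-- >   -- ... wait for end of conversion ...
-- >   adcGetConversion periph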
|
GaloisInc/ivory-tower-stm32
|
ivory-bsp-stm32/src/Ivory/BSP/STM32/Peripheral/ADC/Peripheral.hs
|
bsd-3-clause
| 2,875
| 0
| 12
| 688
| 727
| 384
| 343
| 71
| 1
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances,
FlexibleContexts, PolyKinds, TemplateHaskell #-}
module Data.Profunctor.Product.Default
( module Data.Profunctor.Product.Default
, module Data.Profunctor.Product.Default.Class
) where
import Control.Applicative (Const (Const))
import Data.Functor.Identity (Identity (Identity))
import Data.Profunctor (Profunctor, dimap)
-- TODO: vv this imports a lot of names. Should we list them all?
import Data.Profunctor.Product
import Data.Tagged (Tagged (Tagged))
import Data.Profunctor.Product.Default.Class
import Data.Profunctor.Product.Tuples.TH (mkDefaultNs, maxTupleSize)
cdef :: Default (PPOfContravariant u) a a => u a
cdef = unPPOfContravariant def
instance (Profunctor p, Default p a b) => Default p (Identity a) (Identity b)
where
def = dimap (\(Identity a) -> a) Identity def
instance (Profunctor p, Default p a b) => Default p (Const a c) (Const b c')
where
def = dimap (\(Const a) -> a) Const def
instance (Profunctor p, Default p a b) => Default p (Tagged s a) (Tagged s' b)
where
def = dimap (\(Tagged a) -> a) Tagged def
mkDefaultNs (0:[2..maxTupleSize])
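-- A hypothetical example, not part of the original module: the instances
-- above all share one shape, so a 'Default' instance for a user-defined
-- wrapper type can be written with 'dimap' in exactly the same way:
--
-- > newtype Wrapped a = Wrapped a
-- >
-- > instance (Profunctor p, Default p a b) => Default p (Wrapped a) (Wrapped b) where
-- >   def = dimap (\(Wrapped a) -> a) Wrapped def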
|
karamaan/product-profunctors
|
Data/Profunctor/Product/Default.hs
|
bsd-3-clause
| 1,202
| 0
| 10
| 198
| 382
| 217
| 165
| 22
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Waldo.StoryExample (
loadScriptGen
) where
import Control.Monad
import Data.ByteString.Char8 ()
import Data.Text ()
import Waldo.Stalk
import Waldo.Script
import Waldo.Story
import Waldo.CityLoc
loadScriptGen :: IO (PersonalData -> IO Script)
loadScriptGen = do
defaultScript <- loadDefaultScript
stories <- story1example
return $ selectStory (knapsackSizer 100) defaultScript stories
-- If they really defeat our snooping, they get this one.
loadDefaultScript :: IO Script
loadDefaultScript = do
p1 <- loadImagePanels 1 1 0
p2 <- loadImagePanels 1 2 0
p3 <- loadImagePanels 1 3 0
p4 <- loadImagePanels 1 4 0
return $ mkScript "failback" alt $ map head [p1, p2, p3, p4 ]
where
alt = "This is the testiest test ever!"
story1example :: IO [StoryOption]
story1example = do
s1p1c0 <- loadImagePanels 1 1 0
s1p1c1 <- loadImagePanels 1 1 1
s1p1c2 <- loadImagePanels 1 1 2
s1p1c3 <- loadImagePanels 1 1 3
s1p2 <- loadImagePanels 1 2 0
s1p3 <- loadImagePanels 1 3 0
s1p4c0 <- loadImagePanels 1 4 0
s1p4c1 <- loadImagePanels 1 4 1
return [
do
isIn "NA" -- Only for North Americans
p1 <- msum [ orgIs "Massachusetts Institute of Technology" `allocate` s1p1c1
, closeTo sydney `allocate` s1p1c2
, closeTo sanFran `allocate` s1p1c3
, return s1p1c0
]
p4 <- msum [ osIs BSD `allocate` s1p4c1
, osIs Linux `allocate` s1p4c0
]
return $ Story {
storyAltText = "Alt"
, storyPanelSets = [ p1, s1p2, s1p3, p4 ]
, storyPadX = 0, storyPadY = 0
, storyName = "s01"
}
]
|
davean/waldo
|
Waldo/StoryExample.hs
|
bsd-3-clause
| 1,800
| 0
| 15
| 558
| 502
| 256
| 246
| 47
| 1
|
module FoldMapAllTheThings where
import Data.Monoid
-- | Implement toList, minimum and foldr using foldMap (4 kyu)
-- | Link: https://biturl.io/FoldMap
-- | My original solution
myToList :: Foldable t => t a -> [a]
myToList = foldMap (: [])
newtype Min a = Min
{ getMin :: Maybe a
}
-- On GHC 8.4+ Semigroup is a superclass of Monoid, so the combining logic
-- lives in (<>) and 'mappend' simply reuses it.
instance Ord a => Semigroup (Min a) where
  (Min Nothing) <> m = m
  m <> (Min Nothing) = m
  m@(Min (Just x)) <> n@(Min (Just y))
    | x <= y = m
    | otherwise = n
instance Ord a => Monoid (Min a) where
  mempty = Min Nothing
  mappend = (<>)
myMinimum :: (Ord a, Foldable t) => t a -> Maybe a
myMinimum xs = getMin $ foldMap (Min . Just) xs
myFoldr :: Foldable t => (a -> b -> b) -> b -> t a -> b
myFoldr f z t = appEndo (foldMap (Endo . f) t) z
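-- A few usage examples (not part of the original kata solution):
--
-- > myToList (Just 'x') == "x"
-- > myMinimum [3, 1, 2] == Just 1
-- > myMinimum ([] :: [Int]) == Nothing
-- > myFoldr (+) 0 [1, 2, 3] == 6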
|
Eugleo/Code-Wars
|
src/haskell-kata/FoldMapAllTheThings.hs
|
bsd-3-clause
| 707
| 0
| 11
| 169
| 318
| 166
| 152
| 17
| 1
|