code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Fetch
-- Copyright : (c) David Himmelstrup 2005
-- Duncan Coutts 2011
-- License : BSD-like
--
-- Maintainer : cabal-devel@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- The cabal fetch command
-----------------------------------------------------------------------------
module Distribution.Client.Fetch (
fetch,
) where
import Distribution.Client.Types
import Distribution.Client.Targets
import Distribution.Client.FetchUtils hiding (fetchPackage)
import Distribution.Client.Dependency
import Distribution.Client.IndexUtils as IndexUtils
( getSourcePackages, getInstalledPackages )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.Setup
( GlobalFlags(..), FetchFlags(..) )
import Distribution.Package
( packageId )
import Distribution.Simple.Compiler
( Compiler(compilerId), PackageDBStack )
import Distribution.Simple.PackageIndex (PackageIndex)
import Distribution.Simple.Program
( ProgramConfiguration )
import Distribution.Simple.Setup
( fromFlag )
import Distribution.Simple.Utils
( die, notice, debug )
import Distribution.System
( Platform )
import Distribution.Text
( display )
import Distribution.Verbosity
( Verbosity )
import Control.Monad
( filterM )
-- ------------------------------------------------------------
-- * The fetch command
-- ------------------------------------------------------------
--TODO:
-- * add fetch -o support
-- * support tarball URLs via ad-hoc download cache (or in -o mode?)
-- * suggest using --no-deps, unpack or fetch -o if deps cannot be satisfied
-- * Port various flags from install:
-- * --update-dependencies
-- * --constraint and --preference
-- * --only-dependencies, but note it conflicts with --no-deps
-- | Fetch a list of packages and their dependencies.
--
fetch :: Verbosity
      -> PackageDBStack
      -> [Repo]
      -> Compiler
      -> Platform
      -> ProgramConfiguration
      -> GlobalFlags
      -> FetchFlags
      -> [UserTarget]
      -> IO ()
-- With no targets there is nothing to resolve or download.
fetch verbosity _ _ _ _ _ _ _ [] =
    notice verbosity "No packages requested. Nothing to do."
fetch verbosity packageDBs repos comp platform conf
      globalFlags fetchFlags userTargets = do

    -- Reject target forms (remote tarballs) this command cannot handle yet.
    mapM_ checkTarget userTargets

    -- Load the installed- and source-package indexes needed by the planner.
    installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf
    sourcePkgDb       <- getSourcePackages    verbosity repos

    -- Turn command-line targets into concrete package specifiers.
    pkgSpecifiers <- resolveUserTargets verbosity
                       (fromFlag $ globalWorldFile globalFlags)
                       (packageIndex sourcePkgDb)
                       userTargets

    pkgs  <- planPackages
               verbosity comp platform fetchFlags
               installedPkgIndex sourcePkgDb pkgSpecifiers

    -- Keep only packages whose sources are not already available locally.
    pkgs' <- filterM (fmap not . isFetched . packageSource) pkgs
    if null pkgs'
      --TODO: when we add support for remote tarballs then this message
      -- will need to be changed because for remote tarballs we fetch them
      -- at the earlier phase.
      then notice verbosity $ "No packages need to be fetched. "
                           ++ "All the requested packages are already local "
                           ++ "or cached locally."
      else if dryRun
             then notice verbosity $ unlines $
                     "The following packages would be fetched:"
                   : map (display . packageId) pkgs'
             else mapM_ (fetchPackage verbosity . packageSource) pkgs'

  where
    dryRun = fromFlag (fetchDryRun fetchFlags)
-- | Decide which source packages to fetch. With @--dependencies@ a full
-- dependency resolution is run; otherwise only the targets themselves are
-- resolved against the source-package index.
planPackages :: Verbosity
             -> Compiler
             -> Platform
             -> FetchFlags
             -> PackageIndex
             -> SourcePackageDb
             -> [PackageSpecifier SourcePackage]
             -> IO [SourcePackage]
planPackages verbosity comp platform fetchFlags
             installedPkgIndex sourcePkgDb pkgSpecifiers

  | includeDependencies = do
      solver <- chooseSolver verbosity
                  (fromFlag (fetchSolver fetchFlags)) (compilerId comp)
      notice verbosity "Resolving dependencies..."
      -- 'foldProgress' logs intermediate solver messages at debug level and
      -- dies on resolution failure.
      installPlan <- foldProgress logMsg die return $
                       resolveDependencies
                         platform (compilerId comp)
                         solver
                         resolverParams

      -- The packages we want to fetch are those packages the 'InstallPlan'
      -- that are in the 'InstallPlan.Configured' state.
      return
        [ pkg
        | (InstallPlan.Configured (InstallPlan.ConfiguredPackage pkg _ _ _))
            <- InstallPlan.toList installPlan ]

  | otherwise =
      either (die . unlines . map show) return $
        resolveWithoutDependencies resolverParams

  where
    resolverParams =
        setMaxBackjumps (if maxBackjumps < 0 then Nothing
                                             else Just maxBackjumps)
      . setIndependentGoals independentGoals
      . setReorderGoals reorderGoals
      . setShadowPkgs shadowPkgs
        -- Reinstall the targets given on the command line so that the dep
        -- resolver will decide that they need fetching, even if they're
        -- already installed. Since we want to get the source packages of
        -- things we might have installed (but not have the sources for).
      . reinstallTargets
      $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers

    includeDependencies = fromFlag (fetchDeps fetchFlags)
    logMsg message rest  = debug verbosity message >> rest

    reorderGoals     = fromFlag (fetchReorderGoals     fetchFlags)
    independentGoals = fromFlag (fetchIndependentGoals fetchFlags)
    shadowPkgs       = fromFlag (fetchShadowPkgs       fetchFlags)
    maxBackjumps     = fromFlag (fetchMaxBackjumps     fetchFlags)
-- | Reject user targets that the fetch command cannot handle yet.
-- Currently only remote tarball URLs are unsupported; all other target
-- forms are accepted unchanged.
checkTarget :: UserTarget -> IO ()
checkTarget (UserTargetRemoteTarball _uri) =
    die $ "The 'fetch' command does not yet support remote tarballs. "
       ++ "In the meantime you can use the 'unpack' commands."
checkTarget _ = return ()
-- | Download the source for one package. Locations that are already local
-- are a no-op; remote tarball URLs are not supported yet; repo tarballs are
-- fetched into the download cache (the resulting path is discarded).
fetchPackage :: Verbosity -> PackageLocation a -> IO ()
fetchPackage _verbosity (LocalUnpackedPackage _dir)  = return ()
fetchPackage _verbosity (LocalTarballPackage _file)  = return ()
fetchPackage _verbosity (RemoteTarballPackage _uri _) =
    die $ "The 'fetch' command does not yet support remote tarballs. "
       ++ "In the meantime you can use the 'unpack' commands."
fetchPackage verbosity (RepoTarballPackage repo pkgid _) =
    fetchRepoTarball verbosity repo pkgid >> return ()
| jwiegley/ghc-release | libraries/Cabal/cabal-install/Distribution/Client/Fetch.hs | gpl-3.0 | 6,699 | 0 | 16 | 1,801 | 1,108 | 583 | 525 | 121 | 4 |
{-# LANGUAGE RankNTypes, CPP #-}
{-| Provides all lens-related functions.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Lens
( module Control.Lens
, lensWith
, makeCustomLenses
, makeCustomLenses'
, traverseOf2
, mapMOf2
, atSet
) where
import Prelude ()
import Ganeti.Prelude
import Control.Applicative (WrappedMonad(..))
import Control.Lens
import Control.Monad
import Data.Functor.Compose (Compose(..))
import qualified Data.Set as S
import Language.Haskell.TH
-- | Creates an optimized lens where the setter also gets the original value
-- from the getter.
--
-- The setter @sbt@ receives the whole structure @s@, the focused value
-- produced by the getter @sa@, and the new value, so it can reuse parts of
-- the original structure when rebuilding.
lensWith :: (s -> a) -> (s -> a -> b -> t) -> Lens s t a b
lensWith sa sbt f s = uncurry (sbt s) <$> (\a -> fmap ((,) a) (f a)) (sa s)
-- | Derive the name of a generated lens from a record field name by
-- appending an \"L\" suffix.
lensFieldName :: String -> String
lensFieldName field = field ++ "L"
-- | Internal helper method for constructing partial set of lenses.
--
-- Only fields whose unqualified name satisfies the predicate get a lens;
-- each generated lens name carries an \"L\" suffix (see 'lensFieldName').
-- The CPP below adapts to the differing 'lensField' naming-rule signatures
-- across lens package versions.
makeCustomLensesFiltered :: (String -> Bool) -> Name -> Q [Dec]
makeCustomLensesFiltered f = makeLensesWith customRules
  where
    customRules :: LensRules
    customRules = set lensField nameFun lensRules
#if MIN_VERSION_lens(4,5,0)
    nameFun :: Name -> [Name] -> Name -> [DefName]
    nameFun _ _ = liftM (TopName . mkName) . nameFilter . nameBase
#elif MIN_VERSION_lens(4,4,0)
    nameFun :: [Name] -> Name -> [DefName]
    nameFun _ = liftM (TopName . mkName) . nameFilter . nameBase
#else
    nameFun :: String -> Maybe String
    nameFun = nameFilter
#endif
    -- Keep the field name only if the predicate holds, then rename it.
    nameFilter :: (MonadPlus m) => String -> m String
    nameFilter = liftM lensFieldName . mfilter f . return
-- | Create lenses for all fields of a given data type; no field is
-- filtered out.
makeCustomLenses :: Name -> Q [Dec]
makeCustomLenses = makeCustomLensesFiltered (\_ -> True)
-- | Create lenses for some fields of a given data type: only fields whose
-- names appear in the given list get a lens.
makeCustomLenses' :: Name -> [Name] -> Q [Dec]
makeCustomLenses' name lst = makeCustomLensesFiltered isWanted name
  where
    wanted :: S.Set String
    wanted = S.fromList (map nameBase lst)
    isWanted fieldName = fieldName `S.member` wanted
-- | Traverses over a composition of two functors.
-- Most often the @g@ functor is @(,) r@ and 'traverseOf2' is used to
-- traverse an effectful computation that also returns an additional output
-- value.
--
-- Implemented by wrapping the effect in 'Compose' so a plain 'traverseOf'
-- can be used, then unwrapping the result.
traverseOf2 :: Over (->) (Compose f g) s t a b
            -> (a -> f (g b)) -> s -> f (g t)
traverseOf2 k f = getCompose . traverseOf k (Compose . f)
-- | Traverses over a composition of a monad and a functor.
-- See 'traverseOf2'.
--
-- 'WrappedMonad' adapts the monad to the 'Applicative' interface required
-- by 'traverseOf2'.
mapMOf2 :: Over (->) (Compose (WrappedMonad m) g) s t a b
        -> (a -> m (g b)) -> s -> m (g t)
mapMOf2 k f = unwrapMonad . traverseOf2 k (WrapMonad . f)
-- | A helper lens over sets: focus membership of one element as a 'Bool'.
-- While a similar lens exists in the package (as @Lens' Set (Maybe ())@),
-- it's available only in most recent versions, and using @Bool@ instead of
-- @Maybe ()@ is more convenient.
atSet :: (Ord a) => a -> Lens' (S.Set a) Bool
atSet k = lensWith (S.member k) update
  where
    -- Only touch the set when membership actually changes.
    update s wasMember isMember
      | wasMember && not isMember = S.delete k s
      | not wasMember && isMember = S.insert k s
      | otherwise                 = s
| leshchevds/ganeti | src/Ganeti/Lens.hs | bsd-2-clause | 4,190 | 0 | 12 | 814 | 782 | 422 | 360 | 46 | 3 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DeriveGeneric #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main (main, resourcesApp, Widget, WorldId) where
import Control.Concurrent (runInUnboundThread)
import Control.Monad (forM)
import Control.Monad.Logger (runNoLoggingT)
import Control.Monad.Primitive (PrimState)
import Control.Monad.Trans.Resource (InternalState)
import qualified Data.ByteString.Char8 as C8
import Data.Pool (Pool)
import Data.IORef (newIORef)
import Data.Function (on)
import Data.List (sortBy)
import Data.Text (Text)
import Database.Persist
import qualified Database.Persist.Postgresql as Pg
import Database.Persist.Sql
import Database.Persist.TH (mkPersist, mpsGeneric,
persistLowerCase, sqlSettings)
import qualified Network.Wai.Handler.Warp as Warp
import System.Environment (getArgs)
import System.IO.Unsafe (unsafePerformIO)
import qualified System.Random.MWC as R
import Text.Blaze.Html
import Yesod
import Data.Maybe (fromJust)
-- | Persistent entity for the @world@ table (db/queries/updates endpoints).
mkPersist sqlSettings { mpsGeneric = True } [persistLowerCase|
World sql=world
    randomNumber Int sql=randomnumber
|]

-- | Persistent entity for the @Fortune@ table (fortunes endpoint).
mkPersist sqlSettings { mpsGeneric = True } [persistLowerCase|
Fortune sql=Fortune
    message Text sql=message
|]
-- | Serialise a fetched World row as @{"id": ..., "randomNumber": ...}@.
instance ToJSON (Entity World) where
    toJSON (Entity wId wRow) = object
        [ "id"           .= wId
        , "randomNumber" .= (worldRandomNumber wRow)
        ]
-- | Render a fortune's database key in HTML via its underlying integer key.
instance ToMarkup FortuneId where
    toMarkup = toMarkup . fromSqlKey
-- | Application state shared by all handlers.
data App = App
    { appGen    :: !(R.Gen (PrimState IO))  -- ^ mwc-random generator used for random row ids
    , appDbPool :: !(Pool Pg.SqlBackend)    -- ^ PostgreSQL connection pool
    }
-- | Route table for the benchmark endpoints. The @!@-prefixed routes are
-- overlap fallbacks that catch non-integer arguments to \/queries and
-- \/updates.
mkYesod "App" [parseRoutes|
/json JsonR GET
/plaintext PlaintextR GET
/db DbR GET
/queries/#Int QueriesR GET
!/queries/#Text DefaultQueriesR GET
/fortunes FortunesR GET
/updates/#Int UpdatesR GET
!/updates/#Text DefaultUpdatesR GET
|]
-- | A shared dummy resource-state, used to skip per-request 'InternalState'
-- allocation (see 'yesodWithInternalState'). The handlers here never touch
-- resource state, so the 'error' thunk inside is never forced -- if it ever
-- is, that is a bug.
fakeInternalState :: InternalState
fakeInternalState = unsafePerformIO $ newIORef $ error "fakeInternalState forced"
{-# NOINLINE fakeInternalState #-}
-- | Benchmark-tuned Yesod configuration: no sessions, no logging, no
-- middleware, no path cleaning, and no request-body size limit, to
-- minimise per-request overhead.
instance Yesod App where
    makeSessionBackend _ = return Nothing
    {-# INLINE makeSessionBackend #-}
    shouldLog _ _ _ = False
    {-# INLINE shouldLog #-}
    yesodMiddleware = id
    {-# INLINE yesodMiddleware #-}
    cleanPath _ = Right
    {-# INLINE cleanPath #-}
    -- Reuse a shared dummy state instead of allocating one per request.
    yesodWithInternalState _ _ = ($ fakeInternalState)
    {-# INLINE yesodWithInternalState #-}
    maximumContentLength _ _ = Nothing
    {-# INLINE maximumContentLength #-}
-- | \/json endpoint: a constant JSON greeting.
getJsonR :: Handler Value
getJsonR = returnJson (object ["message" .= ("Hello, World!" :: Text)])
-- | Run a persistent action against the application's connection pool.
runPg dbAction = do
    pool <- fmap appDbPool getYesod
    runSqlPool dbAction pool
-- | Look up a single World row whose id is drawn uniformly from [1, 10000].
-- Returns Nothing when the row does not exist.
getRandomRow = do
    app <- getYesod
    randomNumber <- liftIO $ ((R.uniformR (1, 10000) (appGen app)) :: IO Int)
    let wId = (toSqlKey $ fromIntegral randomNumber) :: WorldId
    get wId >>= \case
        Nothing -> return Nothing
        Just x  -> return $ Just (Entity wId x)
-- | \/db endpoint: one random World row as JSON. The benchmark database is
-- expected to contain rows 1..10000, so a miss is treated as fatal.
getDbR :: Handler Value
getDbR = do
    mWorld <- runPg getRandomRow
    case mWorld of
      -- TODO: Throw appropriate HTTP response
      Nothing     -> error "This shouldn't be happening"
      Just worldE -> returnJson worldE
-- | \/queries\/#Int endpoint: fetch @cnt@ random rows, with the count
-- clamped to the benchmark-mandated range [1, 500].
getQueriesR :: Int -> Handler Value
getQueriesR cnt = do
    rows <- runPg (forM [1..clamped] (\_ -> fmap fromJust getRandomRow))
    returnJson rows
  where
    clamped = max 1 (min 500 cnt)
-- | Fallback for non-integer \/queries arguments: behave as one query.
getDefaultQueriesR :: Text -> Handler Value
getDefaultQueriesR = const (getQueriesR 1)
-- | \/fortunes endpoint: load all fortunes, add the extra request-time
-- fortune with id 0, sort by message, and render as an HTML table.
getFortunesR :: Handler Html
getFortunesR = do
    fortunesFromDb <- runPg $ selectList [] []
    let fortunes = sortBy (compare `on` fortuneMessage . entityVal) $ (Entity (toSqlKey 0) Fortune{fortuneMessage="Additional fortune added at request time."}):fortunesFromDb
    defaultLayout $ do
        setTitle "Fortunes"
        [whamlet|
<table>
    <tr>
        <th>id
        <th>message
    $forall fortune <- fortunes
        <tr>
            <td>#{entityKey fortune}
            <td>#{fortuneMessage $ entityVal fortune}
|]
-- | \/updates\/#Int endpoint: fetch @cnt@ random rows (count clamped to
-- [1, 500]), give each a fresh random number, persist the change, and
-- return the updated rows as JSON.
getUpdatesR :: Int -> Handler Value
getUpdatesR cnt = do
    worldRows <- runPg $ forM [1..sanitizedCount] (\_ -> fmap fromJust getRandomRow)
    app <- getYesod
    updatedWorldRows <- runPg $ mapM (replaceWorldRow app) worldRows
    returnJson updatedWorldRows
  where
    -- Clamp the requested count to the benchmark's allowed range.
    sanitizedCount
      | cnt<1     = 1
      | cnt>500   = 500
      | otherwise = cnt
    -- Overwrite one row with a fresh random number and return the new entity.
    replaceWorldRow app (Entity wId wRow) = do
        randomNumber <- liftIO $ ((R.uniformR (1, 10000) (appGen app)) :: IO Int)
        -- TODO: Should I be using replace, or update, or updateGet -- which is
        -- idiomatic Yesod code for this operation?
        let newRow = wRow{worldRandomNumber=randomNumber}
        replace wId newRow
        return (Entity wId newRow)
-- | Fallback for non-integer \/updates arguments: behave as one update.
getDefaultUpdatesR :: Text -> Handler Value
getDefaultUpdatesR = const (getUpdatesR 1)
-- | \/plaintext endpoint: a constant text response.
getPlaintextR :: Handler Text
getPlaintextR =
    let greeting = "Hello, World!" :: Text
    in return greeting
-- | Entry point. Expects two arguments: a core count (currently unused
-- here) and the database host. Builds a 256-connection Postgres pool and
-- serves on port 8000 on all interfaces, ignoring handler exceptions.
main :: IO ()
main = R.withSystemRandom $ \gen -> do
    [cores, host] <- getArgs
    let connString = ("host=" ++ host ++ " port=5432 user=benchmarkdbuser password=benchmarkdbpass dbname=hello_world")
    dbPool <- runNoLoggingT $ Pg.createPostgresqlPool (C8.pack connString) 256
    app <- toWaiAppPlain App
        { appGen    = gen
        , appDbPool = dbPool
        }
    -- Run the server on a bound (OS) thread for more predictable scheduling.
    runInUnboundThread $ Warp.runSettings
        ( Warp.setPort 8000
        $ Warp.setHost "*"
        $ Warp.setOnException (\_ _ -> return ())
          Warp.defaultSettings
        ) app
| saturday06/FrameworkBenchmarks | frameworks/Haskell/yesod/yesod-postgres/src/Main.hs | bsd-3-clause | 6,496 | 0 | 17 | 1,889 | 1,411 | 753 | 658 | -1 | -1 |
module FrontEnd.Tc.Class(
Pred,
ClassHierarchy(),
splitPreds,
generalize,
splitReduce,
topDefaults,
freeMetaVarsPreds,
simplify,
assertEntailment,
assertEquivalant,
Preds
)where
import Control.Monad.Reader
import Data.Monoid
import Util.Std
import qualified Data.Map as Map
import qualified Data.Set as Set
import Doc.PPrint
import FrontEnd.Class
import FrontEnd.Diagnostic
import FrontEnd.Rename
import FrontEnd.Tc.Monad
import FrontEnd.Tc.Type
import Name.Name
import Name.Names
import Options
import Support.CanType
import Util.DocLike
import qualified FlagDump as FD
import qualified FlagOpts as FO
-- | Generalize a type: quantify over the metavariables free in the type
-- but not in the environment, after simplifying and splitting the given
-- predicates. Deferred predicates are re-added to the monad via 'addPreds'.
generalize :: [Pred] -> Rho -> Tc Sigma
generalize ps r = do
    ch <- getClassHierarchy
    r <- flattenType r
    fmvenv <- freeMetaVarsEnv
    let mvs = freeMetaVars r `Set.difference` fmvenv
    --(nps,rp) <- splitPreds ch (Set.toList fmvenv) ps
    (mvs',nps,rp) <- splitReduce fmvenv mvs (simplify ch ps)
    addPreds nps
    quantify mvs' rp r
-- | All metavariables occurring free in any of the given predicates.
freeMetaVarsPreds :: Preds -> Set.Set MetaVar
freeMetaVarsPreds = foldr (Set.union . freeMetaVarsPred) Set.empty
-- | Metavariables occurring free in a single predicate.
freeMetaVarsPred :: Pred -> Set.Set MetaVar
freeMetaVarsPred p = case p of
    IsIn _ t   -> freeMetaVars t
    IsEq t1 t2 -> freeMetaVars t1 `Set.union` freeMetaVars t2
-- | split predicates into ones that only mention metavars in the list vs other ones
--
-- The predicates are first brought to head-normal form and simplified; the
-- first component keeps those whose free metavariables are non-empty and
-- all contained in @fs@, the second component gets the rest.
splitPreds :: Monad m
           => ClassHierarchy
           -> Set.Set MetaVar
           -> Preds
           -> m (Preds, Preds)
splitPreds h fs ps = do
    ps' <- toHnfs h ps
    return $ partition (\p -> let fv = freeMetaVarsPred p in not (Set.null fv) && fv `Set.isSubsetOf` fs) $ simplify h ps'
-- | Bring every predicate to head-normal form, concatenating the results.
toHnfs :: Monad m => ClassHierarchy -> [Pred] -> m [Pred]
toHnfs h ps = fmap concat (mapM (toHnf h) ps)
-- | Bring one predicate to head-normal form by repeatedly reducing it via
-- instance declarations; fails when no instance applies.
toHnf :: Monad m => ClassHierarchy -> Pred -> m [Pred]
toHnf h p
    | inHnf p   = return [p]
    | otherwise =
        case reducePred h p of
          Nothing -> fail $ "No instance for: " ++ (pprint p)
          Just ps -> toHnfs h ps
-- | Is a predicate in head-normal form, i.e. is the head of the
-- constrained type a (meta)variable or an associated type?
inHnf :: Pred -> Bool
inHnf (IsEq _ _) = True
inHnf (IsIn _ t) = headIsVar t
  where
    headIsVar (TVar _)     = True
    headIsVar TMetaVar {}  = True
    headIsVar (TCon _)     = False
    headIsVar (TAp t' _)   = headIsVar t'
    headIsVar (TArrow _ _) = False
    headIsVar TForAll {}   = False
    headIsVar TExists {}   = False
    headIsVar TAssoc {}    = True
-- | Reduce a predicate by some matching instance, yielding the instance's
-- context; the first instance whose head matches wins. Fails when nothing
-- matches. Equality predicates are never reduced here.
reducePred :: Monad m => ClassHierarchy -> Pred -> m [Pred]
reducePred h p@(IsEq t1 t2) = fail "reducePred" -- return [p]
reducePred h p@(IsIn c t)
    | Just x <- foldr mplus Nothing poss = return x
    | otherwise = fail "reducePred"
  where poss = map (byInst p) (instsOf h c)
-- | Drop predicates that are entailed by the remaining ones.
simplify :: ClassHierarchy -> [Pred] -> [Pred]
simplify h = go []
  where
    go kept []     = kept
    go kept (p:ps)
      | entails h (kept ++ ps) p = go kept ps
      | otherwise                = go (p:kept) ps
-- | returns true when set of predicates implies some other predicate is satisfied.
--
-- Either the predicate is among the superclass closure of @ps@, or it can
-- be reduced by an instance whose whole context is itself entailed.
entails :: ClassHierarchy -> [Pred] -> Pred -> Bool
--entails h ps e@(IsEq {}) = error $ pprint (ps,e)
entails h ps p
    | p `elem` concatMap (bySuper h) ps = True
    | otherwise =
        case reducePred h p of
          Nothing -> False
          Just qs -> all (entails h ps) qs
-- | A predicate together with everything implied by its superclasses,
-- transitively. Equality predicates imply only themselves.
bySuper :: ClassHierarchy -> Pred -> [Pred]
bySuper _ p@IsEq {} = [p]
bySuper h p@(IsIn c t) =
    p : [ q | c' <- supersOf h c, q <- bySuper h (IsIn c' t) ]
-- | If the instance head matches the predicate, return the instance's
-- context instantiated with the matching substitution; fails otherwise.
byInst :: Monad m => Pred -> Inst -> m [Pred]
byInst p Inst { instHead = ps :=> h } = do
    u <- matchPred h p
    return (map (inst mempty (Map.fromList [ (tyvarName mv,t) | (mv,t) <- u ])) ps)
-- | Match two class predicates: when the classes agree, match their types;
-- otherwise fail.
matchPred :: Monad m => Pred -> Pred -> m [(Tyvar,Type)]
matchPred px py = case (px, py) of
    (IsIn c t, IsIn c' t') | c == c' -> match t t'
    _ -> fail $ "Classes do not match: " ++ show (px,py)
-- | The direct superclasses of a class, read from the hierarchy.
supersOf :: ClassHierarchy -> Class -> [Class]
supersOf hierarchy cls = asksClassRecord hierarchy cls classSupers
-- | All known instances of a class.
instsOf :: ClassHierarchy -> Class -> [Inst]
--instsOf ch c = asksClassRecord ch c classInsts
instsOf ch c = findClassInsts ch c
-- | One-way type matching: find a substitution on the first type's
-- variables that makes it equal to the second. Thin wrapper around the
-- worker 'match''; the redundant single-expression @do@ block has been
-- dropped.
match :: Monad m => Type -> Type -> m [(Tyvar,Type)]
match x y = match' x y
-- Worker for 'match'. Structural one-way matching; substitutions from the
-- two sides of an application/arrow are simply appended. NOTE: equation
-- order matters -- the @TVar u == TVar t@ case must precede the general
-- variable-binding case below it.
match' (TAp l r) (TAp l' r') = do
    sl <- match l l'
    sr <- match r r'
    return $ mappend sl sr
match' (TArrow l r) (TArrow l' r') = do
    sl <- match l l'
    sr <- match r r'
    return $ mappend sl sr
match' (TVar u) (TVar t) | u == t = return mempty
-- Bind a variable only when the kinds/types of the two sides agree.
match' (TVar mv) t | getType mv == getType t = return [(mv,t)]
--match' (TMetaVar mv) t | kind mv == kind t = return [(mv,t)]
match' (TCon tc1) (TCon tc2) | tc1==tc2 = return mempty
match' t1 t2 = fail $ "match: " ++ show (t1,t2)
splitReduce :: Set.Set MetaVar -- ^ Meta vars from the environment
            -> Set.Set MetaVar -- ^ meta vars being generalized over -- TODO confirm
            -> [Pred]          -- ^ Relevant predicates
            -> Tc ([MetaVar], [Pred], [Pred]) -- ^ (retained meta-vars, deferred predicates, retained predicates)
splitReduce fs gs ps = do
    h <- getClassHierarchy
    --liftIO $ putStrLn $ pprint (fs,gs,ps)
    (ds, rs) <- splitPreds h fs ps
    --liftIO $ putStrLn $ pprint (ds,rs)
    -- Default any remaining ambiguous metavariables, yielding the retained
    -- predicates and a defaulting substitution.
    let (rs',sub) = genDefaults h (fs `Set.union` gs) rs
    --liftIO $ putStrLn $ pprint (rs',sub)
    -- Predicates still mentioning unknown metavariables are ambiguous.
    let leftovers = freeMetaVarsPreds rs' Set.\\ (Set.union fs gs)
    when (not $ Set.null leftovers) $ do
        fail $ "Ambiguous types exist:" <+> pprint rs'
    -- Report each defaulting decision when dumping is enabled.
    flip mapM_ sub $ \ (x,y) -> do
        let msg = "defaulting:" <+> pprint x <+> "=>" <+> prettyPrintType y
        wdump FD.BoxySteps $ liftIO $ putStrLn msg
        --addWarn "type-defaults" msg
    -- Apply the defaulting substitution to the metavariables themselves.
    sequence_ [ varBind x y | (x,y) <- nub sub]
    return (Set.toList gs Util.Std.\\ map fst sub, ds, rs')
-- | Return retained predicates and a defaulting substitution
--
-- For each ambiguous variable with at least one candidate, the first
-- candidate type is chosen; the predicates it discharges are removed from
-- the retained set.
genDefaults :: ClassHierarchy
            -> Set.Set MetaVar  -- ^ Variables to be considered known
            -> [Pred]           -- ^ Predicates to examine
            -> ([Pred], [(MetaVar,Type)])
genDefaults h vs ps = (ps \\ ps', vs')
  where
    -- Ambiguous variables that actually have a defaulting candidate.
    ams = [ (v,qs,t) | (v,qs,t:ts) <- ambig h vs ps ]
    -- Predicates discharged by defaulting.
    ps' = [ p | (v,qs,ts) <- ams, p <-qs ]
    -- The defaulting substitution itself.
    vs' = [ (v,t) | (v,qs,t) <- ams ]
-- ambiguities from THIH + call to candidates
--
-- A metavariable is ambiguous when it occurs free in the predicates but is
-- not among the known variables @vs@.
ambig :: ClassHierarchy
      -> Set.Set MetaVar -- ^ Variables that are to be considered known
      -> [Pred]          -- ^ Predicates to consider
      -> [(MetaVar, [Pred], [Type])] -- ^ List of (ambiguous meta var, predicates involving it, potential defaults)
ambig h vs ps
    = [ (v, qs, defs h v qs) |
        v <- Set.toList (freeMetaVarsPreds ps `Set.difference` vs),
        let qs = [ p | p<-ps, v `Set.member` freeMetaVarsPred p ] ]
-- | Fail with a diagnostic unless @qs@ entails every predicate in @ps@.
-- Offending predicates are unrenamed before being shown to the user.
assertEntailment :: Preds -> Preds -> Tc ()
assertEntailment qs ps = do
--  liftIO $ putStrLn $ "Asserting entailment: " ++ pprint (qs,ps)
    ch <- getClassHierarchy
    let ns = [ p | p <- ps, not $ entails ch qs p ]
    if null ns then return () else do
        -- Translate renamed names back to their source form for the error.
        let f (IsIn c ty) = do
                ty <- denameType ty
                nm <- asks tcImports
                return $ IsIn (unrenameName nm c) ty
            f (IsEq ty1 ty2) = IsEq <$> denameType ty1 <*> denameType ty2
        qs <- mapM f qs
        ns <- mapM f ns
        diagnosis <- asks tcDiagnostics
        fatalError =<< typeError WarnFailure
            (text "Class signature is too weak." <+> (tupled $ map pprint qs) <+> text "does not imply" <+> (tupled $ map pprint ns)) diagnosis
-- | Assert that the two predicate sets entail each other, i.e. that they
-- are equivalent.
assertEquivalant :: Preds -> Preds -> Tc ()
assertEquivalant qs ps = assertEntailment qs ps >> assertEntailment ps qs
{-
reduce :: OptionMonad m => ClassHierarchy -> [Tyvar] -> [Tyvar] -> [Pred] -> m ([Pred], [Pred])
reduce h fs gs ps = do
(ds, rs) <- split h fs ps
rs' <- useDefaults h (fs++gs) rs
return (ds,rs')
-}
-- 'candidates' from THIH
-- | Defaulting candidates for metavariable @v@ under predicates @qs@:
-- produced only when every predicate constrains exactly @v@, every class is
-- a standard class, at least one is numeric, and the candidate type
-- satisfies all the classes.
defs :: ClassHierarchy -> MetaVar -> [Pred] -> [Type]
defs h v qs = [ t | all ((TMetaVar v)==) ts,
                    all (`elem` stdClasses) cs, -- XXX needs fixing
                    any (`elem` numClasses) cs, -- XXX needs fixing
                    t <- defaults,  -- XXX needs fixing
                    and [ entails h [] (IsIn c t) | c <- cs ]]
  where cs = [ c | (IsIn c t) <- qs ]
        ts = [ t | (IsIn c t) <- qs ]
-- FIXME use @default@ declarations!
-- | The built-in defaulting candidates, Integer then Double, or none at
-- all when defaulting is disabled by the options.
defaults :: [Type]
defaults
    | not (fopts FO.Defaulting) = []
    | otherwise =
        [ TCon (Tycon tc kindStar) | tc <- [tc_Integer, tc_Double] ]
-- | Check for top-level ambiguity: fails when some ambiguous metavariable
-- has no defaulting candidate. No substitution is applied here.
topDefaults :: [Pred] -> Tc ()
topDefaults ps = do
    h <- getClassHierarchy
    let ams = ambig h Set.empty ps
        tss = [ ts | (v,qs,ts) <- ams ]
        _vs = [ v | (v,qs,ts) <- ams ]
    when (any null tss) $ fail $ "Top Level ambiguity " ++ (pprint ps)
    return ()
-- | otherwise -> return $ Map.fromList (zip vs (map head tss))
-- where ams = ambig h [] ps
-- tss = [ ts | (v,qs,ts) <- ams ]
-- vs = [ v | (v,qs,ts) <- ams ]
-- | Classes that participate in defaulting, and the subset considered
-- numeric (defaulting requires at least one numeric class to be involved).
numClasses,stdClasses :: [Name]
stdClasses = [
    class_Eq,
    class_Ord,
    class_Enum,
    class_Bounded,
    class_Show,
    class_Read,
    class_Ix,
    class_Functor,
    class_Monad,
    class_Num ,
    class_Real,
    class_Integral,
    class_Fractional,
    class_Floating,
    class_RealFrac,
    class_RealFloat
    ]

numClasses = [
    class_Num ,
    class_Real,
    class_Integral,
    class_Fractional,
    class_Floating,
    class_RealFrac,
    class_RealFloat
    ]
| hvr/jhc | src/FrontEnd/Tc/Class.hs | mit | 9,270 | 0 | 19 | 2,573 | 3,309 | 1,705 | 1,604 | 219 | 8 |
module SDL.Raw.Platform (
-- * Platform Detection
getPlatform
) where
import Control.Monad.IO.Class
import Foreign.C.String
-- Raw FFI binding to @SDL_GetPlatform@.
foreign import ccall "SDL.h SDL_GetPlatform" getPlatform' :: IO CString
-- | Returns the name of the platform as a C string (e.g. \"Windows\",
-- \"Linux\"); the string is owned by SDL -- presumably it must not be
-- freed by the caller (verify against SDL docs).
getPlatform :: MonadIO m => m CString
getPlatform = liftIO getPlatform'
{-# INLINE getPlatform #-}
| bj4rtmar/sdl2 | src/SDL/Raw/Platform.hs | bsd-3-clause | 302 | 0 | 6 | 45 | 65 | 38 | 27 | 8 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, MagicHash #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.List
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : stable
-- Portability : portable
--
-- Operations on lists.
--
-----------------------------------------------------------------------------
module Data.List
(
#ifdef __NHC__
[] (..)
,
#endif
-- * Basic functions
(++) -- :: [a] -> [a] -> [a]
, head -- :: [a] -> a
, last -- :: [a] -> a
, tail -- :: [a] -> [a]
, init -- :: [a] -> [a]
, null -- :: [a] -> Bool
, length -- :: [a] -> Int
-- * List transformations
, map -- :: (a -> b) -> [a] -> [b]
, reverse -- :: [a] -> [a]
, intersperse -- :: a -> [a] -> [a]
, intercalate -- :: [a] -> [[a]] -> [a]
, transpose -- :: [[a]] -> [[a]]
, subsequences -- :: [a] -> [[a]]
, permutations -- :: [a] -> [[a]]
-- * Reducing lists (folds)
, foldl -- :: (a -> b -> a) -> a -> [b] -> a
, foldl' -- :: (a -> b -> a) -> a -> [b] -> a
, foldl1 -- :: (a -> a -> a) -> [a] -> a
, foldl1' -- :: (a -> a -> a) -> [a] -> a
, foldr -- :: (a -> b -> b) -> b -> [a] -> b
, foldr1 -- :: (a -> a -> a) -> [a] -> a
-- ** Special folds
, concat -- :: [[a]] -> [a]
, concatMap -- :: (a -> [b]) -> [a] -> [b]
, and -- :: [Bool] -> Bool
, or -- :: [Bool] -> Bool
, any -- :: (a -> Bool) -> [a] -> Bool
, all -- :: (a -> Bool) -> [a] -> Bool
, sum -- :: (Num a) => [a] -> a
, product -- :: (Num a) => [a] -> a
, maximum -- :: (Ord a) => [a] -> a
, minimum -- :: (Ord a) => [a] -> a
-- * Building lists
-- ** Scans
, scanl -- :: (a -> b -> a) -> a -> [b] -> [a]
, scanl1 -- :: (a -> a -> a) -> [a] -> [a]
, scanr -- :: (a -> b -> b) -> b -> [a] -> [b]
, scanr1 -- :: (a -> a -> a) -> [a] -> [a]
-- ** Accumulating maps
, mapAccumL -- :: (a -> b -> (a,c)) -> a -> [b] -> (a,[c])
, mapAccumR -- :: (a -> b -> (a,c)) -> a -> [b] -> (a,[c])
-- ** Infinite lists
, iterate -- :: (a -> a) -> a -> [a]
, repeat -- :: a -> [a]
, replicate -- :: Int -> a -> [a]
, cycle -- :: [a] -> [a]
-- ** Unfolding
, unfoldr -- :: (b -> Maybe (a, b)) -> b -> [a]
-- * Sublists
-- ** Extracting sublists
, take -- :: Int -> [a] -> [a]
, drop -- :: Int -> [a] -> [a]
, splitAt -- :: Int -> [a] -> ([a], [a])
, takeWhile -- :: (a -> Bool) -> [a] -> [a]
, dropWhile -- :: (a -> Bool) -> [a] -> [a]
, dropWhileEnd -- :: (a -> Bool) -> [a] -> [a]
, span -- :: (a -> Bool) -> [a] -> ([a], [a])
, break -- :: (a -> Bool) -> [a] -> ([a], [a])
, stripPrefix -- :: Eq a => [a] -> [a] -> Maybe [a]
, group -- :: Eq a => [a] -> [[a]]
, inits -- :: [a] -> [[a]]
, tails -- :: [a] -> [[a]]
-- ** Predicates
, isPrefixOf -- :: (Eq a) => [a] -> [a] -> Bool
, isSuffixOf -- :: (Eq a) => [a] -> [a] -> Bool
, isInfixOf -- :: (Eq a) => [a] -> [a] -> Bool
-- * Searching lists
-- ** Searching by equality
, elem -- :: a -> [a] -> Bool
, notElem -- :: a -> [a] -> Bool
, lookup -- :: (Eq a) => a -> [(a,b)] -> Maybe b
-- ** Searching with a predicate
, find -- :: (a -> Bool) -> [a] -> Maybe a
, filter -- :: (a -> Bool) -> [a] -> [a]
, partition -- :: (a -> Bool) -> [a] -> ([a], [a])
-- * Indexing lists
-- | These functions treat a list @xs@ as a indexed collection,
-- with indices ranging from 0 to @'length' xs - 1@.
, (!!) -- :: [a] -> Int -> a
, elemIndex -- :: (Eq a) => a -> [a] -> Maybe Int
, elemIndices -- :: (Eq a) => a -> [a] -> [Int]
, findIndex -- :: (a -> Bool) -> [a] -> Maybe Int
, findIndices -- :: (a -> Bool) -> [a] -> [Int]
-- * Zipping and unzipping lists
, zip -- :: [a] -> [b] -> [(a,b)]
, zip3
, zip4, zip5, zip6, zip7
, zipWith -- :: (a -> b -> c) -> [a] -> [b] -> [c]
, zipWith3
, zipWith4, zipWith5, zipWith6, zipWith7
, unzip -- :: [(a,b)] -> ([a],[b])
, unzip3
, unzip4, unzip5, unzip6, unzip7
-- * Special lists
-- ** Functions on strings
, lines -- :: String -> [String]
, words -- :: String -> [String]
, unlines -- :: [String] -> String
, unwords -- :: [String] -> String
-- ** \"Set\" operations
, nub -- :: (Eq a) => [a] -> [a]
, delete -- :: (Eq a) => a -> [a] -> [a]
, (\\) -- :: (Eq a) => [a] -> [a] -> [a]
, union -- :: (Eq a) => [a] -> [a] -> [a]
, intersect -- :: (Eq a) => [a] -> [a] -> [a]
-- ** Ordered lists
, sort -- :: (Ord a) => [a] -> [a]
, insert -- :: (Ord a) => a -> [a] -> [a]
-- * Generalized functions
-- ** The \"@By@\" operations
-- | By convention, overloaded functions have a non-overloaded
-- counterpart whose name is suffixed with \`@By@\'.
--
-- It is often convenient to use these functions together with
-- 'Data.Function.on', for instance @'sortBy' ('compare'
-- \`on\` 'fst')@.
-- *** User-supplied equality (replacing an @Eq@ context)
-- | The predicate is assumed to define an equivalence.
, nubBy -- :: (a -> a -> Bool) -> [a] -> [a]
, deleteBy -- :: (a -> a -> Bool) -> a -> [a] -> [a]
, deleteFirstsBy -- :: (a -> a -> Bool) -> [a] -> [a] -> [a]
, unionBy -- :: (a -> a -> Bool) -> [a] -> [a] -> [a]
, intersectBy -- :: (a -> a -> Bool) -> [a] -> [a] -> [a]
, groupBy -- :: (a -> a -> Bool) -> [a] -> [[a]]
-- *** User-supplied comparison (replacing an @Ord@ context)
-- | The function is assumed to define a total ordering.
, sortBy -- :: (a -> a -> Ordering) -> [a] -> [a]
, insertBy -- :: (a -> a -> Ordering) -> a -> [a] -> [a]
, maximumBy -- :: (a -> a -> Ordering) -> [a] -> a
, minimumBy -- :: (a -> a -> Ordering) -> [a] -> a
-- ** The \"@generic@\" operations
-- | The prefix \`@generic@\' indicates an overloaded function that
-- is a generalized version of a "Prelude" function.
, genericLength -- :: (Integral a) => [b] -> a
, genericTake -- :: (Integral a) => a -> [b] -> [b]
, genericDrop -- :: (Integral a) => a -> [b] -> [b]
, genericSplitAt -- :: (Integral a) => a -> [b] -> ([b], [b])
, genericIndex -- :: (Integral a) => [b] -> a -> b
, genericReplicate -- :: (Integral a) => a -> b -> [b]
) where
#ifdef __NHC__
import Prelude
#endif
import Data.Maybe
import Data.Char ( isSpace )
#ifdef __GLASGOW_HASKELL__
import GHC.Num
import GHC.Real
import GHC.List
import GHC.Base
#endif
infix 5 \\ -- comment to fool cpp
-- -----------------------------------------------------------------------------
-- List functions
-- | The 'dropWhileEnd' function drops the largest suffix of a list
-- in which the given predicate holds for all elements. For example:
--
-- > dropWhileEnd isSpace "foo\n" == "foo"
-- > dropWhileEnd isSpace "foo bar" == "foo bar"
-- > dropWhileEnd isSpace ("foo\n" ++ undefined) == "foo" ++ undefined
dropWhileEnd :: (a -> Bool) -> [a] -> [a]
dropWhileEnd p = foldr keep []
  where
    -- An element is dropped when it satisfies @p@ and everything after it
    -- has already been dropped; otherwise it (and its tail) is kept.
    keep x rest
      | p x && null rest = []
      | otherwise        = x : rest
-- | The 'stripPrefix' function drops the given prefix from a list.
-- It returns 'Nothing' if the list did not start with the prefix
-- given, or 'Just' the list after the prefix, if it does.
--
-- > stripPrefix "foo" "foobar" == Just "bar"
-- > stripPrefix "foo" "foo"    == Just ""
-- > stripPrefix "foo" "barfoo" == Nothing
-- > stripPrefix "foo" "barfoobaz" == Nothing
stripPrefix :: Eq a => [a] -> [a] -> Maybe [a]
stripPrefix prefix str = go prefix str
  where
    go [] rest = Just rest
    go (p:ps) (c:cs)
      | p == c = go ps cs
    go _ _ = Nothing
-- | The 'elemIndex' function returns the index of the first element
-- in the given list which is equal (by '==') to the query element,
-- or 'Nothing' if there is no such element.
--
-- Defined in terms of 'findIndex' with an equality section.
elemIndex :: Eq a => a -> [a] -> Maybe Int
elemIndex x xs = findIndex (x ==) xs
-- | The 'elemIndices' function extends 'elemIndex', by returning the
-- indices of all elements equal to the query element, in ascending order.
elemIndices :: Eq a => a -> [a] -> [Int]
elemIndices x xs = findIndices (x ==) xs
-- | The 'find' function takes a predicate and a list and returns the
-- first element in the list matching the predicate, or 'Nothing' if
-- there is no such element.
--
-- The right fold short-circuits at the first match, so this works on
-- infinite lists too.
find :: (a -> Bool) -> [a] -> Maybe a
find p = foldr pick Nothing
  where
    pick x acc
      | p x       = Just x
      | otherwise = acc
-- | The 'findIndex' function takes a predicate and a list and returns
-- the index of the first element in the list satisfying the predicate,
-- or 'Nothing' if there is no such element.
findIndex :: (a -> Bool) -> [a] -> Maybe Int
findIndex p xs = case findIndices p xs of
    []      -> Nothing
    (i : _) -> Just i
-- | The 'findIndices' function extends 'findIndex', by returning the
-- indices of all elements satisfying the predicate, in ascending order.
findIndices :: (a -> Bool) -> [a] -> [Int]
#if defined(USE_REPORT_PRELUDE) || !defined(__GLASGOW_HASKELL__)
findIndices p xs = [ i | (x,i) <- zip xs [0..], p x]
#else
-- Efficient definition
-- Accumulates the index as an unboxed Int# to avoid thunk build-up.
findIndices p ls = loop 0# ls
  where
    loop _ [] = []
    loop n (x:xs) | p x       = I# n : loop (n +# 1#) xs
                  | otherwise = loop (n +# 1#) xs
#endif  /* USE_REPORT_PRELUDE */
-- | The 'isPrefixOf' function takes two lists and returns 'True'
-- iff the first list is a prefix of the second.
isPrefixOf :: (Eq a) => [a] -> [a] -> Bool
isPrefixOf []       _  = True
isPrefixOf _        [] = False
isPrefixOf (p : ps) (c : cs)
  | p == c    = isPrefixOf ps cs
  | otherwise = False
-- | The 'isSuffixOf' function takes two lists and returns 'True'
-- iff the first list is a suffix of the second.
-- Both lists must be finite.
isSuffixOf :: (Eq a) => [a] -> [a] -> Bool
isSuffixOf suffix whole = go (reverse suffix) (reverse whole)
  where
    -- A suffix check is a prefix check on the reversed lists.
    go []       _        = True
    go _        []       = False
    go (a : as) (b : bs) = a == b && go as bs
-- | The 'isInfixOf' function takes two lists and returns 'True'
-- iff the first list is contained, wholly and intact,
-- anywhere within the second.
--
-- > isInfixOf "Haskell" "I really like Haskell." == True
-- > isInfixOf "Ial"     "I really like Haskell." == False
isInfixOf :: (Eq a) => [a] -> [a] -> Bool
isInfixOf needle haystack = go (tails haystack)
  where
    -- Try the needle as a prefix of every suffix of the haystack.
    go []       = False
    go (t : ts) = needle `isPrefixOf` t || go ts
-- | /O(n^2)/. The 'nub' function removes duplicate elements from a list.
-- In particular, it keeps only the first occurrence of each element.
-- (The name 'nub' means \`essence\'.)
-- It is a special case of 'nubBy', which allows the programmer to supply
-- their own equality test.
nub :: (Eq a) => [a] -> [a]
#ifdef USE_REPORT_PRELUDE
nub = nubBy (==)
#else
-- stolen from HBC
-- Walks the list once, carrying the set of elements already emitted;
-- each candidate is checked against that accumulator with 'elem',
-- which gives the quadratic worst case noted above.
nub l = nub' l []             -- '
  where
    nub' [] _ = []            -- '
    nub' (x:xs) ls            -- '
      | x `elem` ls = nub' xs ls        -- '
      | otherwise   = x : nub' xs (x:ls)  -- '
#endif
-- | The 'nubBy' function behaves just like 'nub', except it uses a
-- user-supplied equality predicate instead of the overloaded '=='
-- function.
nubBy :: (a -> a -> Bool) -> [a] -> [a]
#ifdef USE_REPORT_PRELUDE
nubBy eq [] = []
nubBy eq (x:xs) = x : nubBy eq (filter (\ y -> not (eq x y)) xs)
#else
nubBy eq l = nubBy' l []
  where
    nubBy' [] _ = []
    nubBy' (y:ys) xs
      | elem_by eq y xs = nubBy' ys xs
      | otherwise       = y : nubBy' ys (y:xs)
-- Not exported:
-- Note that we keep the call to `eq` with arguments in the
-- same order as in the reference implementation
-- 'xs' is the list of things we've seen so far,
-- 'y' is the potential new element
-- (argument order matters because a user-supplied predicate need not
-- be symmetric).
elem_by :: (a -> a -> Bool) -> a -> [a] -> Bool
elem_by _ _ [] = False
elem_by eq y (x:xs) = y `eq` x || elem_by eq y xs
#endif
-- | 'delete' @x@ removes the first occurrence of @x@ from its list argument.
-- For example,
--
-- > delete 'a' "banana" == "bnana"
--
-- It is a special case of 'deleteBy', which allows the programmer to
-- supply their own equality test.
delete :: (Eq a) => a -> [a] -> [a]
delete x xs = deleteBy (==) x xs
-- | The 'deleteBy' function behaves like 'delete', but takes a
-- user-supplied equality predicate.
deleteBy :: (a -> a -> Bool) -> a -> [a] -> [a]
deleteBy _  _ [] = []
deleteBy eq x (y : ys)
  | x `eq` y  = ys
  | otherwise = y : deleteBy eq x ys
-- | The '\\' function is list difference (non-associative).
-- In the result of @xs@ '\\' @ys@, the first occurrence of each element of
-- @ys@ in turn (if any) has been removed from @xs@. Thus
--
-- > (xs ++ ys) \\ xs == ys.
--
-- It is a special case of 'deleteFirstsBy', which allows the programmer
-- to supply their own equality test.
(\\) :: (Eq a) => [a] -> [a] -> [a]
xs \\ ys = foldl (flip delete) xs ys
-- | The 'union' function returns the list union of the two lists.
-- For example,
--
-- > "dog" `union` "cow" == "dogcw"
--
-- Duplicates, and elements of the first list, are removed from the
-- second list, but if the first list contains duplicates, so will
-- the result.
-- It is a special case of 'unionBy', which allows the programmer to supply
-- their own equality test.
union :: (Eq a) => [a] -> [a] -> [a]
union xs ys = unionBy (==) xs ys
-- | The 'unionBy' function is the non-overloaded version of 'union'.
unionBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
unionBy eq xs ys = xs ++ extras
  where
    -- Keep only those elements of ys that are neither duplicated
    -- within ys nor already present in xs.
    extras = foldl (flip (deleteBy eq)) (nubBy eq ys) xs
-- | The 'intersect' function takes the list intersection of two lists.
-- For example,
--
-- > [1,2,3,4] `intersect` [2,4,6,8] == [2,4]
--
-- If the first list contains duplicates, so will the result.
--
-- > [1,2,2,3,4] `intersect` [6,4,4,2] == [2,2,4]
--
-- It is a special case of 'intersectBy', which allows the programmer to
-- supply their own equality test. If the element is found in both the first
-- and the second list, the element from the first list will be used.
intersect :: (Eq a) => [a] -> [a] -> [a]
intersect xs ys = intersectBy (==) xs ys
-- | The 'intersectBy' function is the non-overloaded version of 'intersect'.
intersectBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
intersectBy _  [] _  = []
intersectBy _  _  [] = []
intersectBy eq xs ys = filter (\x -> any (eq x) ys) xs
-- | The 'intersperse' function takes an element and a list and
-- \`intersperses\' that element between the elements of the list.
-- For example,
--
-- > intersperse ',' "abcde" == "a,b,c,d,e"
intersperse :: a -> [a] -> [a]
intersperse _   []       = []
intersperse sep (x : xs) = x : go xs
  where
    -- Prepend the separator before every remaining element; a plain
    -- cons-producing loop keeps each element available as soon as the
    -- input supplies it.
    go []       = []
    go (y : ys) = sep : y : go ys
-- Not exported:
-- Prepend the separator before every element of the list.  Kept as a
-- top-level helper (rather than a local loop) so each interspersed
-- element becomes available as early as possible.
prependToAll :: a -> [a] -> [a]
prependToAll sep = go
  where
    go []       = []
    go (y : ys) = sep : y : go ys
-- | 'intercalate' @xs xss@ is equivalent to @('concat' ('intersperse' xs xss))@.
-- It inserts the list @xs@ in between the lists in @xss@ and concatenates the
-- result.
intercalate :: [a] -> [[a]] -> [a]
intercalate sep = concat . intersperse sep
-- | The 'transpose' function transposes the rows and columns of its argument.
-- For example,
--
-- > transpose [[1,2,3],[4,5,6]] == [[1,4],[2,5],[3,6]]
transpose :: [[a]] -> [[a]]
transpose []                 = []
transpose ([]       : rows)  = transpose rows
transpose ((x : xs) : rows)  = column : transpose (xs : remainders)
  where
    -- Heads of all non-empty rows form the next output column;
    -- exhausted rows simply drop out of the comprehension.
    column     = x : [h | (h : _) <- rows]
    remainders = [t | (_ : t) <- rows]
-- | The 'partition' function takes a predicate a list and returns
-- the pair of lists of elements which do and do not satisfy the
-- predicate, respectively; i.e.,
--
-- > partition p xs == (filter p xs, filter (not . p) xs)
partition :: (a -> Bool) -> [a] -> ([a],[a])
{-# INLINE partition #-}
partition p xs = foldr step ([], []) xs
  where
    -- The lazy pattern on the accumulated pair keeps the fold
    -- productive: neither half is forced before it is consumed.
    step item ~(yes, no)
      | p item    = (item : yes, no)
      | otherwise = (yes, item : no)
-- Route one element into the matching half of an accumulated
-- (satisfied, unsatisfied) pair; the pair pattern is lazy so a
-- surrounding 'foldr' stays productive.
select :: (a -> Bool) -> a -> ([a], [a]) -> ([a], [a])
select p x ~(ts, fs) = if p x then (x : ts, fs) else (ts, x : fs)
-- | The 'mapAccumL' function behaves like a combination of 'map' and
-- 'foldl'; it applies a function to each element of a list, passing
-- an accumulating parameter from left to right, and returning a final
-- value of this accumulator together with the new list.
mapAccumL :: (acc -> x -> (acc, y)) -- ^ step: old accumulator and input
                                    --   element to new accumulator and
                                    --   output element
          -> acc                    -- ^ initial accumulator
          -> [x]                    -- ^ input list
          -> (acc, [y])             -- ^ final accumulator and result list
mapAccumL _ acc []       = (acc, [])
mapAccumL f acc (x : xs) = (accOut, y : ys)
  where
    (acc', y)    = f acc x
    (accOut, ys) = mapAccumL f acc' xs
-- | The 'mapAccumR' function behaves like a combination of 'map' and
-- 'foldr'; it applies a function to each element of a list, passing
-- an accumulating parameter from right to left, and returning a final
-- value of this accumulator together with the new list.
mapAccumR :: (acc -> x -> (acc, y)) -- ^ step: old accumulator and input
                                    --   element to new accumulator and
                                    --   output element
          -> acc                    -- ^ initial accumulator
          -> [x]                    -- ^ input list
          -> (acc, [y])             -- ^ final accumulator and result list
mapAccumR _ acc []       = (acc, [])
mapAccumR f acc (x : xs) = (accOut, y : ys)
  where
    -- The accumulator flows out of the recursion on the tail first,
    -- so the head is processed last (right-to-left).
    (accOut, y) = f accIn x
    (accIn, ys) = mapAccumR f acc xs
-- | The 'insert' function takes an element and a list and inserts the
-- element into the list at the first position where it is less
-- than or equal to the next element. In particular, if the list
-- is sorted before the call, the result will also be sorted.
-- It is a special case of 'insertBy', which allows the programmer to
-- supply their own comparison function.
insert :: Ord a => a -> [a] -> [a]
insert e ls = insertBy compare e ls
-- | The non-overloaded version of 'insert'.
insertBy :: (a -> a -> Ordering) -> a -> [a] -> [a]
insertBy _   x [] = [x]
insertBy cmp x whole@(y : rest)
  | cmp x y == GT = y : insertBy cmp x rest
  | otherwise     = x : whole
#ifdef __GLASGOW_HASKELL__
-- | 'maximum' returns the maximum value from a list,
-- which must be non-empty, finite, and of an ordered type.
-- It is a special case of 'Data.List.maximumBy', which allows the
-- programmer to supply their own comparison function.
maximum :: (Ord a) => [a] -> a
maximum [] = errorEmptyList "maximum"
maximum xs = foldl1 max xs

-- Rewrite to the accumulator-strict variant at the Int/Integer types,
-- where strictness cannot be observed but avoids building thunks.
{-# RULES
  "maximumInt"     maximum = (strictMaximum :: [Int]     -> Int);
  "maximumInteger" maximum = (strictMaximum :: [Integer] -> Integer)
 #-}

-- We can't make the overloaded version of maximum strict without
-- changing its semantics (max might not be strict), but we can for
-- the version specialised to 'Int'.
strictMaximum :: (Ord a) => [a] -> a
strictMaximum [] = errorEmptyList "maximum"
strictMaximum xs = foldl1' max xs

-- | 'minimum' returns the minimum value from a list,
-- which must be non-empty, finite, and of an ordered type.
-- It is a special case of 'Data.List.minimumBy', which allows the
-- programmer to supply their own comparison function.
minimum :: (Ord a) => [a] -> a
minimum [] = errorEmptyList "minimum"
minimum xs = foldl1 min xs

{-# RULES
  "minimumInt"     minimum = (strictMinimum :: [Int]     -> Int);
  "minimumInteger" minimum = (strictMinimum :: [Integer] -> Integer)
 #-}

-- Accumulator-strict variant targeted by the rules above; same
-- caveat as 'strictMaximum'.
strictMinimum :: (Ord a) => [a] -> a
strictMinimum [] = errorEmptyList "minimum"
strictMinimum xs = foldl1' min xs
#endif /* __GLASGOW_HASKELL__ */
-- | The 'maximumBy' function takes a comparison function and a list
-- and returns the greatest element of the list by the comparison function.
-- The list must be finite and non-empty.
maximumBy :: (a -> a -> Ordering) -> [a] -> a
maximumBy _   [] = error "List.maximumBy: empty list"
maximumBy cmp xs = foldl1 pick xs
  where
    -- On EQ the later element wins, matching the reference behaviour.
    pick x y
      | cmp x y == GT = x
      | otherwise     = y
-- | The 'minimumBy' function takes a comparison function and a list
-- and returns the least element of the list by the comparison function.
-- The list must be finite and non-empty.
minimumBy :: (a -> a -> Ordering) -> [a] -> a
minimumBy _   [] = error "List.minimumBy: empty list"
minimumBy cmp xs = foldl1 pick xs
  where
    -- On EQ the earlier element wins, matching the reference behaviour.
    pick x y
      | cmp x y == GT = y
      | otherwise     = x
-- | The 'genericLength' function is an overloaded version of 'length'. In
-- particular, instead of returning an 'Int', it returns any type which is
-- an instance of 'Num'. It is, however, less efficient than 'length'.
genericLength :: (Num i) => [b] -> i
genericLength []    = 0
genericLength (_:l) = 1 + genericLength l

-- At Int/Integer results laziness in the running total only builds
-- thunks, so rewrite to the accumulator-strict version.
{-# RULES
  "genericLengthInt"     genericLength = (strictGenericLength :: [a] -> Int);
  "genericLengthInteger" genericLength = (strictGenericLength :: [a] -> Integer);
 #-}
-- Accumulator-strict counterpart of 'genericLength', targeted by the
-- rewrite rules for 'Int' and 'Integer' results.
strictGenericLength :: (Num i) => [b] -> i
strictGenericLength xs0 = count xs0 0
  where
    count []       acc = acc
    count (_ : ys) acc = let acc' = acc + 1
                         in  acc' `seq` count ys acc'
-- | The 'genericTake' function is an overloaded version of 'take', which
-- accepts any 'Integral' value as the number of elements to take.
genericTake :: (Integral i) => i -> [a] -> [a]
genericTake count list
  | count <= 0 = []
  | otherwise  = case list of
      []     -> []
      x : xs -> x : genericTake (count - 1) xs
-- | The 'genericDrop' function is an overloaded version of 'drop', which
-- accepts any 'Integral' value as the number of elements to drop.
genericDrop :: (Integral i) => i -> [a] -> [a]
genericDrop count list
  | count <= 0 = list
  | otherwise  = case list of
      []    -> []
      _ : t -> genericDrop (count - 1) t
-- | The 'genericSplitAt' function is an overloaded version of 'splitAt', which
-- accepts any 'Integral' value as the position at which to split.
genericSplitAt :: (Integral i) => i -> [b] -> ([b],[b])
genericSplitAt count list
  | count <= 0 = ([], list)
genericSplitAt _     []       = ([], [])
genericSplitAt count (x : xs) = (x : front, back)
  where
    (front, back) = genericSplitAt (count - 1) xs
-- | The 'genericIndex' function is an overloaded version of '!!', which
-- accepts any 'Integral' value as the index.
genericIndex :: (Integral a) => [b] -> a -> b
genericIndex list i = case list of
  x : rest
    | i == 0    -> x
    | i > 0     -> genericIndex rest (i - 1)
    | otherwise -> error "List.genericIndex: negative argument."
  -- Running off the end of the list (for any index, matching the
  -- original clause order) reports an out-of-range index.
  _ -> error "List.genericIndex: index too large."
-- | The 'genericReplicate' function is an overloaded version of 'replicate',
-- which accepts any 'Integral' value as the number of repetitions to make.
genericReplicate :: (Integral i) => i -> a -> [a]
genericReplicate count = genericTake count . repeat
-- | The 'zip4' function takes four lists and returns a list of
-- quadruples, analogous to 'zip'.
zip4 :: [a] -> [b] -> [c] -> [d] -> [(a,b,c,d)]
zip4 as bs cs ds = zipWith4 (\a b c d -> (a, b, c, d)) as bs cs ds
-- | The 'zip5' function takes five lists and returns a list of
-- five-tuples, analogous to 'zip'.
zip5 :: [a] -> [b] -> [c] -> [d] -> [e] -> [(a,b,c,d,e)]
zip5 as bs cs ds es = zipWith5 (\a b c d e -> (a, b, c, d, e)) as bs cs ds es
-- | The 'zip6' function takes six lists and returns a list of six-tuples,
-- analogous to 'zip'.
zip6 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [(a,b,c,d,e,f)]
zip6 as bs cs ds es fs =
  zipWith6 (\a b c d e f -> (a, b, c, d, e, f)) as bs cs ds es fs
-- | The 'zip7' function takes seven lists and returns a list of
-- seven-tuples, analogous to 'zip'.
zip7 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [g] -> [(a,b,c,d,e,f,g)]
zip7 as bs cs ds es fs gs =
  zipWith7 (\a b c d e f g -> (a, b, c, d, e, f, g)) as bs cs ds es fs gs
-- | The 'zipWith4' function takes a function which combines four
-- elements, as well as four lists and returns a list of their point-wise
-- combination, analogous to 'zipWith'.  The result is as long as the
-- shortest input list.
zipWith4 :: (a->b->c->d->e) -> [a]->[b]->[c]->[d]->[e]
zipWith4 f (w:ws) (x:xs) (y:ys) (z:zs) = f w x y z : zipWith4 f ws xs ys zs
zipWith4 _ _      _      _      _      = []
-- | The 'zipWith5' function takes a function which combines five
-- elements, as well as five lists and returns a list of their point-wise
-- combination, analogous to 'zipWith'.  The result is as long as the
-- shortest input list.
zipWith5 :: (a->b->c->d->e->f) -> [a]->[b]->[c]->[d]->[e]->[f]
zipWith5 f (v:vs) (w:ws) (x:xs) (y:ys) (z:zs) =
  f v w x y z : zipWith5 f vs ws xs ys zs
zipWith5 _ _ _ _ _ _ = []
-- | The 'zipWith6' function takes a function which combines six
-- elements, as well as six lists and returns a list of their point-wise
-- combination, analogous to 'zipWith'.  The result is as long as the
-- shortest input list.
zipWith6 :: (a->b->c->d->e->f->g) -> [a]->[b]->[c]->[d]->[e]->[f]->[g]
zipWith6 f (u:us) (v:vs) (w:ws) (x:xs) (y:ys) (z:zs) =
  f u v w x y z : zipWith6 f us vs ws xs ys zs
zipWith6 _ _ _ _ _ _ _ = []
-- | The 'zipWith7' function takes a function which combines seven
-- elements, as well as seven lists and returns a list of their point-wise
-- combination, analogous to 'zipWith'.  The result is as long as the
-- shortest input list.
zipWith7 :: (a->b->c->d->e->f->g->h) -> [a]->[b]->[c]->[d]->[e]->[f]->[g]->[h]
zipWith7 f (t:ts) (u:us) (v:vs) (w:ws) (x:xs) (y:ys) (z:zs) =
  f t u v w x y z : zipWith7 f ts us vs ws xs ys zs
zipWith7 _ _ _ _ _ _ _ _ = []
-- | The 'unzip4' function takes a list of quadruples and returns four
-- lists, analogous to 'unzip'.
unzip4 :: [(a,b,c,d)] -> ([a],[b],[c],[d])
unzip4 = foldr step ([], [], [], [])
  where
    -- The lazy pattern keeps the four result lists incremental.
    step (a, b, c, d) ~(as, bs, cs, ds) = (a : as, b : bs, c : cs, d : ds)
-- | The 'unzip5' function takes a list of five-tuples and returns five
-- lists, analogous to 'unzip'.
unzip5 :: [(a,b,c,d,e)] -> ([a],[b],[c],[d],[e])
unzip5 = foldr step ([], [], [], [], [])
  where
    step (a, b, c, d, e) ~(as, bs, cs, ds, es) =
      (a : as, b : bs, c : cs, d : ds, e : es)
-- | The 'unzip6' function takes a list of six-tuples and returns six
-- lists, analogous to 'unzip'.
unzip6 :: [(a,b,c,d,e,f)] -> ([a],[b],[c],[d],[e],[f])
unzip6 = foldr step ([], [], [], [], [], [])
  where
    step (a, b, c, d, e, f) ~(as, bs, cs, ds, es, fs) =
      (a : as, b : bs, c : cs, d : ds, e : es, f : fs)
-- | The 'unzip7' function takes a list of seven-tuples and returns
-- seven lists, analogous to 'unzip'.
unzip7 :: [(a,b,c,d,e,f,g)] -> ([a],[b],[c],[d],[e],[f],[g])
unzip7 = foldr step ([], [], [], [], [], [], [])
  where
    step (a, b, c, d, e, f, g) ~(as, bs, cs, ds, es, fs, gs) =
      (a : as, b : bs, c : cs, d : ds, e : es, f : fs, g : gs)
-- | The 'deleteFirstsBy' function takes a predicate and two lists and
-- returns the first list with the first occurrence of each element of
-- the second list removed.
deleteFirstsBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
deleteFirstsBy eq xs ys = foldl (flip (deleteBy eq)) xs ys
-- | The 'group' function takes a list and returns a list of lists such
-- that the concatenation of the result is equal to the argument. Moreover,
-- each sublist in the result contains only equal elements. For example,
--
-- > group "Mississippi" = ["M","i","ss","i","ss","i","pp","i"]
--
-- It is a special case of 'groupBy', which allows the programmer to supply
-- their own equality test.
group :: Eq a => [a] -> [[a]]
group xs = groupBy (==) xs
-- | The 'groupBy' function is the non-overloaded version of 'group'.
groupBy :: (a -> a -> Bool) -> [a] -> [[a]]
groupBy _  []       = []
groupBy eq (x : xs) = (x : same) : groupBy eq rest
  where
    -- Everything equal (to the group's first element) joins the group.
    (same, rest) = span (eq x) xs
-- | The 'inits' function returns all initial segments of the argument,
-- shortest first. For example,
--
-- > inits "abc" == ["","a","ab","abc"]
--
-- Note that 'inits' has the following strictness property:
-- @inits _|_ = [] : _|_@
inits :: [a] -> [[a]]
inits xs = [] : longer
  where
    -- The argument is inspected only after the empty prefix has been
    -- produced, giving the strictness property above.
    longer = case xs of
      []     -> []
      y : ys -> map (y :) (inits ys)
-- | The 'tails' function returns all final segments of the argument,
-- longest first. For example,
--
-- > tails "abc" == ["abc", "bc", "c",""]
--
-- Note that 'tails' has the following strictness property:
-- @tails _|_ = _|_ : _|_@
tails :: [a] -> [[a]]
tails whole = whole : rest
  where
    -- The argument is inspected only after it has been emitted as the
    -- first (longest) suffix, giving the strictness property above.
    rest = case whole of
      []       -> []
      _ : more -> tails more
-- | The 'subsequences' function returns the list of all subsequences of the argument.
--
-- > subsequences "abc" == ["","a","b","ab","c","ac","bc","abc"]
subsequences :: [a] -> [[a]]
subsequences ys = [] : nonEmptySubsequences ys
-- | The 'nonEmptySubsequences' function returns the list of all subsequences of the argument,
-- except for the empty list.
--
-- > nonEmptySubsequences "abc" == ["a","b","ab","c","ac","bc","abc"]
nonEmptySubsequences :: [a] -> [[a]]
nonEmptySubsequences []       = []
nonEmptySubsequences (x : xs) =
  [x] : foldr combine [] (nonEmptySubsequences xs)
  where
    -- Each subsequence of the tail appears both without and with the
    -- head element prepended, in that order.
    combine sub acc = sub : (x : sub) : acc
-- | The 'permutations' function returns the list of all permutations of the argument.
--
-- > permutations "abc" == ["abc","bac","cba","bca","cab","acb"]
permutations :: [a] -> [[a]]
-- No Eq/Ord constraint: elements are never compared, only repositioned.
permutations xs0 = xs0 : perms xs0 []
  where
    -- perms ts is: permutations using all of ts plus the already-consumed
    -- prefix is (itself permuted recursively).
    perms []     _  = []
    perms (t:ts) is = foldr interleave (perms ts (t:is)) (permutations is)
      where
        -- interleave xs r: insert t at every position of xs, consing the
        -- results onto r; interleave' threads a difference list f of the
        -- elements already passed so each insertion is built in one pass.
        interleave    xs     r = let (_,zs) = interleave' id xs r in zs
        interleave' _ []     r = (ts, r)
        interleave' f (y:ys) r = let (us,zs) = interleave' (f . (y:)) ys r
                                 in  (y:us, f (t:y:us) : zs)
------------------------------------------------------------------------------
-- Quick Sort algorithm taken from HBC's QSort library.
-- | The 'sort' function implements a stable sorting algorithm.
-- It is a special case of 'sortBy', which allows the programmer to supply
-- their own comparison function.
sort :: (Ord a) => [a] -> [a]
-- | The 'sortBy' function is the non-overloaded version of 'sort'.
sortBy :: (a -> a -> Ordering) -> [a] -> [a]
#ifdef USE_REPORT_PRELUDE
sort = sortBy compare
sortBy cmp = foldr (insertBy cmp) []
#else
{-
GHC's mergesort replaced by a better implementation, 24/12/2009.
This code originally contributed to the nhc12 compiler by Thomas Nordin
in 2002. Rumoured to have been based on code by Lennart Augustsson, e.g.
http://www.mail-archive.com/haskell@haskell.org/msg01822.html
and possibly to bear similarities to a 1982 paper by Richard O'Keefe:
"A smooth applicative merge sort".
Benchmarks show it to be often 2x the speed of the previous implementation.
Fixes ticket http://hackage.haskell.org/trac/ghc/ticket/2143
-}
sort = sortBy compare
-- Bottom-up natural mergesort: cut the input into maximal runs
-- ('sequences'), then merge runs pairwise until one remains.
-- Stability: 'merge' takes from the left run on ties, and descending
-- runs are built only from *strictly* descending elements.
sortBy cmp = mergeAll . sequences
  where
    sequences (a:b:xs)
      | a `cmp` b == GT = descending b [a]  xs
      | otherwise       = ascending  b (a:) xs
    sequences xs = [xs]

    -- Collect a strictly descending run; consing onto the accumulator
    -- reverses it into ascending order for free.
    descending a as (b:bs)
      | a `cmp` b == GT = descending b (a:as) bs
    descending a as bs  = (a:as) : sequences bs

    -- Collect a non-descending run, built as a difference list so the
    -- run stays in order without a final reverse.
    ascending a as (b:bs)
      | a `cmp` b /= GT = ascending b (\ys -> as (a:ys)) bs
    ascending a as bs   = as [a] : sequences bs

    mergeAll [x] = x
    mergeAll xs  = mergeAll (mergePairs xs)

    mergePairs (a:b:xs) = merge a b : mergePairs xs
    mergePairs xs       = xs

    merge as@(a:as') bs@(b:bs')
      | a `cmp` b == GT = b : merge as  bs'
      | otherwise       = a : merge as' bs
    merge [] bs         = bs
    merge as []         = as
{-
sortBy cmp l = mergesort cmp l
sort l = mergesort compare l
Quicksort replaced by mergesort, 14/5/2002.
From: Ian Lynagh <igloo@earth.li>
I am curious as to why the List.sort implementation in GHC is a
quicksort algorithm rather than an algorithm that guarantees n log n
time in the worst case? I have attached a mergesort implementation along
with a few scripts to time it's performance, the results of which are
shown below (* means it didn't finish successfully - in all cases this
was due to a stack overflow).
If I heap profile the random_list case with only 10000 then I see
random_list peaks at using about 2.5M of memory, whereas in the same
program using List.sort it uses only 100k.
Input style Input length Sort data Sort alg User time
stdin 10000 random_list sort 2.82
stdin 10000 random_list mergesort 2.96
stdin 10000 sorted sort 31.37
stdin 10000 sorted mergesort 1.90
stdin 10000 revsorted sort 31.21
stdin 10000 revsorted mergesort 1.88
stdin 100000 random_list sort *
stdin 100000 random_list mergesort *
stdin 100000 sorted sort *
stdin 100000 sorted mergesort *
stdin 100000 revsorted sort *
stdin 100000 revsorted mergesort *
func 10000 random_list sort 0.31
func 10000 random_list mergesort 0.91
func 10000 sorted sort 19.09
func 10000 sorted mergesort 0.15
func 10000 revsorted sort 19.17
func 10000 revsorted mergesort 0.16
func 100000 random_list sort 3.85
func 100000 random_list mergesort *
func 100000 sorted sort 5831.47
func 100000 sorted mergesort 2.23
func 100000 revsorted sort 5872.34
func 100000 revsorted mergesort 2.24
mergesort :: (a -> a -> Ordering) -> [a] -> [a]
mergesort cmp = mergesort' cmp . map wrap
mergesort' :: (a -> a -> Ordering) -> [[a]] -> [a]
mergesort' _ [] = []
mergesort' _ [xs] = xs
mergesort' cmp xss = mergesort' cmp (merge_pairs cmp xss)
merge_pairs :: (a -> a -> Ordering) -> [[a]] -> [[a]]
merge_pairs _ [] = []
merge_pairs _ [xs] = [xs]
merge_pairs cmp (xs:ys:xss) = merge cmp xs ys : merge_pairs cmp xss
merge :: (a -> a -> Ordering) -> [a] -> [a] -> [a]
merge _ [] ys = ys
merge _ xs [] = xs
merge cmp (x:xs) (y:ys)
= case x `cmp` y of
GT -> y : merge cmp (x:xs) ys
_ -> x : merge cmp xs (y:ys)
wrap :: a -> [a]
wrap x = [x]
OLDER: qsort version
-- qsort is stable and does not concatenate.
qsort :: (a -> a -> Ordering) -> [a] -> [a] -> [a]
qsort _ [] r = r
qsort _ [x] r = x:r
qsort cmp (x:xs) r = qpart cmp x xs [] [] r
-- qpart partitions and sorts the sublists
qpart :: (a -> a -> Ordering) -> a -> [a] -> [a] -> [a] -> [a] -> [a]
qpart cmp x [] rlt rge r =
-- rlt and rge are in reverse order and must be sorted with an
-- anti-stable sorting
rqsort cmp rlt (x:rqsort cmp rge r)
qpart cmp x (y:ys) rlt rge r =
case cmp x y of
GT -> qpart cmp x ys (y:rlt) rge r
_ -> qpart cmp x ys rlt (y:rge) r
-- rqsort is as qsort but anti-stable, i.e. reverses equal elements
rqsort :: (a -> a -> Ordering) -> [a] -> [a] -> [a]
rqsort _ [] r = r
rqsort _ [x] r = x:r
rqsort cmp (x:xs) r = rqpart cmp x xs [] [] r
rqpart :: (a -> a -> Ordering) -> a -> [a] -> [a] -> [a] -> [a] -> [a]
rqpart cmp x [] rle rgt r =
qsort cmp rle (x:qsort cmp rgt r)
rqpart cmp x (y:ys) rle rgt r =
case cmp y x of
GT -> rqpart cmp x ys rle (y:rgt) r
_ -> rqpart cmp x ys (y:rle) rgt r
-}
#endif /* USE_REPORT_PRELUDE */
-- | The 'unfoldr' function is a \`dual\' to 'foldr': while 'foldr'
-- reduces a list to a summary value, 'unfoldr' builds a list from
-- a seed value. The function takes the element and returns 'Nothing'
-- if it is done producing the list or returns 'Just' @(a,b)@, in which
-- case, @a@ is prepended to the list and @b@ is used as the next
-- seed in a recursive call. For example,
--
-- > iterate f == unfoldr (\x -> Just (x, f x))
--
-- In some cases, 'unfoldr' can undo a 'foldr' operation:
--
-- > unfoldr f' (foldr f z xs) == xs
--
-- if the following holds:
--
-- > f' (f x y) = Just (x,y)
-- > f' z       = Nothing
--
-- A simple use of unfoldr:
--
-- > unfoldr (\b -> if b == 0 then Nothing else Just (b, b-1)) 10
-- > [10,9,8,7,6,5,4,3,2,1]
--
unfoldr :: (b -> Maybe (a, b)) -> b -> [a]
unfoldr produce seed =
  case produce seed of
    Nothing            -> []
    Just (item, seed') -> item : unfoldr produce seed'
-- -----------------------------------------------------------------------------
-- | A strict version of 'foldl'.
foldl' :: (a -> b -> a) -> a -> [b] -> a
#ifdef __GLASGOW_HASKELL__
-- Local worker loop; 'seq' forces the accumulator to WHNF before each
-- recursive call, so no chain of pending applications is built.
foldl' f z0 xs0 = lgo z0 xs0
  where
    lgo z []     = z
    lgo z (x:xs) = let z' = f z x in z' `seq` lgo z' xs
#else
foldl' f a []     = a
foldl' f a (x:xs) = let a' = f a x in a' `seq` foldl' f a' xs
#endif
#ifdef __GLASGOW_HASKELL__
-- | 'foldl1' is a variant of 'foldl' that has no starting value argument,
-- and thus must be applied to non-empty lists.
foldl1 :: (a -> a -> a) -> [a] -> a
foldl1 f (x:xs) = foldl f x xs
foldl1 _ []     = errorEmptyList "foldl1"
#endif /* __GLASGOW_HASKELL__ */

-- | A strict version of 'foldl1'
-- (strict in the accumulator, via 'foldl'').
foldl1' :: (a -> a -> a) -> [a] -> a
foldl1' f (x:xs) = foldl' f x xs
foldl1' _ []     = errorEmptyList "foldl1'"
#ifdef __GLASGOW_HASKELL__
-- -----------------------------------------------------------------------------
-- List sum and product
-- Monomorphic copies for the common numeric types; the polymorphic
-- versions below would otherwise pay for dictionary passing.
{-# SPECIALISE sum     :: [Int] -> Int #-}
{-# SPECIALISE sum     :: [Integer] -> Integer #-}
{-# SPECIALISE product :: [Int] -> Int #-}
{-# SPECIALISE product :: [Integer] -> Integer #-}
-- | The 'sum' function computes the sum of a finite list of numbers.
sum :: (Num a) => [a] -> a
-- | The 'product' function computes the product of a finite list of numbers.
product :: (Num a) => [a] -> a
#ifdef USE_REPORT_PRELUDE
sum     = foldl (+) 0
product = foldl (*) 1
#else
-- Accumulator-passing loops; note the accumulator itself is not
-- forced here, since (+)/(*) need not be strict for every Num.
sum l = sum' l 0
  where
    sum' []     a = a
    sum' (x:xs) a = sum' xs (a+x)
product l = prod l 1
  where
    prod []     a = a
    prod (x:xs) a = prod xs (a*x)
#endif
-- -----------------------------------------------------------------------------
-- Functions on strings
-- | 'lines' breaks a string up into a list of strings at newline
-- characters. The resulting strings do not contain newlines.
lines :: String -> [String]
lines "" = []
#ifdef __GLASGOW_HASKELL__
-- Somehow GHC doesn't detect the selector thunks in the below code,
-- so s' keeps a reference to the first line via the pair and we have
-- a space leak (cf. #4334).
-- So we need to make GHC see the selector thunks with a trick.
lines s = cons (case break (== '\n') s of
                  (l, s') -> (l, case s' of
                                   []      -> []
                                   _:s''   -> lines s''))
  where
    -- Lazy-pattern cons is the "trick": it turns the pair components
    -- into selector thunks GHC can drop once consumed.
    cons ~(h, t) = h : t
#else
lines s = let (l, s') = break (== '\n') s
          in  l : case s' of
                    []      -> []
                    (_:s'') -> lines s''
#endif
-- | 'unlines' is an inverse operation to 'lines'.
-- It joins lines, after appending a terminating newline to each.
unlines :: [String] -> String
#ifdef USE_REPORT_PRELUDE
unlines = concatMap (++ "\n")
#else
-- HBC version (stolen)
-- here's a more efficient version
-- (appends each line once, consing the newline directly).
unlines []     = []
unlines (l:ls) = l ++ '\n' : unlines ls
#endif
-- | 'words' breaks a string up into a list of words, which were delimited
-- by white space.
words :: String -> [String]
-- Leading whitespace is discarded, then 'break' splits off one word;
-- repeat on the remainder.
words s = case dropWhile {-partain:Char.-}isSpace s of
            "" -> []
            s' -> w : words s''
                  where (w, s'') =
                          break {-partain:Char.-}isSpace s'
-- | 'unwords' is an inverse operation to 'words'.
-- It joins words with separating spaces.
unwords :: [String] -> String
#ifdef USE_REPORT_PRELUDE
unwords [] = ""
unwords ws = foldr1 (\w s -> w ++ ' ':s) ws
#else
-- HBC version (stolen)
-- here's a more efficient version
-- (a single-word case avoids the trailing separator without foldr1).
unwords []     = ""
unwords [w]    = w
unwords (w:ws) = w ++ ' ' : unwords ws
#endif
#else /* !__GLASGOW_HASKELL__ */
-- Shared error for the partial list functions in the non-GHC build
-- (the GHC build picks this up from elsewhere in base).
errorEmptyList :: String -> a
errorEmptyList fun =
  error ("Prelude." ++ fun ++ ": empty list")
#endif /* !__GLASGOW_HASKELL__ */
| beni55/haste-compiler | libraries/ghc-7.8/base/Data/List.hs | bsd-3-clause | 43,092 | 3 | 16 | 13,863 | 7,156 | 4,167 | 2,989 | 364 | 8 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell, QuasiQuotes, TypeFamilies, MultiParamTypeClasses, ViewPatterns #-}
module YesodCoreTest.Json (specs, Widget) where
import Yesod.Core
import Test.Hspec
import qualified Data.Map as Map
import Network.Wai.Test
import Data.Text (Text)
import Data.ByteString.Lazy (ByteString)
-- Minimal foundation type for this test site; it carries no state.
data App = App

-- Template Haskell: generates the route datatype and dispatch for the
-- site root plus a two-parameter path used to exercise routes with
-- multiple dynamic pieces.
mkYesod "App" [parseRoutes|
/ HomeR GET
/has-multiple-pieces/#Int/#Int MultiplePiecesR GET
|]

-- Default Yesod settings are sufficient for the tests.
instance Yesod App
-- | Parse the request body as JSON and echo the value of its \"foo\"
-- key back as plain text.  A body without that key yields a 400 via
-- 'invalidArgs'; malformed JSON is rejected inside 'requireJsonBody'
-- (the spec below expects 400 for that case too).
getHomeR :: Handler RepPlain
getHomeR = do
    val <- requireJsonBody
    case Map.lookup ("foo" :: Text) val of
        Nothing  -> invalidArgs ["foo not found"]
        Just foo -> return $ RepPlain $ toContent (foo :: Text)
-- | Handler for the multi-piece route; the path parameters are unused —
-- the route only needs to exist for dispatch-generation coverage.
getMultiplePiecesR :: Int -> Int -> Handler ()
getMultiplePiecesR _ _ = return ()
-- | Run a single WAI-level request against a fresh 'App': send the
-- given raw body with the default request (GET /) and hand the
-- response to the supplied assertion action.
test :: String                    -- ^ test description
     -> ByteString                -- ^ raw request body
     -> (SResponse -> Session ()) -- ^ assertions on the response
     -> Spec
test name rbody f = it name $ do
    app <- toWaiApp App
    flip runSession app $ do
        sres <- srequest SRequest
            { simpleRequest     = defaultRequest
            , simpleRequestBody = rbody
            }
        f sres
-- | End-to-end checks for JSON request handling: a well-formed body
-- containing \"foo\" echoes its value with 200; malformed JSON and a
-- body missing the expected key both produce 400 responses.
specs :: Spec
specs = describe "Yesod.Json" $ do
    test "parses valid content" "{\"foo\":\"bar\"}" $ \sres -> do
        assertStatus 200 sres
        assertBody "bar" sres
    test "400 for bad JSON" "{\"foo\":\"bar\"" $ \sres -> do
        assertStatus 400 sres
    test "400 for bad structure" "{\"foo2\":\"bar\"}" $ \sres -> do
        assertStatus 400 sres
        assertBodyContains "foo not found" sres
| pikajude/yesod | yesod-core/test/YesodCoreTest/Json.hs | mit | 1,493 | 0 | 14 | 355 | 405 | 204 | 201 | 40 | 2 |
-- !!! Testing the Int Enum instances.
{-# LANGUAGE CPP #-}
module Main(main) where
import Control.Exception
#if __GLASGOW_HASKELL__ < 705
import Prelude hiding (catch)
#endif
import Data.Int
-- Driver: run the Enum checks for each fixed-width Int type, with a
-- banner line before each group so a failing case is easy to locate in
-- the expected-output diff.
main = do
  putStrLn "Testing Enum Int8:"
  testEnumInt8
  putStrLn "Testing Enum Int16:"
  testEnumInt16
  putStrLn "Testing Enum Int32:"
  testEnumInt32
  putStrLn "Testing Enum Int64:"
  testEnumInt64
#define printTest(x) (do{ putStr ( " " ++ "x" ++ " = " ) ; print (x) })
-- Exercises the Enum instance for Int8: succ/pred at and around both
-- bounds (the out-of-range calls are wrapped in mayBomb, a helper
-- defined later in this file — presumably it traps the expected
-- error; TODO confirm), toEnum/fromEnum conversions, and all four
-- enumeration forms ([x..], [x,y..], [x..y], [x,y..z]).
testEnumInt8 :: IO ()
testEnumInt8 = do
  -- succ
  printTest ((succ (0::Int8)))
  printTest ((succ (minBound::Int8)))
  mayBomb (printTest ((succ (maxBound::Int8))))
  -- pred
  printTest (pred (1::Int8))
  printTest (pred (maxBound::Int8))
  mayBomb (printTest (pred (minBound::Int8)))
  -- toEnum
  printTest ((map (toEnum::Int->Int8) [1, fromIntegral (minBound::Int8), fromIntegral (maxBound::Int8)]))
  mayBomb (printTest ((toEnum (maxBound::Int))::Int8))
  -- fromEnum
  printTest ((map fromEnum [(1::Int8),minBound,maxBound]))
  -- [x..] aka enumFrom
  printTest ((take 7 [(1::Int8)..]))
  printTest ((take 7 [((maxBound::Int8)-5)..])) -- just in case it doesn't catch the upper bound..
  -- [x,y..] aka enumFromThen
  printTest ((take 7 [(1::Int8),2..]))
  printTest ((take 7 [(1::Int8),7..]))
  printTest ((take 7 [(1::Int8),1..]))
  printTest ((take 7 [(1::Int8),0..]))
  printTest ((take 7 [(5::Int8),2..]))
  let x = (minBound::Int8) + 1
  printTest ((take 7 [x, x-1 ..]))
  let x = (minBound::Int8) + 5
  printTest ((take 7 [x, x-1 ..]))
  let x = (maxBound::Int8) - 5
  printTest ((take 7 [x, (x+1) ..]))
  -- [x..y] aka enumFromTo
  printTest ((take 7 ([(1::Int8) .. 5])))
  printTest ((take 4 ([(1::Int8) .. 1])))
  printTest ((take 7 ([(1::Int8) .. 0])))
  printTest ((take 7 ([(5::Int8) .. 0])))
  printTest ((take 7 ([(maxBound-(5::Int8)) .. maxBound])))
  printTest ((take 7 ([(minBound+(5::Int8)) .. minBound])))
  -- [x,y..z] aka enumFromThenTo
  printTest ((take 7 [(5::Int8),4..1]))
  printTest ((take 7 [(5::Int8),3..1]))
  printTest ((take 7 [(5::Int8),3..2]))
  printTest ((take 7 [(1::Int8),2..1]))
  printTest ((take 7 [(2::Int8),1..2]))
  printTest ((take 7 [(2::Int8),1..1]))
  printTest ((take 7 [(2::Int8),3..1]))
  let x = (maxBound::Int8) - 4
  printTest ((take 7 [x,(x+1)..maxBound]))
  let x = (minBound::Int8) + 5
  printTest ((take 7 [x,(x-1)..minBound]))
testEnumInt16 :: IO ()
testEnumInt16 = do
-- succ
printTest ((succ (0::Int16)))
printTest ((succ (minBound::Int16)))
mayBomb (printTest ((succ (maxBound::Int16))))
-- pred
printTest (pred (1::Int16))
printTest (pred (maxBound::Int16))
mayBomb (printTest (pred (minBound::Int16)))
-- toEnum
printTest ((map (toEnum::Int->Int16) [1, fromIntegral (minBound::Int16), fromIntegral (maxBound::Int16)]))
mayBomb (printTest ((toEnum (maxBound::Int))::Int16))
-- fromEnum
printTest ((map fromEnum [(1::Int16),minBound,maxBound]))
-- [x..] aka enumFrom
printTest ((take 7 [(1::Int16)..]))
printTest ((take 7 [((maxBound::Int16)-5)..])) -- just in case it doesn't catch the upper bound..
-- [x,y..] aka enumFromThen
printTest ((take 7 [(1::Int16),2..]))
printTest ((take 7 [(1::Int16),7..]))
printTest ((take 7 [(1::Int16),1..]))
printTest ((take 7 [(1::Int16),0..]))
printTest ((take 7 [(5::Int16),2..]))
let x = (minBound::Int16) + 1
printTest ((take 7 [x, x-1 ..]))
let x = (minBound::Int16) + 5
printTest ((take 7 [x, x-1 ..]))
let x = (maxBound::Int16) - 5
printTest ((take 7 [x, (x+1) ..]))
-- [x..y] aka enumFromTo
printTest ((take 7 ([(1::Int16) .. 5])))
printTest ((take 4 ([(1::Int16) .. 1])))
printTest ((take 7 ([(1::Int16) .. 0])))
printTest ((take 7 ([(5::Int16) .. 0])))
printTest ((take 7 ([(maxBound-(5::Int16)) .. maxBound])))
printTest ((take 7 ([(minBound+(5::Int16)) .. minBound])))
-- [x,y..z] aka enumFromThenTo
printTest ((take 7 [(5::Int16),4..1]))
printTest ((take 7 [(5::Int16),3..1]))
printTest ((take 7 [(5::Int16),3..2]))
printTest ((take 7 [(1::Int16),2..1]))
printTest ((take 7 [(2::Int16),1..2]))
printTest ((take 7 [(2::Int16),1..1]))
printTest ((take 7 [(2::Int16),3..1]))
let x = (maxBound::Int16) - 4
printTest ((take 7 [x,(x+1)..maxBound]))
let x = (minBound::Int16) + 5
printTest ((take 7 [x,(x-1)..minBound]))
testEnumInt32 :: IO ()
testEnumInt32 = do
-- succ
printTest ((succ (0::Int32)))
printTest ((succ (minBound::Int32)))
mayBomb (printTest ((succ (maxBound::Int32))))
-- pred
printTest (pred (1::Int32))
printTest (pred (maxBound::Int32))
mayBomb (printTest (pred (minBound::Int32)))
-- toEnum
printTest ((map (toEnum::Int->Int32) [1, fromIntegral (minBound::Int32), fromIntegral (maxBound::Int32)]))
mayBomb (printTest ((toEnum (maxBound::Int))::Int32))
-- fromEnum
printTest ((map fromEnum [(1::Int32),minBound,maxBound]))
-- [x..] aka enumFrom
printTest ((take 7 [(1::Int32)..]))
printTest ((take 7 [((maxBound::Int32)-5)..])) -- just in case it doesn't catch the upper bound..
-- [x,y..] aka enumFromThen
printTest ((take 7 [(1::Int32),2..]))
printTest ((take 7 [(1::Int32),7..]))
printTest ((take 7 [(1::Int32),1..]))
printTest ((take 7 [(1::Int32),0..]))
printTest ((take 7 [(5::Int32),2..]))
let x = (minBound::Int32) + 1
printTest ((take 7 [x, x-1 ..]))
let x = (minBound::Int32) + 5
printTest ((take 7 [x, x-1 ..]))
let x = (maxBound::Int32) - 5
printTest ((take 7 [x, (x+1) ..]))
-- [x..y] aka enumFromTo
printTest ((take 7 ([(1::Int32) .. 5])))
printTest ((take 4 ([(1::Int32) .. 1])))
printTest ((take 7 ([(1::Int32) .. 0])))
printTest ((take 7 ([(5::Int32) .. 0])))
printTest ((take 7 ([(maxBound-(5::Int32)) .. maxBound])))
printTest ((take 7 ([(minBound+(5::Int32)) .. minBound])))
-- [x,y..z] aka enumFromThenTo
printTest ((take 7 [(5::Int32),4..1]))
printTest ((take 7 [(5::Int32),3..1]))
printTest ((take 7 [(5::Int32),3..2]))
printTest ((take 7 [(1::Int32),2..1]))
printTest ((take 7 [(2::Int32),1..2]))
printTest ((take 7 [(2::Int32),1..1]))
printTest ((take 7 [(2::Int32),3..1]))
let x = (maxBound::Int32) - 4
printTest ((take 7 [x,(x+1)..maxBound]))
let x = (minBound::Int32) + 5
printTest ((take 7 [x,(x-1)..minBound]))
testEnumInt64 :: IO ()
testEnumInt64 = do
-- succ
printTest ((succ (0::Int64)))
printTest ((succ (minBound::Int64)))
mayBomb (printTest ((succ (maxBound::Int64))))
-- pred
printTest (pred (1::Int64))
printTest (pred (maxBound::Int64))
mayBomb (printTest (pred (minBound::Int64)))
-- toEnum
mayBomb (printTest ((map (toEnum::Int->Int64) [1, fromIntegral (minBound::Int64), fromIntegral (maxBound::Int64)])))
mayBomb (printTest ((toEnum (maxBound::Int))::Int64))
-- fromEnum
printTest ((map fromEnum [(1::Int64),fromIntegral (minBound::Int) ,fromIntegral (maxBound::Int)]))
mayBomb (printTest (fromEnum (maxBound::Int64)))
-- [x..] aka enumFrom
printTest ((take 7 [(1::Int64)..]))
printTest ((take 7 [((maxBound::Int64)-5)..])) -- just in case it doesn't catch the upper bound..
-- [x,y..] aka enumFromThen
printTest ((take 7 [(1::Int64),2..]))
printTest ((take 7 [(1::Int64),7..]))
printTest ((take 7 [(1::Int64),1..]))
printTest ((take 7 [(1::Int64),0..]))
printTest ((take 7 [(5::Int64),2..]))
let x = (minBound::Int64) + 1
printTest ((take 7 [x, x-1 ..]))
let x = (minBound::Int64) + 5
printTest ((take 7 [x, x-1 ..]))
let x = (maxBound::Int64) - 5
printTest ((take 7 [x, (x+1) ..]))
-- [x..y] aka enumFromTo
printTest ((take 7 ([(1::Int64) .. 5])))
printTest ((take 4 ([(1::Int64) .. 1])))
printTest ((take 7 ([(1::Int64) .. 0])))
printTest ((take 7 ([(5::Int64) .. 0])))
printTest ((take 7 ([(maxBound-(5::Int64)) .. maxBound])))
printTest ((take 7 ([(minBound+(5::Int64)) .. minBound])))
-- [x,y..z] aka enumFromThenTo
printTest ((take 7 [(5::Int64),4..1]))
printTest ((take 7 [(5::Int64),3..1]))
printTest ((take 7 [(5::Int64),3..2]))
printTest ((take 7 [(1::Int64),2..1]))
printTest ((take 7 [(2::Int64),1..2]))
printTest ((take 7 [(2::Int64),1..1]))
printTest ((take 7 [(2::Int64),3..1]))
let x = (maxBound::Int64) - 4
printTest ((take 7 [x,(x+1)..maxBound]))
let x = (minBound::Int64) + 5
printTest ((take 7 [x,(x-1)..minBound]))
--
--
-- Utils
--
--
-- | Run an IO action that is allowed to fail: the inner handler reports
-- calls to 'error' (e.g. succ maxBound) as "error <msg>", and the outer
-- handler reports any other exception as "Fail: ...".  The handler
-- nesting order matters: ErrorCall must be tried first so the golden
-- output distinguishes the two failure modes.
mayBomb x = catch x (\(ErrorCall e) -> putStrLn ("error " ++ show e))
            `catch` (\e -> putStrLn ("Fail: " ++ show (e :: SomeException)))
| beni55/ghcjs | test/pkg/base/enum02.hs | mit | 8,604 | 0 | 15 | 1,585 | 4,768 | 2,607 | 2,161 | 181 | 1 |
{-# LANGUAGE TypeFamilies, FlexibleContexts, UndecidableInstances #-}
-- This is a copy of typecheck/should_run/T3500b, but it's here for
-- a different reason: at one time, it sent the compiler into a loop.
-- ANd T3500b isn't tested 'fast' mode
module T9565 where
-- | Fixpoint of a functor: @Mu f@ unrolls to @f (f (f ...))@.
newtype Mu f = Mu (f (Mu f))
-- | Identity type family: @Id m@ reduces to @m@.
type family Id m
type instance Id m = m
-- The instance context mentions the type family so the solver must
-- reduce 'Id' during context simplification -- the case that once sent
-- the compiler into a loop (see module header).
instance Show (Id (f (Mu f))) => Show (Mu f) where
  show (Mu f) = show f
showMu :: Mu (Either ()) -> String
showMu = show
-- | A two-level sample value: Right wrapping a Left.
item :: Mu (Either ())
item = Mu (Right (Mu (Left ())))
main = print (showMu item)
| urbanslug/ghc | testsuite/tests/simplCore/should_compile/T9565.hs | bsd-3-clause | 551 | 0 | 12 | 115 | 184 | 98 | 86 | 12 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module T4003B where
import {-# SOURCE #-} T4003A (HsExpr)
import Data.Data
-- | Minimal stand-in for GHC's HsLit, just enough to derive Data.
data HsLit = HsChar
  deriving (Data, Typeable)
-- | Overloaded literal referencing 'HsExpr', which is imported via an
-- hs-boot file -- the recursive-module case this test exercises.
data HsOverLit id
  = OverLit (HsExpr id)
  deriving (Data, Typeable)
-- | The underlying value of an overloaded literal.
data OverLitVal = HsIntegral
  deriving (Data, Typeable)
| urbanslug/ghc | testsuite/tests/rename/should_compile/T4003B.hs | bsd-3-clause | 294 | 0 | 8 | 55 | 82 | 48 | 34 | 11 | 0 |
{-# LANGUAGE OverloadedLists #-}
-----------------------------------------------------------------------------
-- |
-- Module : Algebra.Graph.Test.Label
-- Copyright : (c) Andrey Mokhov 2016-2022
-- License : MIT (see the file LICENSE)
-- Maintainer : andrey.mokhov@gmail.com
-- Stability : experimental
--
-- Testsuite for "Algebra.Graph.Label".
-----------------------------------------------------------------------------
module Algebra.Graph.Test.Label (
-- * Testsuite
testLabel
) where
import Algebra.Graph.Test
import Algebra.Graph.Label
import Data.Monoid
-- Readable names for the shapes of the operations under test.
type Unary a = a -> a
type Binary a = a -> a -> a
type Additive a = Binary a
type Multiplicative a = Binary a
type Star a = Unary a
type Identity a = a
type Zero a = a
type One a = a
-- | (a <> b) <> c == a <> (b <> c)
associative :: Eq a => Binary a -> a -> a -> a -> Property
associative (<>) a b c = (a <> b) <> c == a <> (b <> c) // "Associative"
-- | a <> b == b <> a
commutative :: Eq a => Binary a -> a -> a -> Property
commutative (<>) a b = a <> b == b <> a // "Commutative"
-- | a <> a == a
idempotent :: Eq a => Binary a -> a -> Property
idempotent (<>) a = a <> a == a // "Idempotent"
-- | z annihilates on both sides: a <> z == z == z <> a.
annihilatingZero :: Eq a => Binary a -> Zero a -> a -> Property
annihilatingZero (<>) z a = conjoin
    [ a <> z == z // "Left"
    , z <> a == z // "Right" ] // "Annihilating zero"
-- | The star operation satisfies s a == o + a * s a == o + s a * a.
closure :: Eq a => Additive a -> Multiplicative a -> One a -> Star a -> a -> Property
closure (+) (*) o s a = conjoin
    [ s a == o + (a * s a) // "Left"
    , s a == o + (s a * a) // "Right" ] // "Closure"
-- | a * (b + c) == a * b + a * c
leftDistributive :: Eq a => Additive a -> Multiplicative a -> a -> a -> a -> Property
leftDistributive (+) (*) a b c =
    a * (b + c) == (a * b) + (a * c) // "Left distributive"
-- | (a + b) * c == a * c + b * c
rightDistributive :: Eq a => Additive a -> Multiplicative a -> a -> a -> a -> Property
rightDistributive (+) (*) a b c =
    (a + b) * c == (a * c) + (b * c) // "Right distributive"
-- | Distributivity on both sides.
distributive :: Eq a => Additive a -> Multiplicative a -> a -> a -> a -> Property
distributive p m a b c = conjoin
    [ leftDistributive p m a b c
    , rightDistributive p m a b c ] // "Distributive"
-- | e is a two-sided identity for <>.
identity :: Eq a => Binary a -> Identity a -> a -> Property
identity (<>) e a = conjoin
    [ a <> e == a // "Left"
    , e <> a == a // "Right" ] // "Identity"
-- | A semigroup is just an associative operation.
semigroup :: Eq a => Binary a -> a -> a -> a -> Property
semigroup f a b c = associative f a b c // "Semigroup"
-- | Monoid = semigroup + identity.
monoid :: Eq a => Binary a -> Identity a -> a -> a -> a -> Property
monoid f e a b c = conjoin
    [ semigroup f a b c
    , identity f e a ] // "Monoid"
-- | Commutative monoid = monoid + commutativity.
commutativeMonoid :: Eq a => Binary a -> Identity a -> a -> a -> a -> Property
commutativeMonoid f e a b c = conjoin
    [ monoid f e a b c
    , commutative f a b ] // "Commutative monoid"
-- | Like a semiring but only left distributivity is required.
leftNearRing :: Eq a => Additive a -> Zero a -> Multiplicative a -> One a -> a -> a -> a -> Property
leftNearRing (+) z (*) o a b c = conjoin
    [ commutativeMonoid (+) z a b c
    , monoid (*) o a b c
    , leftDistributive (+) (*) a b c
    , annihilatingZero (*) z a ] // "Left near ring"
-- | Full semiring laws.
semiring :: Eq a => Additive a -> Zero a -> Multiplicative a -> One a -> a -> a -> a -> Property
semiring (+) z (*) o a b c = conjoin
    [ commutativeMonoid (+) z a b c
    , monoid (*) o a b c
    , distributive (+) (*) a b c
    , annihilatingZero (*) z a ] // "Semiring"
-- | Dioid = semiring with idempotent addition.
dioid :: Eq a => Additive a -> Zero a -> Multiplicative a -> One a -> a -> a -> a -> Property
dioid (+) z (*) o a b c = conjoin
    [ semiring (+) z (*) o a b c
    , idempotent (+) a ] // "Dioid"
-- | Star semiring = semiring + closure laws for 'star'.
starSemiring :: Eq a => Additive a -> Zero a -> Multiplicative a -> One a -> Star a -> a -> a -> a -> Property
starSemiring (+) z (*) o s a b c = conjoin
    [ semiring (+) z (*) o a b c
    , closure (+) (*) o s a ] // "Star semiring"
-- Law bundles instantiated with the Semiring/StarSemiring/Dioid
-- class methods, ready for use with type applications below.
testLeftNearRing :: (Eq a, Semiring a) => a -> a -> a -> Property
testLeftNearRing = leftNearRing (<+>) zero (<.>) one
testSemiring :: (Eq a, Semiring a) => a -> a -> a -> Property
testSemiring = semiring (<+>) zero (<.>) one
testDioid :: (Eq a, Dioid a) => a -> a -> a -> Property
testDioid = dioid (<+>) zero (<.>) one
testStarSemiring :: (Eq a, StarSemiring a) => a -> a -> a -> Property
testStarSemiring = starSemiring (<+>) zero (<.>) one star
-- | Check the algebraic laws for each label type exported by
-- "Algebra.Graph.Label".  PowerSet tests are size-limited because the
-- structures grow combinatorially.
testLabel :: IO ()
testLabel = do
    putStrLn "\n============ Graph.Label ============"
    putStrLn "\n============ Any: instances ============"
    test "Semiring" $ testSemiring @Any
    test "StarSemiring" $ testStarSemiring @Any
    test "Dioid" $ testDioid @Any
    putStrLn "\n============ Distance Int: instances ============"
    test "Semiring" $ testSemiring @(Distance Int)
    test "StarSemiring" $ testStarSemiring @(Distance Int)
    test "Dioid" $ testDioid @(Distance Int)
    putStrLn "\n============ Capacity Int: instances ============"
    test "Semiring" $ testSemiring @(Capacity Int)
    test "StarSemiring" $ testStarSemiring @(Capacity Int)
    test "Dioid" $ testDioid @(Capacity Int)
    putStrLn "\n============ Minimum (Path Int): instances ============"
    test "LeftNearRing" $ testLeftNearRing @(Minimum (Path Int))
    putStrLn "\n============ PowerSet (Path Int): instances ============"
    test "Semiring" $ size10 $ testSemiring @(PowerSet (Path Int))
    test "Dioid" $ size10 $ testDioid @(PowerSet (Path Int))
    putStrLn "\n============ Count Int: instances ============"
    test "Semiring" $ testSemiring @(Count Int)
    test "StarSemiring" $ testStarSemiring @(Count Int)
| snowleopard/alga | test/Algebra/Graph/Test/Label.hs | mit | 5,510 | 0 | 13 | 1,384 | 2,192 | 1,119 | 1,073 | -1 | -1 |
{-|
Module : ElfParser
Description : ELF Parser
Copyright : (c) Mathieu Suen, 2014
License : MIT
Maintainer : mathk.sue@gmail.com
-}
module ElfParser
(
ELFInfo,
ELFSection(..),
ELFHeader(..),
ELFProgramHeader(..),
ELFSectionHeader(..),
sectionHeader,
-- * Accessing elf object
sectionHeaders, programHeaders, header, size, sections,
sectionFromName, sectionFromIndex, sectionFromHeader,
sectionName,
-- * Symbol related function
symbolAt, symbolName,
-- * Parsing function
parseFile, sectionContent,
parse
) where
import qualified Data.ByteString as B
import Control.Applicative
import Control.Monad (forM)
import Control.Monad.State
import Control.Monad.Trans
import qualified FileDecoding as F
import qualified Data.Map as Map
import Text.Printf
import Data.Word
import Data.List
import Data.Ord
import Data.Int (Int64)
import Data.Maybe
import Data.Bits
{- ELF Data type -}
-- | The leading identification: the 0x7F byte plus the "ELF" string.
data ELFHeaderMagic = ELFHeaderMagic Word8 String
data ELFHeaderVersion = ELFDefaultVersion | ELFOtherVersion
-- | Raw EI_OSABI byte.
data ELFHeaderABI = ELFHeaderABI Word8
data ELFHeaderType = ELFRelocatable | ELFExecutable | ELFShared | ELFCore
-- | Program header p_type values, including OS/processor-specific ones.
data ELFProgramHeaderType =
    ELFPHTNull |
    ELFPHTLoad |
    ELFPHTDynamic |
    ELFPHTInterp |
    ELFPHTNote |
    ELFPHTShlib |
    ELFPHTPhdr |
    ELFPHTTls |
    ELFPHTLoos |
    ELFPHTHios |
    ELFPHTLoProc |
    ELFPHTHiProc |
    ELFPHTGnuEhFrame|
    ELFPHTGnuStack |
    ELFPHTGnuRelro |
    ELFPHTArmExUnwind
-- | Section header sh_type values, including the ARM-specific range.
data ELFSectionHeaderType =
    ELFSHTNull |
    ELFSHTProgBits |
    ELFSHTSymTab |
    ELFSHTStrTab |
    ELFSHTRela |
    ELFSHTHash |
    ELFSHTDynamic |
    ELFSHTNote |
    ELFSHTNoBits |
    ELFSHTRel |
    ELFSHTShlib |
    ELFSHTDynSym |
    ELFSHTInitArray |
    ELFSHTFiniArray |
    ELFSHTPreinitArray |
    ELFSHTGroup |
    ELFSHTSymTabShndx|
    ELFSHTLoos |
    ELFSHTHios |
    ELFSHTLoProc |
    ELFSHTArmExIdx |
    ELFSHTArmPreemptMap |
    ELFSHTArmAttributs |
    ELFSHTArmDebugOverlay |
    ELFSHTArmOverlaySection |
    ELFSHTHiProc |
    ELFSHTLoUser |
    ELFSHTHiUser
-- | Subset of e_machine architectures this parser recognizes.
data ELFHeaderMachine =
    ELFSPARC |
    ELFx86 |
    ELFMIPS |
    ELFPowerPC |
    ELFARM |
    ELFSuperH |
    ELFIA64 |
    ELFx86_64 |
    ELFAArch64
-- | Fully decoded ELF file header (both 32- and 64-bit layouts;
-- addresses and offsets are widened to Int64).
data ELFHeader = ELFHeader {
        magic :: ELFHeaderMagic,
        format :: F.AddressSize,
        fileEndianness :: F.Endianness,
        version :: ELFHeaderVersion,
        osabi :: ELFHeaderABI,
        objectType :: ELFHeaderType,
        machine :: ELFHeaderMachine,
        entry :: Int64,
        phoff :: Int64,
        shoff :: Int64,
        flags :: Word32,
        hsize :: Word16,
        phentsize :: Word16,
        phnum :: Word16,
        shentsize :: Word16,
        shnum :: Word16,
        shstrndx :: Word16
    }
-- | One decoded program (segment) header entry.
data ELFProgramHeader = ELFProgramHeader {
        phtype :: ELFProgramHeaderType,
        phoffset :: Int64,
        phvaddr :: Int64,
        phpaddr :: Int64,
        phfilesz :: Int64,
        phmemsz :: Int64,
        phflags :: Int64,
        phalign :: Int64
    }
-- | One decoded section header entry; shname is an offset into the
-- section-name string table, not the name itself.
data ELFSectionHeader = ELFSectionHeader {
        shname :: Word32,
        shtype :: ELFSectionHeaderType,
        shflags :: Int64,
        shaddr :: Int64,
        shoffset :: Int64,
        shsize :: Int64,
        shlink :: Word32,
        shinfo :: Word32,
        shaddralign :: Int64,
        shentrysize :: Int64
    }
data ELFSymbolBind = SbLocal | SbGlobal | SbWeak | SbOsUndefine | SbProcUndefine
data ELFSymbolType = StNoType | StObject | StFunc | StSection | StFile | StCommon | StTls | StOsUndefine | StProcUndefine
-- | One decoded symbol-table entry, with its name already resolved.
data ELFSymbol = ELFSymbol {
        symname :: String,
        symaddr :: Int64,
        symsize :: Int64,
        symbind :: ELFSymbolBind,
        symtype :: ELFSymbolType,
        symother :: Word8,
        symndx :: Word16
    }
-- | Everything extracted from one ELF file, plus the parsed sections
-- keyed by their name offset in the section-name string table.
data ELFInfo = ELFInfo {
        elfHeader :: ELFHeader,
        elfProgramHeaders :: [ELFProgramHeader],
        elfSectionHeaders :: [ELFSectionHeader],
        elfFileSize :: Int,
        elfSections :: Map.Map Word32 ELFSection
    }
-- | Parser state: current offset, the input bytes, the detected byte
-- order and word size, and a stack of saved offsets for moveTo/return.
data ParseState = ParseState {
        elfOffset :: Int64,
        elfString :: B.ByteString,
        elfEndianness :: F.Endianness,
        elfSize :: F.AddressSize,
        elfOffsetState :: [Int64]
    }
-- | Represent content of a section
data ELFSection =
    -- | Section of type ELFSHTProgBits (offset in file + raw bytes)
    BinarySection Int64 B.ByteString |
    -- | Section of type ELFSHTStrTab (offset -> string)
    StringTableSection (Map.Map Word32 String) |
    -- | Symbol table (symbol address -> symbol)
    SymbolTable (Map.Map Int64 ELFSymbol)
type ParseElf a = F.Parse ParseState a
{- Instance declaration -}
-- Pretty-printing instances used by the command-line dump; the exact
-- strings below are user-visible output, so they must not be reworded
-- casually.
instance Show ELFSection where
    show (StringTableSection map) = (show map)
    show (BinarySection off _) = printf "%d: Binary data..." off
    show (SymbolTable list) = (show list)
instance Show ELFHeaderMagic where
    show (ELFHeaderMagic w s) = printf "0x%02X %s" w s
instance Show ELFHeaderVersion where
    show ELFDefaultVersion = "Original"
    show ELFOtherVersion = "Other"
instance Show ELFHeaderType where
    show ELFRelocatable = "relocatable"
    -- NOTE(review): "executalbe" is misspelled, but fixing it would
    -- change program output -- confirm before correcting.
    show ELFExecutable = "executalbe"
    show ELFShared = "shared"
    show ELFCore = "core"
instance Show ELFHeaderMachine where
    show ELFSPARC = "SPARC"
    show ELFx86 = "x86"
    show ELFMIPS = "MIPS"
    show ELFPowerPC = "PowerPC"
    show ELFARM = "ARM"
    show ELFSuperH = "SuperH"
    show ELFIA64 = "IA-64"
    show ELFx86_64 = "x86-64"
    show ELFAArch64 = "AArch64"
instance Show ELFProgramHeaderType where
    show ELFPHTNull = "Null Header"
    show ELFPHTLoad = "Loadable Segment"
    show ELFPHTDynamic = "Dynamic Linking Information"
    show ELFPHTInterp = "Interpreter Path"
    show ELFPHTNote = "Auxiliary Information"
    show ELFPHTShlib = "Shlib"
    show ELFPHTPhdr = "Program Header"
    show ELFPHTTls = "Thread Local Storage"
    show ELFPHTLoos = "Loos OS specific information"
    show ELFPHTHios = "Hios OS specific information"
    show ELFPHTLoProc = "LoProc Processor specific information"
    show ELFPHTHiProc = "HiProc Processor specific information"
    show ELFPHTGnuEhFrame= "Exception Handling Information"
    show ELFPHTGnuStack = "Stack Permissions"
    show ELFPHTGnuRelro = "Read Only Relocation Segment"
    show ELFPHTArmExUnwind= "Excpetion Unwind Table"
instance Show ELFSectionHeaderType where
    show ELFSHTNull = "Null Header"
    show ELFSHTProgBits = "Program Information"
    show ELFSHTSymTab = "Symbol Table"
    show ELFSHTStrTab = "String Table"
    show ELFSHTRela = "Relocation Entries with Addends"
    show ELFSHTHash = "Hash Table"
    show ELFSHTDynamic = "Dynamic Linking Information"
    show ELFSHTNote = "Note Section"
    show ELFSHTNoBits = "No Bits"
    show ELFSHTRel = "Relocation Entries without Addends"
    show ELFSHTShlib = "Reserved"
    show ELFSHTDynSym = "Symbol Table"
    show ELFSHTInitArray = "Array of Initialization Functions"
    show ELFSHTFiniArray = "Array of termination Function"
    show ELFSHTPreinitArray = "Pre initialization Array of Functions"
    show ELFSHTGroup = "Group Sections"
    show ELFSHTSymTabShndx = "Array of Symbol Table Index"
    show ELFSHTLoos = "Loos OS Range"
    show ELFSHTHios = "Hios OS Range"
    show ELFSHTLoProc = "LoProc Prcocessor Range"
    show ELFSHTHiProc = "HiProc Processor Range"
    show ELFSHTArmExIdx = "Exception Index Table"
    show ELFSHTArmPreemptMap= "BPABI DLL dynamic linking pre-emption map"
    show ELFSHTArmAttributs = "Object file compatibility attributes"
    show ELFSHTArmDebugOverlay = "Debug Overlay"
    show ELFSHTArmOverlaySection = "Overlay Section"
    show ELFSHTLoUser = "LoUser User Range"
    show ELFSHTHiUser = "HiUser User Range"
instance Show ELFHeader where
    show ELFHeader { magic=m, format=c, fileEndianness=e, version=v, osabi=abi, objectType=t, machine=arch, entry=ent, phoff=ph, shoff=sh, flags=f, hsize=hs, phentsize=phes, phnum=phn, shentsize=shes, shnum=shn, shstrndx=shsi} =
        printf "Magic: %s\nClass: %s\nF.Endianness: %s\nVersion: %s\nOSABI: %s\nType: %s\nMachine: %s\nEntry point: %s\nPhoff: %s\nShoff: %s\nFlags: 0x%08X\nHeader Size: %d\nProgram Header Size: %d\nProgram Header Entry Number: %d\nSection Header Size: %d\nSection Header Entry Number: %d\nIndex Section Name: %d"
            (show m)
            (show c)
            (show e)
            (show v)
            (show abi)
            (show t)
            (show arch)
            (show ent)
            (show ph)
            (show sh)
            f
            hs
            phes
            phn
            shes
            shn
            shsi
instance Show ELFProgramHeader where
    show ELFProgramHeader {phtype=pht, phoffset=pho, phvaddr=phv, phpaddr=php, phfilesz=phfs, phmemsz=phm, phflags=phf, phalign=pha} =
        printf "{\nProgram Header Type: %s\nProgram Header Offset: %s\nVirtual Address: %s\nPhysical Address: %s\nSegment File Size: %s\nSegment Memory Size: %s\nFlags: %s\nSegment Alignment: %s\n}"
            (show pht)
            (show pho)
            (show phv)
            (show php)
            (show phfs)
            (show phm)
            (show phf)
            (show pha)
instance Show ELFSectionHeader where
    show ELFSectionHeader {shname=shn, shtype=sht, shflags=shflgs, shaddr=sha, shoffset=sho, shsize=shs, shlink=shl, shinfo=shi, shaddralign=shaa, shentrysize=shes} =
        printf "%s { \nSection Type: %s\nSection Flags: %s\nSection Address: %s\nSection Offset: %s\nSection Size: %s\nSection Link: %s\nSection Info: %s\nSection Address Align: %s\nSection Entry Size: %s\n}"
            (show shn)
            (show sht)
            (show shflgs)
            (show sha)
            (show sho)
            (show shs)
            (show shl)
            (show shi)
            (show shaa)
            (show shes)
instance Show ELFHeaderABI where
    show (ELFHeaderABI abi) = printf "ABI(0x%02X)" abi
instance Show ELFSymbol where
    show (ELFSymbol {symname=name,symndx=ndx,symaddr=addr}) = printf "{%s:\n\tIn section=%d\n\tAddress=%08X\n}" name ndx addr
-- Plumbing so the generic FileDecoding parser can read/update our state.
instance F.ParseStateAccess ParseState where
    offset = elfOffset
    string = elfString
    endianness = elfEndianness
    putOffset a off = a { elfOffset = off }
    pushOffset a@ParseState {elfOffsetState=x} off = a { elfOffset=off, elfOffsetState=(elfOffset a):x}
    -- NOTE(review): popOffset is partial -- it crashes when the saved
    -- offset stack is empty (pop without a matching push).
    popOffset a@ParseState {elfOffsetState=x:xs} = a {elfOffset=x, elfOffsetState=xs}
{-- ELF Manipulation --}
-- | Transform the high nibble of a symbol's st_info byte into its
-- binding.  Per the ELF specification: STB_LOCAL = 0, STB_GLOBAL = 1,
-- STB_WEAK = 2; 10-12 are OS-specific and 13-15 processor-specific.
--
-- Bug fixed: the previous version mapped 1 and 2 to 'SbLocal' as well,
-- so every global and weak symbol was reported as local, and values
-- 3-9 fell through a non-exhaustive match.
wordToSymbolBind :: Word8 -> ELFSymbolBind
wordToSymbolBind 0 = SbLocal
wordToSymbolBind 1 = SbGlobal
wordToSymbolBind 2 = SbWeak
wordToSymbolBind w
    | w >= 10 && w <= 12 = SbOsUndefine
    | w >= 13 && w <= 15 = SbProcUndefine
    -- 3-9 are reserved/undefined; fail loudly instead of crashing with
    -- an uninformative non-exhaustive-patterns error.
    | otherwise = error $ printf "wordToSymbolBind: unknown symbol binding (0x%02X)" w
-- | Transform the low nibble of a symbol's st_info byte into its type.
-- Per the ELF specification: STT_NOTYPE = 0 .. STT_TLS = 6; 10-12 are
-- OS-specific and 13-15 processor-specific.
--
-- Bug fixed: values 7-9 previously fell through a non-exhaustive match
-- and crashed with an uninformative pattern-match error; they now fail
-- with a diagnostic message.
wordToSymbolType :: Word8 -> ELFSymbolType
wordToSymbolType 0 = StNoType
wordToSymbolType 1 = StObject
wordToSymbolType 2 = StFunc
wordToSymbolType 3 = StSection
wordToSymbolType 4 = StFile
wordToSymbolType 5 = StCommon
wordToSymbolType 6 = StTls
wordToSymbolType w
    | w >= 10 && w <= 12 = StOsUndefine
    | w >= 13 && w <= 15 = StProcUndefine
    | otherwise = error $ printf "wordToSymbolType: unknown symbol type (0x%02X)" w
{------------------------------------------------------------------------------
- Function to search in a ELFInfo data.
-----------------------------------------------------------------------------}
-- | Get the size in bytes of the parsed file.
fileSize :: ELFInfo -> Int
fileSize ELFInfo {elfFileSize=s} = s
-- | Get the list of program headers.
programHeaders :: ELFInfo -> [ELFProgramHeader]
programHeaders ELFInfo{elfProgramHeaders=phs} = phs
-- | Get the list of section headers.
sectionHeaders :: ELFInfo -> [ELFSectionHeader]
sectionHeaders ELFInfo{elfSectionHeaders=shs} = shs
-- | Get the elf header
header :: ELFInfo -> ELFHeader
header ELFInfo{elfHeader=h} = h
-- | Get the parsed sections, keyed by their name offset.
sections :: ELFInfo -> Map.Map Word32 ELFSection
sections ELFInfo{elfSections=s} = s
-- | Get the size in byte of the elf file
-- NOTE(review): duplicates 'fileSize' above; consider keeping one.
size :: ELFInfo -> Int
size ELFInfo{elfFileSize=s} = s
-- | Get Section from it section header
sectionFromHeader :: ELFSectionHeader -> ELFInfo -> Maybe ELFSection
sectionFromHeader h info = Map.lookup (shname h) (elfSections info)
-- | Get a section base on the index in the section header table
-- NOTE(review): (!!) is partial -- an out-of-range index crashes
-- instead of returning Nothing.
sectionFromIndex :: Word16 -> ELFInfo -> Maybe ELFSection
sectionFromIndex index info = sectionFromHeader ((elfSectionHeaders info) !! fromIntegral index) info
-- | Get the section from its name
sectionFromName :: String -> ELFInfo -> Maybe ELFSection
sectionFromName name info = do
    h <- sectionHeader info name
    sectionFromHeader h info
-- | Retrieve the section containing the symbol table, preferring
-- .dynsym and falling back to .symtab.
symbolTable :: ELFInfo -> Maybe ELFSection
symbolTable info = case sectionFromName ".dynsym" info of
                        Just s -> return s
                        Nothing -> sectionFromName ".symtab" info
-- | Get the symbol at a specific offset
-- (fails if the symbol table section was parsed as another kind).
symbolAt :: ELFInfo -> Int64 -> Maybe ELFSymbol
symbolAt info offset = do
    SymbolTable table <- symbolTable info
    Map.lookup offset table
-- | Get a symbol's resolved name.
symbolName :: ELFSymbol -> String
symbolName = symname
-- | Get the header of the section with the given name.
--
-- Example usage:
--
-- > sectionHeader elfFile ".text"
sectionHeader :: ELFInfo -> String -> Maybe ELFSectionHeader
sectionHeader info searchName = find hasWantedName (elfSectionHeaders info)
  where
    -- A header matches when its resolved name is exactly the one asked for.
    hasWantedName hdr = sectionName info hdr == Just searchName
-- | Header of the section that holds all section names (index e_shstrndx).
-- NOTE(review): (!!) is partial; a corrupt shstrndx crashes here.
stringTableSectionHeader :: ELFInfo -> ELFSectionHeader
stringTableSectionHeader (ELFInfo {elfHeader=h, elfSectionHeaders=shs}) = shs !! (fromIntegral (shstrndx h))
-- | Look up the string starting at @offset@ in a parsed string table.
-- lookupLE finds the entry starting at or before the offset, then the
-- leading @offset - k@ characters are dropped, so offsets pointing into
-- the middle of a stored string (suffix sharing) also resolve.
stringFromOffset :: ELFSection -> Word32 -> Maybe String
stringFromOffset (StringTableSection st) offset = do
    (k,v) <- Map.lookupLE offset st
    return $ drop (fromIntegral $ offset - k) v
stringFromOffset _ _ = Nothing
-- | Get the name of a section
sectionName :: ELFInfo -> ELFSectionHeader -> Maybe String
sectionName (info@ELFInfo {elfSections=sections}) (ELFSectionHeader {shname=off}) = do
    s <- Map.lookup (shname $ stringTableSectionHeader info) sections
    stringFromOffset s off
{- ELf specific routine -}
-- | Parse EI_CLASS: 1 selects a 32-bit object file, 2 a 64-bit one.
-- The detected word size is stored in the parse state so later
-- address/offset fields are read with the correct width.
--
-- Bug fixed: the failure message used the format string "0x02X"
-- (missing '%'), so the offending byte was never interpolated.
parseHeaderClass :: ParseElf F.AddressSize
parseHeaderClass = do
    b <- F.parseByte
    case b of
        1 -> recordSize F.S32
        2 -> recordSize F.S64
        _ -> F.bail $ printf "Unknown class (0x%02X)" b
  where
    -- Remember the address size in the state and return it.
    recordSize s = do
        state <- F.getState
        F.putState state {elfSize = s}
        return s
-- | Parse the 4-byte identification magic: the 0x7F byte followed by
-- the literal string "ELF"; bails out on anything else.
parseHeaderMagic :: ParseElf ELFHeaderMagic
parseHeaderMagic = do
    magicByte <- F.parseByte
    F.assert (magicByte == 0x7F) "First magic byte is wrong"
    ident <- F.parseIdentifier
    F.assert (ident == "ELF") (printf "Magic string is not ELF %s" ident)
    return (ELFHeaderMagic magicByte ident)
-- | Parse EI_DATA and record the byte order in the parse state.
-- Per the ELF specification, 1 (ELFDATA2LSB) means little-endian and
-- 2 (ELFDATA2MSB) means big-endian; 0 (ELFDATANONE) is invalid.
--
-- Bug fixed: the previous version treated 0 as big-endian and rejected
-- the genuine big-endian value 2, so valid MSB files failed to parse
-- and invalid files were silently accepted.
parseHeaderEndianness :: ParseElf F.Endianness
parseHeaderEndianness = do
    b <- F.parseByte
    case b of
        1 -> recordEndianness F.LittleEndian
        2 -> recordEndianness F.BigEndian
        _ -> F.bail $ printf "Bad endianness (0x%02X)" b
  where
    -- Remember the byte order in the state and return it.
    recordEndianness e = do
        state <- F.getState
        F.putState state {elfEndianness = e}
        return e
-- | Parse e_type (2 bytes): 1=relocatable, 2=executable, 3=shared, 4=core.
parseHeaderType :: ParseElf ELFHeaderType
parseHeaderType = do
    b <- F.parseHalf
    case b of
        1 -> return ELFRelocatable
        2 -> return ELFExecutable
        3 -> return ELFShared
        4 -> return ELFCore
        _ -> F.bail $ printf "Bad elf type (0x%02X)" b
-- | Parse e_machine (2 bytes): the target architecture; only the
-- architectures listed in 'ELFHeaderMachine' are accepted.
parseHeaderMachine :: ParseElf ELFHeaderMachine
parseHeaderMachine = do
    b <- F.parseHalf
    case b of
        0x02 -> return ELFSPARC
        0x03 -> return ELFx86
        0x08 -> return ELFMIPS
        0x14 -> return ELFPowerPC
        0x28 -> return ELFARM
        0x2A -> return ELFSuperH
        0x32 -> return ELFIA64
        0x3E -> return ELFx86_64
        0xB7 -> return ELFAArch64
        _ -> F.bail $ printf "Unknown machine (0x%02X)" b
-- | Parse EI_VERSION: byte 1 is the original ELF version; any other
-- value is classified (not rejected) as 'ELFOtherVersion'.
parseHeaderVersion :: ParseElf ELFHeaderVersion
parseHeaderVersion = classify <$> F.parseByte
  where
    classify 1 = ELFDefaultVersion
    classify _ = ELFOtherVersion
-- | Parse EI_OSABI; the raw byte is kept unchanged inside the wrapper.
parseHeaderABI :: ParseElf ELFHeaderABI
parseHeaderABI = ELFHeaderABI <$> F.parseByte
{-|
This function parses the ELF header, extracting all the useful
information (identification, type, architecture, table offsets/sizes).
-}
parseHeader :: ParseElf ELFHeader
parseHeader = do
    m <- parseHeaderMagic
    f <- parseHeaderClass
    endian <- parseHeaderEndianness
    v <- parseHeaderVersion
    abi <- parseHeaderABI
    F.skip 8 -- remaining e_ident bytes (ABI version + padding)
    t <- parseHeaderType
    arch <- parseHeaderMachine
    F.skip 4 -- e_version word (already captured from e_ident)
    e <- parseMachineDepWord
    ph <- parseMachineDepWord
    sh <- parseMachineDepWord
    flgs <- F.parseWord
    hs <- F.parseHalf
    phes <- F.parseHalf
    phn <- F.parseHalf
    shes <- F.parseHalf
    shn <- F.parseHalf
    shsi <- F.parseHalf
    return ELFHeader {magic=m, format=f, fileEndianness=endian, version=v, osabi=abi, objectType=t, machine=arch, entry=e, phoff=ph, shoff=sh, flags=flgs, hsize=hs, phentsize=phes, phnum=phn, shentsize=shes, shnum=shn, shstrndx=shsi}
-- | Either a 4 byte or 8 byte word
-- depending on the machine word size.
-- This can be used to parse address and offset
parseMachineDepWord :: ParseElf Int64
parseMachineDepWord = do
    state <- F.getState
    case elfSize state of
        F.S32 -> fromIntegral <$> F.parseWord
        F.S64 -> fromIntegral <$> F.parseGWord
-- | Parse a program header p_type word, including the GNU and ARM
-- OS/processor-specific values; unknown values abort the parse.
parseProgramHeaderType :: ParseElf ELFProgramHeaderType
parseProgramHeaderType = do
    w <- F.parseWord
    case w of
        0 -> return ELFPHTNull
        1 -> return ELFPHTLoad
        2 -> return ELFPHTDynamic
        3 -> return ELFPHTInterp
        4 -> return ELFPHTNote
        5 -> return ELFPHTShlib
        6 -> return ELFPHTPhdr
        7 -> return ELFPHTTls
        0x60000000 -> return ELFPHTLoos
        0x6474e550 -> return ELFPHTGnuEhFrame
        0x6474e551 -> return ELFPHTGnuStack
        0x6474e552 -> return ELFPHTGnuRelro
        0x6FFFFFFF -> return ELFPHTHios
        0x70000000 -> return ELFPHTLoProc
        0x70000001 -> return ELFPHTArmExUnwind
        0x7FFFFFFF -> return ELFPHTHiProc
        _ -> F.bail $ printf "Unrecognized program header type 0x%08X" w
-- | Parse a section header sh_type word, including the ARM-specific
-- range; unknown values abort the parse.
parseSectionHeaderType :: ParseElf ELFSectionHeaderType
parseSectionHeaderType = do
    w <- F.parseWord
    case w of
        0 -> return ELFSHTNull
        1 -> return ELFSHTProgBits
        2 -> return ELFSHTSymTab
        3 -> return ELFSHTStrTab
        4 -> return ELFSHTRela
        5 -> return ELFSHTHash
        6 -> return ELFSHTDynamic
        7 -> return ELFSHTNote
        8 -> return ELFSHTNoBits
        9 -> return ELFSHTRel
        10 -> return ELFSHTShlib
        11 -> return ELFSHTDynSym
        14 -> return ELFSHTInitArray
        15 -> return ELFSHTFiniArray
        16 -> return ELFSHTPreinitArray
        17 -> return ELFSHTGroup
        18 -> return ELFSHTSymTabShndx
        0x60000000 -> return ELFSHTLoos
        0x6FFFFFFF -> return ELFSHTHios
        0x70000000 -> return ELFSHTLoProc
        0x70000001 -> return ELFSHTArmExIdx
        0x70000002 -> return ELFSHTArmPreemptMap
        0x70000003 -> return ELFSHTArmAttributs
        0x70000004 -> return ELFSHTArmDebugOverlay
        0x70000005 -> return ELFSHTArmOverlaySection
        0x7FFFFFFF -> return ELFSHTHiProc
        0x80000000 -> return ELFSHTLoUser
        0x8FFFFFFF -> return ELFSHTHiUser
        _ -> F.bail $ printf "Unrecognized section header type 0x%08X" w
-- | Consume bytes up to (but not including) the next NUL and decode
-- them as a String.
parseString :: ParseElf String
parseString = map F.w2c <$> F.parseWhile (/= 0)
-- | Parse one program header entry; all address/offset/size fields are
-- word-size dependent (see 'parseMachineDepWord').
-- NOTE(review): this reads p_flags after p_memsz, which matches the
-- 32-bit layout; in the 64-bit layout p_flags comes right after p_type.
parseProgramHeader :: ParseElf ELFProgramHeader
parseProgramHeader = do
    pht <- parseProgramHeaderType
    pho <- parseMachineDepWord
    phv <- parseMachineDepWord
    php <- parseMachineDepWord
    phfs <- parseMachineDepWord
    phm <- parseMachineDepWord
    phf <- parseMachineDepWord
    pha <- parseMachineDepWord
    return ELFProgramHeader {phtype=pht, phoffset=pho, phvaddr=phv, phpaddr=php, phfilesz=phfs, phmemsz=phm, phflags=phf, phalign=pha}
-- | Parse one section header entry; layout is identical for 32- and
-- 64-bit files apart from the field widths.
parseSectionHeader :: ParseElf ELFSectionHeader
parseSectionHeader = do
    shn <- F.parseWord
    sht <- parseSectionHeaderType
    shflgs <- parseMachineDepWord
    sha <- parseMachineDepWord
    sho <- parseMachineDepWord
    shs <- parseMachineDepWord
    shl <- F.parseWord
    shi <- F.parseWord
    shaa <- parseMachineDepWord
    shes <- parseMachineDepWord
    return ELFSectionHeader {shname=shn, shtype=sht, shflags=shflgs, shaddr=sha, shoffset=sho, shsize=shs, shlink=shl, shinfo=shi, shaddralign=shaa, shentrysize=shes}
-- | Run @parser@ exactly @n@ times, collecting the results in order;
-- @n == 0@ yields an empty list, a negative @n@ aborts the parse.
parseArray :: ParseElf a -> Int -> ParseElf [a]
parseArray parser n
    | n >= 0 = forM [1 .. n] (const parser)
    | otherwise = F.bail "Can not parse negative number of array element"
-- | Parse @n@ consecutive program header entries.
parseProgramHeaders :: Int -> ParseElf [ELFProgramHeader]
parseProgramHeaders = parseArray parseProgramHeader
-- | Parse @n@ consecutive section header entries.
parseSectionHeaders :: Int -> ParseElf [ELFSectionHeader]
parseSectionHeaders = parseArray parseSectionHeader
-- | Move to the section that contain the name of all section
-- NOTE(review): (!!) is partial; a corrupt shstrndx crashes here.
moveToStringSectionName :: ELFInfo -> ParseElf ()
moveToStringSectionName (ELFInfo {elfHeader=h, elfSectionHeaders=shs}) =
    F.moveTo $ shoffset (shs !! (fromIntegral (shstrndx h)))
-- | Get the section content given the section name
-- (moves the parse offset to the section, then reads its raw bytes).
sectionContent :: ELFInfo -> String -> ParseElf (ELFSectionHeader, B.ByteString)
sectionContent info string = do
    case sectionHeader info string of
        Just h@(ELFSectionHeader {shoffset=off, shsize=size}) -> do
            F.moveTo off
            b <- F.parseRaw size
            return (h,b)
        Nothing -> F.bail "Section not found"
-- | Build a map from string-table offset to string, for all strings
-- between beginOff and maxOff.  The current offset is advanced past
-- each NUL terminator; offset 0 is the conventional empty/null entry.
stringsMapUpTo :: Int64 -> Int64 -> ParseElf (Map.Map Word32 String)
stringsMapUpTo beginOff maxOff = do
    currentOff <- F.offset <$> F.getState
    if currentOff + 1 >= maxOff
        then return (Map.singleton 0 "NullString")
        else do
            F.moveTo $ currentOff + 1
            Map.insert (fromIntegral (currentOff + 1 - beginOff)) <$> parseString <*> stringsMapUpTo beginOff maxOff
-- | Inner function that recurse over the symbol table to build a map of
-- symbols
symbolTableUpTo :: String -> Int64 -> StateT ELFInfo (F.Parse ParseState) (Map.Map Int64 ELFSymbol)
symbolTableUpTo stringTable maxOffset = do
    currentOff <- lift (F.offset <$> F.getState)
    if currentOff + 1 >= maxOffset
        then return Map.empty
        else do
            sym@(ELFSymbol {symaddr=addr}) <- parseSymbol stringTable
            Map.insert addr sym <$> (symbolTableUpTo stringTable maxOffset)
-- | Parse a whole string-table section into a 'StringTableSection'.
-- NOTE(review): only matches ELFSHTStrTab headers; any other section
-- type crashes with a non-exhaustive pattern.
parseStringTable :: ELFSectionHeader -> ParseElf ELFSection
parseStringTable (ELFSectionHeader {shtype=ELFSHTStrTab, shoffset=offset, shsize=size}) = do
    F.moveTo offset
    map <- stringsMapUpTo offset (size+offset)
    return $ StringTableSection map
-- | Parse a symbol-table section, resolving names against .strtab for
-- static tables and .dynstr for dynamic ones.
-- NOTE(review): non-exhaustive -- other section types crash.
parseSymbolTable :: ELFSectionHeader -> StateT ELFInfo (F.Parse ParseState) ELFSection
parseSymbolTable (ELFSectionHeader {shtype=ELFSHTSymTab,shoffset=off,shsize=size}) = do
    lift $ F.moveTo off
    SymbolTable <$> (symbolTableUpTo ".strtab" $ off+size)
parseSymbolTable (ELFSectionHeader {shtype=ELFSHTDynSym,shoffset=off,shsize=size}) = do
    lift $ F.moveTo off
    SymbolTable <$> (symbolTableUpTo ".dynstr" $ off+size)
-- | Parse one symbol entry in a symbol table, resolving its name via
-- the given string-table section (falling back to "Null symbol" when
-- that section is missing).
-- TODO: 32 and 64 bit ELF has different way of parsing this structure
parseSymbol :: String -> StateT ELFInfo (F.Parse ParseState) ELFSymbol
parseSymbol stringTable = do
    shnameidx <- lift F.parseWord
    shadd <- lift parseMachineDepWord
    shsize <- lift parseMachineDepWord
    shinfo <- lift F.parseByte
    shother <- lift F.parseByte
    shndx <- lift F.parseHalf
    sectionTable <- sectionFromName stringTable <$> get
    -- st_info packs the binding in the high nibble, the type in the low.
    -- NOTE(review): the local symbolName shadows the top-level accessor.
    let shbind = wordToSymbolBind $ shinfo `shiftR` 4
        shtype = wordToSymbolType $ shinfo .&. 0xF
        symbolName
            | isJust sectionTable = maybe "" id $ stringFromOffset (fromJust sectionTable) shnameidx
            | otherwise = "Null symbol"
     in return $ ELFSymbol symbolName shadd shsize shbind shtype shother shndx
-- | Register a parsed section in the state, keyed by its header's name.
addSection :: ELFSectionHeader -> ELFSection -> StateT ELFInfo (F.Parse ParseState) ()
addSection hdr sect = modify insertSection
  where
    insertSection info@ELFInfo{elfSections = sects} =
      info { elfSections = Map.insert (shname hdr) sect sects }
-- | Parse the section-name string table and store it in the sections map
-- under its own name.
setSectionName :: StateT ELFInfo (F.Parse ParseState) ()
setSectionName = do
  info <- get
  let hdr = stringTableSectionHeader info
  table <- lift $ parseStringTable hdr
  put info { elfSections = Map.insert (shname hdr) table (elfSections info) }
-- | Parse the named symbol-table section, if present, and add it to the
-- sections map; a missing section is silently ignored.
setSymbolTable :: String -> StateT ELFInfo (F.Parse ParseState) ()
setSymbolTable sectionName = do
  info <- get
  maybe (return ()) install (sectionHeader info sectionName)
  where
    install hdr = parseSymbolTable hdr >>= addSection hdr
-- | Get the .text section and store it into the ELFInfo
-- The raw bytes of ".text" are recorded as a 'BinarySection' keyed by the
-- section's name.
setTextSection :: StateT ELFInfo (F.Parse ParseState) ()
setTextSection = do
info@ELFInfo{elfSections=s} <- get
-- sectionContent yields both the header and the section's raw bytes.
(ELFSectionHeader {shoffset=o,shname=n},b) <- lift (sectionContent info ".text")
put $ info {elfSections=(Map.insert n (BinarySection o b) s)}
-- | Parse the named string-table section, if present, and add it to the
-- sections map; a missing section is silently ignored.
setStringTable :: String -> StateT ELFInfo (F.Parse ParseState) ()
setStringTable sectionName = do
  info <- get
  case sectionHeader info sectionName of
    Nothing  -> return ()
    Just hdr -> do
      table <- lift $ parseStringTable hdr
      addSection hdr table
-- | Post initialise the info data
-- NOTE(review): order presumably matters here -- setSectionName installs
-- the section-name string table that the later name-based lookups rely on;
-- confirm before reordering.
annotateELFInfo :: StateT ELFInfo (F.Parse ParseState) ()
annotateELFInfo = do
setSectionName
setStringTable ".strtab"
setStringTable ".dynstr"
setSymbolTable ".dynsym"
setTextSection
-- | Parse an ELF file.
-- This function first parses the different headers in the ParseElf monad
-- and then continues the annotation passes in the StateT transformer.
parseFile :: ParseElf ELFInfo
parseFile = do
hdr <- parseHeader
F.moveTo $ phoff hdr
phs <- parseProgramHeaders $ fromIntegral (phnum hdr)
F.moveTo $ shoff hdr
shs <- parseSectionHeaders $ fromIntegral (shnum hdr)
state <- F.getState
-- Seed the annotation passes with the parsed headers; elfFileSize is the
-- length of the entire input string held in the parser state.
execStateT annotateELFInfo ELFInfo {elfHeader=hdr, elfProgramHeaders=phs, elfSectionHeaders=shs, elfFileSize=(fromIntegral $ B.length $ F.string state), elfSections=Map.empty }
-- | Run an ELF parser over a ByteString, starting at offset 0 with 32-bit
-- size and little-endian defaults; returns an error message or the result.
parse :: ParseElf a -> B.ByteString -> Either String a
parse parser string = F.parse ParseState {elfOffset=0, elfSize=F.S32, elfEndianness=F.LittleEndian, elfString=string, elfOffsetState=[] } parser string
| mathk/arm-isa | ElfParser.hs | mit | 28,272 | 0 | 17 | 7,825 | 6,801 | 3,549 | 3,252 | 625 | 29 |
-- Type.hs ---
--
-- Filename: Type.hs
-- Description:
-- Author: Manuel Schneckenreither
-- Maintainer:
-- Created: Mon Oct 6 13:20:53 2014 (+0200)
-- Version:
-- Package-Requires: ()
-- Last-Updated: Mon May 8 08:42:49 2017 (+0200)
-- By: Manuel Schneckenreither
-- Update #: 115
-- URL:
-- Doc URL:
-- Keywords:
-- Compatibility:
--
--
-- Commentary:
--
--
--
--
-- Change Log:
--
--
--
--
--
--
--
-- Code:
module Data.Rewriting.ARA.ByInferenceRules.InfTreeNode.Type
( InfTreeNode (..)
, InfTreeNodeView (..)
, FunSig (..)
)
where
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerCondition
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerCost
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerDatatype
import Data.Rewriting.ARA.ByInferenceRules.Vector.Type
import Data.Maybe
import Data.Rewriting.Typed.Term
-- | One node of the inference tree: preconditions, cost conditions, the
-- statement being derived, plus bookkeeping about the enclosing function.
data InfTreeNode f v dt = InfTreeNode
{ preConditions :: [(v, ADatatype dt Int)] -- ^ e.g. (x, r(0)).
, costs :: [ACostCondition Int] -- ^ costs
, postCondition :: Maybe (Term f v, ADatatype dt Int) -- ^ the statement
, functionName :: (f, String, Bool, [ACostCondition Int], Int, Maybe [(f, Int)])
-- ^ functionName, isChildInfTreeNode (constructors), cstsOfRoot,
-- signatureNrOfRoot, isCostFreeDerivationBranch, Maybe idxOfCfSig
, history :: [(Int, String, InfTreeNodeView)] -- ^ history of the context
} deriving (Eq,Show)
-- | Display-oriented view of an inference-tree node: a full node, a leaf
-- carrying signatures, or an empty leaf.
data InfTreeNodeView = InfTreeNodeView
[(String, ADatatype String Vector)] -- ^ preConditions
[ACostCondition Vector] -- ^ costs
(Term String String, ADatatype String Vector) -- ^ postCondition
| InfTreeNodeLeafView
FunSig -- ^ cost-full signature
(Maybe FunSig) -- ^ cost-free signature
| InfTreeNodeLeafEmpty
deriving (Eq)
-- | A function signature: name, argument types, cost, and result type.
data FunSig = FunSig
String -- ^ function name
[ADatatype String Vector] -- ^ preConditions
[ACostCondition Vector] -- ^ costs
(ADatatype String Vector) -- ^ postCondition
deriving (Eq)
-- instance Show InfTreeNode where
-- show (InfTreeNode pre c post _ history') =
-- showListWithSep show pre ", "++ " |-" ++ show c ++
-- "- " ++ show post ++ "\n\n\t"
-- ++ showListWithSep show history' "\n\t"
-- | Render a view as "pre |-c- post"; leaves show their signature(s),
-- an empty leaf shows as the empty string.
instance Show InfTreeNodeView where
  show InfTreeNodeLeafEmpty = ""
  show (InfTreeNodeView pre c post) =
    showListWithSep show pre ", "++ " |-" ++ show c ++ "- " ++ show post
  show (InfTreeNodeLeafView sig cfSig) =
    -- 'maybe' replaces the previous isNothing/fromJust pair, removing the
    -- partial function while producing identical output.
    printSig sig ++ maybe "" (\cf -> "\t" ++ printSig cf) cfSig
    where printSig (FunSig f pre c post) =
            f ++ " :: " ++ showListWithSep show pre " x "++ " -" ++ show c ++
            "-> " ++ show post
-- | Render each element with the given function and join the results with
-- the separator; the empty list renders as the empty string.
showListWithSep :: Show a => (a -> String) -> [a] -> String -> String
showListWithSep render items sep = go items
  where
    go []     = []
    go [y]    = render y
    go (y:ys) = render y ++ sep ++ go ys
--
-- Type.hs ends here
| ComputationWithBoundedResources/ara-inference | src/Data/Rewriting/ARA/ByInferenceRules/InfTreeNode/Type.hs | mit | 3,342 | 0 | 13 | 1,049 | 662 | 391 | 271 | 47 | 1 |
{-# LANGUAGE BangPatterns #-}
module Data.Smashy.Number where
import Data.Bits (unsafeShiftL, unsafeShiftR, (.&.), (.|.))
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Storable.Mutable as VM
import Data.Word (Word8)
import Control.Monad.ST.Strict (runST)
-- | Variable-length encoding of a non-negative Int: a single byte for
-- values below 128, otherwise a 255 marker byte followed by 8 payload bytes
-- (see 'fromInt'/'toInt').
newtype Number = Number (VS.Vector Word8) deriving Show
-- | Encode a non-negative Int. Values < 128 fit in one byte; larger values
-- are stored as a 255 marker followed by 8 little-endian payload bytes.
-- Raises 'error' on negative input.
fromInt :: Int -> Number
fromInt n
| n < 0 = error "Passed negative"
| n < 128 = Number . VS.singleton . fromIntegral $ n
| otherwise = Number $ runST $ do
v <- VM.unsafeNew 9
-- Index 0 holds the marker byte that distinguishes the multi-byte form.
VM.unsafeWrite v 0 255
go v n 1
where
-- Write the low byte of x at index i, shift right, stop after index 8.
go v _ 9 = VS.unsafeFreeze v
go v x i = do
VM.unsafeWrite v i (fromIntegral $ 255 .&. x)
go v (unsafeShiftR x 8) (i+1)
-- | Decode a 'Number' back to a non-negative 'Int'; the inverse of 'fromInt'.
toInt :: Number -> Int
toInt (Number bytes)
  | VS.length bytes == 1 = fromIntegral . VS.head $ bytes
  | otherwise            = assemble 0 0 1
  where
    -- Re-assemble the little-endian payload stored at indices 1..8.
    assemble !acc _ 9     = acc
    assemble acc shift ix =
      assemble (acc .|. (fromIntegral (VS.unsafeIndex bytes ix) `unsafeShiftL` shift))
               (shift + 8)
               (ix + 1)
| jahaynes/smashy2 | src/Data/Smashy/Number.hs | mit | 1,017 | 0 | 15 | 289 | 419 | 219 | 200 | 26 | 2 |
module Main where
import LI11718
import SimulateT6
import qualified ${player1} as P1
import qualified ${player2} as P2
import qualified ${player3} as P3
import qualified ${player4} as P4
import System.Environment
import Text.Read
-- NOTE(review): this file is a template -- the ${...} placeholders are
-- substituted by a generator before compilation; it is not valid Haskell
-- as-is, so do not try to build it directly.
main = do
  let guiargs = GUIArgs (${mapa}) (${pista}) (${bot1}) (${bot2}) (${bot3}) (${bot4})
simulaT6 guiargs | hpacheco/HAAP | examples/plab/oracle/SimulateT6Match.hs | mit | 352 | 14 | 12 | 58 | 109 | 80 | 29 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : Operation
-- Copyright :
-- License : AllRightsReserved
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Operation (
Operation (..)
, fromChar
, inverse
, negFor
, toChar
, time
) where
-- | The four basic arithmetic operations.
data Operation = Mul | Sum | Div | Diff deriving (Eq)
-- | Shown as the single operator character (via 'toChar').
instance Show Operation where
show o = [toChar o]
-- | The arithmetic inverse of an operation.
inverse :: Operation -> Operation
inverse op = case op of
  Mul  -> Div
  Div  -> Mul
  Sum  -> Diff
  Diff -> Sum
-- | True exactly for the pairs (Mul, Div) and (Sum, Diff).
negFor :: Operation -> Operation -> Bool
negFor a b = case (a, b) of
  (Mul, Div)  -> True
  (Sum, Diff) -> True
  _           -> False
-- | Relative cost of evaluating each operation.
time :: Operation -> Int
time op = case op of
  Sum  -> 1
  Diff -> 1
  Mul  -> 2
  Div  -> 4
-- | Parse an operation from its character representation.
-- Calls 'error' for any character other than @+ - * /@.
fromChar :: Char -> Operation
fromChar '+' = Sum
fromChar '-' = Diff
fromChar '*' = Mul
fromChar '/' = Div
-- Fixed typo in the error message ("Opration" -> "Operation").
fromChar c = error $ "Invalid Operation character: " ++ [c]
-- | Character representation of an operation.
toChar :: Operation -> Char
toChar op = case op of
  Mul  -> '*'
  Div  -> '/'
  Sum  -> '+'
  Diff -> '-'
| uvNikita/SyntacticAnalyzer | src/Operation.hs | mit | 1,141 | 0 | 7 | 258 | 313 | 174 | 139 | 36 | 1 |
{-# LANGUAGE MagicHash, UnboxedTuples, CPP, RankNTypes #-}
{-# OPTIONS_GHC -fno-full-laziness #-}
module Data.TrieVector.ArrayPrim (
Array
, MArray
, new
, write
, read
, thaw
, index
, freeze
, unsafeFreeze
, unsafeThaw
, sizeof
, sizeofMut
, copy
, copyMut
, clone
, cloneMut
, cas
, sameMut
, run
) where
import GHC.Prim
import GHC.Types
import GHC.Base (realWorld#)
import Prelude hiding (read)
-- Thin aliases over GHC's primitive boxed-array types and operations, so
-- callers can use short unqualified names from this module.
type Array = Array#
type MArray = MutableArray#
new = newArray#
write = writeArray#
read = readArray#
thaw = thawArray#
index = indexArray#
freeze = freezeArray#
unsafeFreeze = unsafeFreezeArray#
unsafeThaw = unsafeThawArray#
sizeof = sizeofArray#
sizeofMut = sizeofMutableArray#
copy = copyArray#
copyMut = copyMutableArray#
clone = cloneArray#
cloneMut = cloneMutableArray#
cas = casArray#
sameMut = sameMutableArray#
-- | Run a primitive state thread producing an immutable 'Array', by
-- applying it directly to 'realWorld#'.
-- NOTE(review): this is only safe if the supplied computation is pure and
-- performs no observable side effects -- confirm at call sites.
run :: (forall s. State# s -> (# State# s, Array a #)) -> Array a
run strep = case strep realWorld# of
(# _, arr #) -> arr
-- Phase-controlled inline ([0]); presumably so rewrite rules can fire
-- before 'run' is inlined -- TODO confirm.
{-# INLINE [0] run #-}
{-# INLINE new #-}
{-# INLINE write #-}
{-# INLINE read #-}
{-# INLINE thaw #-}
{-# INLINE index #-}
{-# INLINE freeze #-}
{-# INLINE unsafeFreeze #-}
{-# INLINE unsafeThaw #-}
{-# INLINE sizeof #-}
{-# INLINE sizeofMut #-}
{-# INLINE copy #-}
{-# INLINE copyMut #-}
{-# INLINE clone #-}
{-# INLINE cloneMut #-}
{-# INLINE cas #-}
{-# INLINE sameMut #-}
| AndrasKovacs/trie-vector | Data/TrieVector/ArrayPrim.hs | mit | 1,413 | 0 | 10 | 312 | 270 | 169 | 101 | 64 | 1 |
module Prepare.Decompose where
import Data.Text (Text)
import qualified Data.Text as Text
-- decompose using the canonical mapping but without affecting the order
-- | Canonically decompose every character of the input text via
-- 'decomposeChar'; no reordering of combining marks is performed.
decompose :: Text -> Text
decompose = Text.concatMap decomposeChar
-- generated from ucd
decomposeChar :: Char -> Text
decomposeChar '\x00C0' = "\x0041\x0300"
decomposeChar '\x00C1' = "\x0041\x0301"
decomposeChar '\x00C2' = "\x0041\x0302"
decomposeChar '\x00C3' = "\x0041\x0303"
decomposeChar '\x00C4' = "\x0041\x0308"
decomposeChar '\x00C5' = "\x0041\x030A"
decomposeChar '\x00C7' = "\x0043\x0327"
decomposeChar '\x00C8' = "\x0045\x0300"
decomposeChar '\x00C9' = "\x0045\x0301"
decomposeChar '\x00CA' = "\x0045\x0302"
decomposeChar '\x00CB' = "\x0045\x0308"
decomposeChar '\x00CC' = "\x0049\x0300"
decomposeChar '\x00CD' = "\x0049\x0301"
decomposeChar '\x00CE' = "\x0049\x0302"
decomposeChar '\x00CF' = "\x0049\x0308"
decomposeChar '\x00D1' = "\x004E\x0303"
decomposeChar '\x00D2' = "\x004F\x0300"
decomposeChar '\x00D3' = "\x004F\x0301"
decomposeChar '\x00D4' = "\x004F\x0302"
decomposeChar '\x00D5' = "\x004F\x0303"
decomposeChar '\x00D6' = "\x004F\x0308"
decomposeChar '\x00D9' = "\x0055\x0300"
decomposeChar '\x00DA' = "\x0055\x0301"
decomposeChar '\x00DB' = "\x0055\x0302"
decomposeChar '\x00DC' = "\x0055\x0308"
decomposeChar '\x00DD' = "\x0059\x0301"
decomposeChar '\x00E0' = "\x0061\x0300"
decomposeChar '\x00E1' = "\x0061\x0301"
decomposeChar '\x00E2' = "\x0061\x0302"
decomposeChar '\x00E3' = "\x0061\x0303"
decomposeChar '\x00E4' = "\x0061\x0308"
decomposeChar '\x00E5' = "\x0061\x030A"
decomposeChar '\x00E7' = "\x0063\x0327"
decomposeChar '\x00E8' = "\x0065\x0300"
decomposeChar '\x00E9' = "\x0065\x0301"
decomposeChar '\x00EA' = "\x0065\x0302"
decomposeChar '\x00EB' = "\x0065\x0308"
decomposeChar '\x00EC' = "\x0069\x0300"
decomposeChar '\x00ED' = "\x0069\x0301"
decomposeChar '\x00EE' = "\x0069\x0302"
decomposeChar '\x00EF' = "\x0069\x0308"
decomposeChar '\x00F1' = "\x006E\x0303"
decomposeChar '\x00F2' = "\x006F\x0300"
decomposeChar '\x00F3' = "\x006F\x0301"
decomposeChar '\x00F4' = "\x006F\x0302"
decomposeChar '\x00F5' = "\x006F\x0303"
decomposeChar '\x00F6' = "\x006F\x0308"
decomposeChar '\x00F9' = "\x0075\x0300"
decomposeChar '\x00FA' = "\x0075\x0301"
decomposeChar '\x00FB' = "\x0075\x0302"
decomposeChar '\x00FC' = "\x0075\x0308"
decomposeChar '\x00FD' = "\x0079\x0301"
decomposeChar '\x00FF' = "\x0079\x0308"
decomposeChar '\x0100' = "\x0041\x0304"
decomposeChar '\x0101' = "\x0061\x0304"
decomposeChar '\x0102' = "\x0041\x0306"
decomposeChar '\x0103' = "\x0061\x0306"
decomposeChar '\x0104' = "\x0041\x0328"
decomposeChar '\x0105' = "\x0061\x0328"
decomposeChar '\x0106' = "\x0043\x0301"
decomposeChar '\x0107' = "\x0063\x0301"
decomposeChar '\x0108' = "\x0043\x0302"
decomposeChar '\x0109' = "\x0063\x0302"
decomposeChar '\x010A' = "\x0043\x0307"
decomposeChar '\x010B' = "\x0063\x0307"
decomposeChar '\x010C' = "\x0043\x030C"
decomposeChar '\x010D' = "\x0063\x030C"
decomposeChar '\x010E' = "\x0044\x030C"
decomposeChar '\x010F' = "\x0064\x030C"
decomposeChar '\x0112' = "\x0045\x0304"
decomposeChar '\x0113' = "\x0065\x0304"
decomposeChar '\x0114' = "\x0045\x0306"
decomposeChar '\x0115' = "\x0065\x0306"
decomposeChar '\x0116' = "\x0045\x0307"
decomposeChar '\x0117' = "\x0065\x0307"
decomposeChar '\x0118' = "\x0045\x0328"
decomposeChar '\x0119' = "\x0065\x0328"
decomposeChar '\x011A' = "\x0045\x030C"
decomposeChar '\x011B' = "\x0065\x030C"
decomposeChar '\x011C' = "\x0047\x0302"
decomposeChar '\x011D' = "\x0067\x0302"
decomposeChar '\x011E' = "\x0047\x0306"
decomposeChar '\x011F' = "\x0067\x0306"
decomposeChar '\x0120' = "\x0047\x0307"
decomposeChar '\x0121' = "\x0067\x0307"
decomposeChar '\x0122' = "\x0047\x0327"
decomposeChar '\x0123' = "\x0067\x0327"
decomposeChar '\x0124' = "\x0048\x0302"
decomposeChar '\x0125' = "\x0068\x0302"
decomposeChar '\x0128' = "\x0049\x0303"
decomposeChar '\x0129' = "\x0069\x0303"
decomposeChar '\x012A' = "\x0049\x0304"
decomposeChar '\x012B' = "\x0069\x0304"
decomposeChar '\x012C' = "\x0049\x0306"
decomposeChar '\x012D' = "\x0069\x0306"
decomposeChar '\x012E' = "\x0049\x0328"
decomposeChar '\x012F' = "\x0069\x0328"
decomposeChar '\x0130' = "\x0049\x0307"
decomposeChar '\x0134' = "\x004A\x0302"
decomposeChar '\x0135' = "\x006A\x0302"
decomposeChar '\x0136' = "\x004B\x0327"
decomposeChar '\x0137' = "\x006B\x0327"
decomposeChar '\x0139' = "\x004C\x0301"
decomposeChar '\x013A' = "\x006C\x0301"
decomposeChar '\x013B' = "\x004C\x0327"
decomposeChar '\x013C' = "\x006C\x0327"
decomposeChar '\x013D' = "\x004C\x030C"
decomposeChar '\x013E' = "\x006C\x030C"
decomposeChar '\x0143' = "\x004E\x0301"
decomposeChar '\x0144' = "\x006E\x0301"
decomposeChar '\x0145' = "\x004E\x0327"
decomposeChar '\x0146' = "\x006E\x0327"
decomposeChar '\x0147' = "\x004E\x030C"
decomposeChar '\x0148' = "\x006E\x030C"
decomposeChar '\x014C' = "\x004F\x0304"
decomposeChar '\x014D' = "\x006F\x0304"
decomposeChar '\x014E' = "\x004F\x0306"
decomposeChar '\x014F' = "\x006F\x0306"
decomposeChar '\x0150' = "\x004F\x030B"
decomposeChar '\x0151' = "\x006F\x030B"
decomposeChar '\x0154' = "\x0052\x0301"
decomposeChar '\x0155' = "\x0072\x0301"
decomposeChar '\x0156' = "\x0052\x0327"
decomposeChar '\x0157' = "\x0072\x0327"
decomposeChar '\x0158' = "\x0052\x030C"
decomposeChar '\x0159' = "\x0072\x030C"
decomposeChar '\x015A' = "\x0053\x0301"
decomposeChar '\x015B' = "\x0073\x0301"
decomposeChar '\x015C' = "\x0053\x0302"
decomposeChar '\x015D' = "\x0073\x0302"
decomposeChar '\x015E' = "\x0053\x0327"
decomposeChar '\x015F' = "\x0073\x0327"
decomposeChar '\x0160' = "\x0053\x030C"
decomposeChar '\x0161' = "\x0073\x030C"
decomposeChar '\x0162' = "\x0054\x0327"
decomposeChar '\x0163' = "\x0074\x0327"
decomposeChar '\x0164' = "\x0054\x030C"
decomposeChar '\x0165' = "\x0074\x030C"
decomposeChar '\x0168' = "\x0055\x0303"
decomposeChar '\x0169' = "\x0075\x0303"
decomposeChar '\x016A' = "\x0055\x0304"
decomposeChar '\x016B' = "\x0075\x0304"
decomposeChar '\x016C' = "\x0055\x0306"
decomposeChar '\x016D' = "\x0075\x0306"
decomposeChar '\x016E' = "\x0055\x030A"
decomposeChar '\x016F' = "\x0075\x030A"
decomposeChar '\x0170' = "\x0055\x030B"
decomposeChar '\x0171' = "\x0075\x030B"
decomposeChar '\x0172' = "\x0055\x0328"
decomposeChar '\x0173' = "\x0075\x0328"
decomposeChar '\x0174' = "\x0057\x0302"
decomposeChar '\x0175' = "\x0077\x0302"
decomposeChar '\x0176' = "\x0059\x0302"
decomposeChar '\x0177' = "\x0079\x0302"
decomposeChar '\x0178' = "\x0059\x0308"
decomposeChar '\x0179' = "\x005A\x0301"
decomposeChar '\x017A' = "\x007A\x0301"
decomposeChar '\x017B' = "\x005A\x0307"
decomposeChar '\x017C' = "\x007A\x0307"
decomposeChar '\x017D' = "\x005A\x030C"
decomposeChar '\x017E' = "\x007A\x030C"
decomposeChar '\x01A0' = "\x004F\x031B"
decomposeChar '\x01A1' = "\x006F\x031B"
decomposeChar '\x01AF' = "\x0055\x031B"
decomposeChar '\x01B0' = "\x0075\x031B"
decomposeChar '\x01CD' = "\x0041\x030C"
decomposeChar '\x01CE' = "\x0061\x030C"
decomposeChar '\x01CF' = "\x0049\x030C"
decomposeChar '\x01D0' = "\x0069\x030C"
decomposeChar '\x01D1' = "\x004F\x030C"
decomposeChar '\x01D2' = "\x006F\x030C"
decomposeChar '\x01D3' = "\x0055\x030C"
decomposeChar '\x01D4' = "\x0075\x030C"
decomposeChar '\x01D5' = "\x0055\x0308\x0304"
decomposeChar '\x01D6' = "\x0075\x0308\x0304"
decomposeChar '\x01D7' = "\x0055\x0308\x0301"
decomposeChar '\x01D8' = "\x0075\x0308\x0301"
decomposeChar '\x01D9' = "\x0055\x0308\x030C"
decomposeChar '\x01DA' = "\x0075\x0308\x030C"
decomposeChar '\x01DB' = "\x0055\x0308\x0300"
decomposeChar '\x01DC' = "\x0075\x0308\x0300"
decomposeChar '\x01DE' = "\x0041\x0308\x0304"
decomposeChar '\x01DF' = "\x0061\x0308\x0304"
decomposeChar '\x01E0' = "\x0041\x0307\x0304"
decomposeChar '\x01E1' = "\x0061\x0307\x0304"
decomposeChar '\x01E2' = "\x00C6\x0304"
decomposeChar '\x01E3' = "\x00E6\x0304"
decomposeChar '\x01E6' = "\x0047\x030C"
decomposeChar '\x01E7' = "\x0067\x030C"
decomposeChar '\x01E8' = "\x004B\x030C"
decomposeChar '\x01E9' = "\x006B\x030C"
decomposeChar '\x01EA' = "\x004F\x0328"
decomposeChar '\x01EB' = "\x006F\x0328"
decomposeChar '\x01EC' = "\x004F\x0328\x0304"
decomposeChar '\x01ED' = "\x006F\x0328\x0304"
decomposeChar '\x01EE' = "\x01B7\x030C"
decomposeChar '\x01EF' = "\x0292\x030C"
decomposeChar '\x01F0' = "\x006A\x030C"
decomposeChar '\x01F4' = "\x0047\x0301"
decomposeChar '\x01F5' = "\x0067\x0301"
decomposeChar '\x01F8' = "\x004E\x0300"
decomposeChar '\x01F9' = "\x006E\x0300"
decomposeChar '\x01FA' = "\x0041\x030A\x0301"
decomposeChar '\x01FB' = "\x0061\x030A\x0301"
decomposeChar '\x01FC' = "\x00C6\x0301"
decomposeChar '\x01FD' = "\x00E6\x0301"
decomposeChar '\x01FE' = "\x00D8\x0301"
decomposeChar '\x01FF' = "\x00F8\x0301"
decomposeChar '\x0200' = "\x0041\x030F"
decomposeChar '\x0201' = "\x0061\x030F"
decomposeChar '\x0202' = "\x0041\x0311"
decomposeChar '\x0203' = "\x0061\x0311"
decomposeChar '\x0204' = "\x0045\x030F"
decomposeChar '\x0205' = "\x0065\x030F"
decomposeChar '\x0206' = "\x0045\x0311"
decomposeChar '\x0207' = "\x0065\x0311"
decomposeChar '\x0208' = "\x0049\x030F"
decomposeChar '\x0209' = "\x0069\x030F"
decomposeChar '\x020A' = "\x0049\x0311"
decomposeChar '\x020B' = "\x0069\x0311"
decomposeChar '\x020C' = "\x004F\x030F"
decomposeChar '\x020D' = "\x006F\x030F"
decomposeChar '\x020E' = "\x004F\x0311"
decomposeChar '\x020F' = "\x006F\x0311"
decomposeChar '\x0210' = "\x0052\x030F"
decomposeChar '\x0211' = "\x0072\x030F"
decomposeChar '\x0212' = "\x0052\x0311"
decomposeChar '\x0213' = "\x0072\x0311"
decomposeChar '\x0214' = "\x0055\x030F"
decomposeChar '\x0215' = "\x0075\x030F"
decomposeChar '\x0216' = "\x0055\x0311"
decomposeChar '\x0217' = "\x0075\x0311"
decomposeChar '\x0218' = "\x0053\x0326"
decomposeChar '\x0219' = "\x0073\x0326"
decomposeChar '\x021A' = "\x0054\x0326"
decomposeChar '\x021B' = "\x0074\x0326"
decomposeChar '\x021E' = "\x0048\x030C"
decomposeChar '\x021F' = "\x0068\x030C"
decomposeChar '\x0226' = "\x0041\x0307"
decomposeChar '\x0227' = "\x0061\x0307"
decomposeChar '\x0228' = "\x0045\x0327"
decomposeChar '\x0229' = "\x0065\x0327"
decomposeChar '\x022A' = "\x004F\x0308\x0304"
decomposeChar '\x022B' = "\x006F\x0308\x0304"
decomposeChar '\x022C' = "\x004F\x0303\x0304"
decomposeChar '\x022D' = "\x006F\x0303\x0304"
decomposeChar '\x022E' = "\x004F\x0307"
decomposeChar '\x022F' = "\x006F\x0307"
decomposeChar '\x0230' = "\x004F\x0307\x0304"
decomposeChar '\x0231' = "\x006F\x0307\x0304"
decomposeChar '\x0232' = "\x0059\x0304"
decomposeChar '\x0233' = "\x0079\x0304"
decomposeChar '\x0340' = "\x0300"
decomposeChar '\x0341' = "\x0301"
decomposeChar '\x0343' = "\x0313"
decomposeChar '\x0344' = "\x0308\x0301"
decomposeChar '\x0374' = "\x02B9"
decomposeChar '\x037E' = "\x003B"
decomposeChar '\x0385' = "\x00A8\x0301"
decomposeChar '\x0386' = "\x0391\x0301"
decomposeChar '\x0387' = "\x00B7"
decomposeChar '\x0388' = "\x0395\x0301"
decomposeChar '\x0389' = "\x0397\x0301"
decomposeChar '\x038A' = "\x0399\x0301"
decomposeChar '\x038C' = "\x039F\x0301"
decomposeChar '\x038E' = "\x03A5\x0301"
decomposeChar '\x038F' = "\x03A9\x0301"
decomposeChar '\x0390' = "\x03B9\x0308\x0301"
decomposeChar '\x03AA' = "\x0399\x0308"
decomposeChar '\x03AB' = "\x03A5\x0308"
decomposeChar '\x03AC' = "\x03B1\x0301"
decomposeChar '\x03AD' = "\x03B5\x0301"
decomposeChar '\x03AE' = "\x03B7\x0301"
decomposeChar '\x03AF' = "\x03B9\x0301"
decomposeChar '\x03B0' = "\x03C5\x0308\x0301"
decomposeChar '\x03CA' = "\x03B9\x0308"
decomposeChar '\x03CB' = "\x03C5\x0308"
decomposeChar '\x03CC' = "\x03BF\x0301"
decomposeChar '\x03CD' = "\x03C5\x0301"
decomposeChar '\x03CE' = "\x03C9\x0301"
decomposeChar '\x03D3' = "\x03D2\x0301"
decomposeChar '\x03D4' = "\x03D2\x0308"
decomposeChar '\x0400' = "\x0415\x0300"
decomposeChar '\x0401' = "\x0415\x0308"
decomposeChar '\x0403' = "\x0413\x0301"
decomposeChar '\x0407' = "\x0406\x0308"
decomposeChar '\x040C' = "\x041A\x0301"
decomposeChar '\x040D' = "\x0418\x0300"
decomposeChar '\x040E' = "\x0423\x0306"
decomposeChar '\x0419' = "\x0418\x0306"
decomposeChar '\x0439' = "\x0438\x0306"
decomposeChar '\x0450' = "\x0435\x0300"
decomposeChar '\x0451' = "\x0435\x0308"
decomposeChar '\x0453' = "\x0433\x0301"
decomposeChar '\x0457' = "\x0456\x0308"
decomposeChar '\x045C' = "\x043A\x0301"
decomposeChar '\x045D' = "\x0438\x0300"
decomposeChar '\x045E' = "\x0443\x0306"
decomposeChar '\x0476' = "\x0474\x030F"
decomposeChar '\x0477' = "\x0475\x030F"
decomposeChar '\x04C1' = "\x0416\x0306"
decomposeChar '\x04C2' = "\x0436\x0306"
decomposeChar '\x04D0' = "\x0410\x0306"
decomposeChar '\x04D1' = "\x0430\x0306"
decomposeChar '\x04D2' = "\x0410\x0308"
decomposeChar '\x04D3' = "\x0430\x0308"
decomposeChar '\x04D6' = "\x0415\x0306"
decomposeChar '\x04D7' = "\x0435\x0306"
decomposeChar '\x04DA' = "\x04D8\x0308"
decomposeChar '\x04DB' = "\x04D9\x0308"
decomposeChar '\x04DC' = "\x0416\x0308"
decomposeChar '\x04DD' = "\x0436\x0308"
decomposeChar '\x04DE' = "\x0417\x0308"
decomposeChar '\x04DF' = "\x0437\x0308"
decomposeChar '\x04E2' = "\x0418\x0304"
decomposeChar '\x04E3' = "\x0438\x0304"
decomposeChar '\x04E4' = "\x0418\x0308"
decomposeChar '\x04E5' = "\x0438\x0308"
decomposeChar '\x04E6' = "\x041E\x0308"
decomposeChar '\x04E7' = "\x043E\x0308"
decomposeChar '\x04EA' = "\x04E8\x0308"
decomposeChar '\x04EB' = "\x04E9\x0308"
decomposeChar '\x04EC' = "\x042D\x0308"
decomposeChar '\x04ED' = "\x044D\x0308"
decomposeChar '\x04EE' = "\x0423\x0304"
decomposeChar '\x04EF' = "\x0443\x0304"
decomposeChar '\x04F0' = "\x0423\x0308"
decomposeChar '\x04F1' = "\x0443\x0308"
decomposeChar '\x04F2' = "\x0423\x030B"
decomposeChar '\x04F3' = "\x0443\x030B"
decomposeChar '\x04F4' = "\x0427\x0308"
decomposeChar '\x04F5' = "\x0447\x0308"
decomposeChar '\x04F8' = "\x042B\x0308"
decomposeChar '\x04F9' = "\x044B\x0308"
decomposeChar '\x0622' = "\x0627\x0653"
decomposeChar '\x0623' = "\x0627\x0654"
decomposeChar '\x0624' = "\x0648\x0654"
decomposeChar '\x0625' = "\x0627\x0655"
decomposeChar '\x0626' = "\x064A\x0654"
decomposeChar '\x06C0' = "\x06D5\x0654"
decomposeChar '\x06C2' = "\x06C1\x0654"
decomposeChar '\x06D3' = "\x06D2\x0654"
decomposeChar '\x0929' = "\x0928\x093C"
decomposeChar '\x0931' = "\x0930\x093C"
decomposeChar '\x0934' = "\x0933\x093C"
decomposeChar '\x0958' = "\x0915\x093C"
decomposeChar '\x0959' = "\x0916\x093C"
decomposeChar '\x095A' = "\x0917\x093C"
decomposeChar '\x095B' = "\x091C\x093C"
decomposeChar '\x095C' = "\x0921\x093C"
decomposeChar '\x095D' = "\x0922\x093C"
decomposeChar '\x095E' = "\x092B\x093C"
decomposeChar '\x095F' = "\x092F\x093C"
decomposeChar '\x09CB' = "\x09C7\x09BE"
decomposeChar '\x09CC' = "\x09C7\x09D7"
decomposeChar '\x09DC' = "\x09A1\x09BC"
decomposeChar '\x09DD' = "\x09A2\x09BC"
decomposeChar '\x09DF' = "\x09AF\x09BC"
decomposeChar '\x0A33' = "\x0A32\x0A3C"
decomposeChar '\x0A36' = "\x0A38\x0A3C"
decomposeChar '\x0A59' = "\x0A16\x0A3C"
decomposeChar '\x0A5A' = "\x0A17\x0A3C"
decomposeChar '\x0A5B' = "\x0A1C\x0A3C"
decomposeChar '\x0A5E' = "\x0A2B\x0A3C"
decomposeChar '\x0B48' = "\x0B47\x0B56"
decomposeChar '\x0B4B' = "\x0B47\x0B3E"
decomposeChar '\x0B4C' = "\x0B47\x0B57"
decomposeChar '\x0B5C' = "\x0B21\x0B3C"
decomposeChar '\x0B5D' = "\x0B22\x0B3C"
decomposeChar '\x0B94' = "\x0B92\x0BD7"
decomposeChar '\x0BCA' = "\x0BC6\x0BBE"
decomposeChar '\x0BCB' = "\x0BC7\x0BBE"
decomposeChar '\x0BCC' = "\x0BC6\x0BD7"
decomposeChar '\x0C48' = "\x0C46\x0C56"
decomposeChar '\x0CC0' = "\x0CBF\x0CD5"
decomposeChar '\x0CC7' = "\x0CC6\x0CD5"
decomposeChar '\x0CC8' = "\x0CC6\x0CD6"
decomposeChar '\x0CCA' = "\x0CC6\x0CC2"
decomposeChar '\x0CCB' = "\x0CC6\x0CC2\x0CD5"
decomposeChar '\x0D4A' = "\x0D46\x0D3E"
decomposeChar '\x0D4B' = "\x0D47\x0D3E"
decomposeChar '\x0D4C' = "\x0D46\x0D57"
decomposeChar '\x0DDA' = "\x0DD9\x0DCA"
decomposeChar '\x0DDC' = "\x0DD9\x0DCF"
decomposeChar '\x0DDD' = "\x0DD9\x0DCF\x0DCA"
decomposeChar '\x0DDE' = "\x0DD9\x0DDF"
decomposeChar '\x0F43' = "\x0F42\x0FB7"
decomposeChar '\x0F4D' = "\x0F4C\x0FB7"
decomposeChar '\x0F52' = "\x0F51\x0FB7"
decomposeChar '\x0F57' = "\x0F56\x0FB7"
decomposeChar '\x0F5C' = "\x0F5B\x0FB7"
decomposeChar '\x0F69' = "\x0F40\x0FB5"
decomposeChar '\x0F73' = "\x0F71\x0F72"
decomposeChar '\x0F75' = "\x0F71\x0F74"
decomposeChar '\x0F76' = "\x0FB2\x0F80"
decomposeChar '\x0F78' = "\x0FB3\x0F80"
decomposeChar '\x0F81' = "\x0F71\x0F80"
decomposeChar '\x0F93' = "\x0F92\x0FB7"
decomposeChar '\x0F9D' = "\x0F9C\x0FB7"
decomposeChar '\x0FA2' = "\x0FA1\x0FB7"
decomposeChar '\x0FA7' = "\x0FA6\x0FB7"
decomposeChar '\x0FAC' = "\x0FAB\x0FB7"
decomposeChar '\x0FB9' = "\x0F90\x0FB5"
decomposeChar '\x1026' = "\x1025\x102E"
decomposeChar '\x1B06' = "\x1B05\x1B35"
decomposeChar '\x1B08' = "\x1B07\x1B35"
decomposeChar '\x1B0A' = "\x1B09\x1B35"
decomposeChar '\x1B0C' = "\x1B0B\x1B35"
decomposeChar '\x1B0E' = "\x1B0D\x1B35"
decomposeChar '\x1B12' = "\x1B11\x1B35"
decomposeChar '\x1B3B' = "\x1B3A\x1B35"
decomposeChar '\x1B3D' = "\x1B3C\x1B35"
decomposeChar '\x1B40' = "\x1B3E\x1B35"
decomposeChar '\x1B41' = "\x1B3F\x1B35"
decomposeChar '\x1B43' = "\x1B42\x1B35"
decomposeChar '\x1E00' = "\x0041\x0325"
decomposeChar '\x1E01' = "\x0061\x0325"
decomposeChar '\x1E02' = "\x0042\x0307"
decomposeChar '\x1E03' = "\x0062\x0307"
decomposeChar '\x1E04' = "\x0042\x0323"
decomposeChar '\x1E05' = "\x0062\x0323"
decomposeChar '\x1E06' = "\x0042\x0331"
decomposeChar '\x1E07' = "\x0062\x0331"
decomposeChar '\x1E08' = "\x0043\x0327\x0301"
decomposeChar '\x1E09' = "\x0063\x0327\x0301"
decomposeChar '\x1E0A' = "\x0044\x0307"
decomposeChar '\x1E0B' = "\x0064\x0307"
decomposeChar '\x1E0C' = "\x0044\x0323"
decomposeChar '\x1E0D' = "\x0064\x0323"
decomposeChar '\x1E0E' = "\x0044\x0331"
decomposeChar '\x1E0F' = "\x0064\x0331"
decomposeChar '\x1E10' = "\x0044\x0327"
decomposeChar '\x1E11' = "\x0064\x0327"
decomposeChar '\x1E12' = "\x0044\x032D"
decomposeChar '\x1E13' = "\x0064\x032D"
decomposeChar '\x1E14' = "\x0045\x0304\x0300"
decomposeChar '\x1E15' = "\x0065\x0304\x0300"
decomposeChar '\x1E16' = "\x0045\x0304\x0301"
decomposeChar '\x1E17' = "\x0065\x0304\x0301"
decomposeChar '\x1E18' = "\x0045\x032D"
decomposeChar '\x1E19' = "\x0065\x032D"
decomposeChar '\x1E1A' = "\x0045\x0330"
decomposeChar '\x1E1B' = "\x0065\x0330"
decomposeChar '\x1E1C' = "\x0045\x0327\x0306"
decomposeChar '\x1E1D' = "\x0065\x0327\x0306"
decomposeChar '\x1E1E' = "\x0046\x0307"
decomposeChar '\x1E1F' = "\x0066\x0307"
decomposeChar '\x1E20' = "\x0047\x0304"
decomposeChar '\x1E21' = "\x0067\x0304"
decomposeChar '\x1E22' = "\x0048\x0307"
decomposeChar '\x1E23' = "\x0068\x0307"
decomposeChar '\x1E24' = "\x0048\x0323"
decomposeChar '\x1E25' = "\x0068\x0323"
decomposeChar '\x1E26' = "\x0048\x0308"
decomposeChar '\x1E27' = "\x0068\x0308"
decomposeChar '\x1E28' = "\x0048\x0327"
decomposeChar '\x1E29' = "\x0068\x0327"
decomposeChar '\x1E2A' = "\x0048\x032E"
decomposeChar '\x1E2B' = "\x0068\x032E"
decomposeChar '\x1E2C' = "\x0049\x0330"
decomposeChar '\x1E2D' = "\x0069\x0330"
decomposeChar '\x1E2E' = "\x0049\x0308\x0301"
decomposeChar '\x1E2F' = "\x0069\x0308\x0301"
decomposeChar '\x1E30' = "\x004B\x0301"
decomposeChar '\x1E31' = "\x006B\x0301"
decomposeChar '\x1E32' = "\x004B\x0323"
decomposeChar '\x1E33' = "\x006B\x0323"
decomposeChar '\x1E34' = "\x004B\x0331"
decomposeChar '\x1E35' = "\x006B\x0331"
decomposeChar '\x1E36' = "\x004C\x0323"
decomposeChar '\x1E37' = "\x006C\x0323"
decomposeChar '\x1E38' = "\x004C\x0323\x0304"
decomposeChar '\x1E39' = "\x006C\x0323\x0304"
decomposeChar '\x1E3A' = "\x004C\x0331"
decomposeChar '\x1E3B' = "\x006C\x0331"
decomposeChar '\x1E3C' = "\x004C\x032D"
decomposeChar '\x1E3D' = "\x006C\x032D"
decomposeChar '\x1E3E' = "\x004D\x0301"
decomposeChar '\x1E3F' = "\x006D\x0301"
decomposeChar '\x1E40' = "\x004D\x0307"
decomposeChar '\x1E41' = "\x006D\x0307"
decomposeChar '\x1E42' = "\x004D\x0323"
decomposeChar '\x1E43' = "\x006D\x0323"
decomposeChar '\x1E44' = "\x004E\x0307"
decomposeChar '\x1E45' = "\x006E\x0307"
decomposeChar '\x1E46' = "\x004E\x0323"
decomposeChar '\x1E47' = "\x006E\x0323"
decomposeChar '\x1E48' = "\x004E\x0331"
decomposeChar '\x1E49' = "\x006E\x0331"
decomposeChar '\x1E4A' = "\x004E\x032D"
decomposeChar '\x1E4B' = "\x006E\x032D"
decomposeChar '\x1E4C' = "\x004F\x0303\x0301"
decomposeChar '\x1E4D' = "\x006F\x0303\x0301"
decomposeChar '\x1E4E' = "\x004F\x0303\x0308"
decomposeChar '\x1E4F' = "\x006F\x0303\x0308"
decomposeChar '\x1E50' = "\x004F\x0304\x0300"
decomposeChar '\x1E51' = "\x006F\x0304\x0300"
decomposeChar '\x1E52' = "\x004F\x0304\x0301"
decomposeChar '\x1E53' = "\x006F\x0304\x0301"
decomposeChar '\x1E54' = "\x0050\x0301"
decomposeChar '\x1E55' = "\x0070\x0301"
decomposeChar '\x1E56' = "\x0050\x0307"
decomposeChar '\x1E57' = "\x0070\x0307"
decomposeChar '\x1E58' = "\x0052\x0307"
decomposeChar '\x1E59' = "\x0072\x0307"
decomposeChar '\x1E5A' = "\x0052\x0323"
decomposeChar '\x1E5B' = "\x0072\x0323"
decomposeChar '\x1E5C' = "\x0052\x0323\x0304"
decomposeChar '\x1E5D' = "\x0072\x0323\x0304"
decomposeChar '\x1E5E' = "\x0052\x0331"
decomposeChar '\x1E5F' = "\x0072\x0331"
decomposeChar '\x1E60' = "\x0053\x0307"
decomposeChar '\x1E61' = "\x0073\x0307"
decomposeChar '\x1E62' = "\x0053\x0323"
decomposeChar '\x1E63' = "\x0073\x0323"
decomposeChar '\x1E64' = "\x0053\x0301\x0307"
decomposeChar '\x1E65' = "\x0073\x0301\x0307"
decomposeChar '\x1E66' = "\x0053\x030C\x0307"
decomposeChar '\x1E67' = "\x0073\x030C\x0307"
decomposeChar '\x1E68' = "\x0053\x0323\x0307"
decomposeChar '\x1E69' = "\x0073\x0323\x0307"
decomposeChar '\x1E6A' = "\x0054\x0307"
decomposeChar '\x1E6B' = "\x0074\x0307"
decomposeChar '\x1E6C' = "\x0054\x0323"
decomposeChar '\x1E6D' = "\x0074\x0323"
decomposeChar '\x1E6E' = "\x0054\x0331"
decomposeChar '\x1E6F' = "\x0074\x0331"
decomposeChar '\x1E70' = "\x0054\x032D"
decomposeChar '\x1E71' = "\x0074\x032D"
decomposeChar '\x1E72' = "\x0055\x0324"
decomposeChar '\x1E73' = "\x0075\x0324"
decomposeChar '\x1E74' = "\x0055\x0330"
decomposeChar '\x1E75' = "\x0075\x0330"
decomposeChar '\x1E76' = "\x0055\x032D"
decomposeChar '\x1E77' = "\x0075\x032D"
decomposeChar '\x1E78' = "\x0055\x0303\x0301"
decomposeChar '\x1E79' = "\x0075\x0303\x0301"
decomposeChar '\x1E7A' = "\x0055\x0304\x0308"
decomposeChar '\x1E7B' = "\x0075\x0304\x0308"
decomposeChar '\x1E7C' = "\x0056\x0303"
decomposeChar '\x1E7D' = "\x0076\x0303"
decomposeChar '\x1E7E' = "\x0056\x0323"
decomposeChar '\x1E7F' = "\x0076\x0323"
decomposeChar '\x1E80' = "\x0057\x0300"
decomposeChar '\x1E81' = "\x0077\x0300"
decomposeChar '\x1E82' = "\x0057\x0301"
decomposeChar '\x1E83' = "\x0077\x0301"
decomposeChar '\x1E84' = "\x0057\x0308"
decomposeChar '\x1E85' = "\x0077\x0308"
decomposeChar '\x1E86' = "\x0057\x0307"
decomposeChar '\x1E87' = "\x0077\x0307"
decomposeChar '\x1E88' = "\x0057\x0323"
decomposeChar '\x1E89' = "\x0077\x0323"
decomposeChar '\x1E8A' = "\x0058\x0307"
decomposeChar '\x1E8B' = "\x0078\x0307"
decomposeChar '\x1E8C' = "\x0058\x0308"
decomposeChar '\x1E8D' = "\x0078\x0308"
decomposeChar '\x1E8E' = "\x0059\x0307"
decomposeChar '\x1E8F' = "\x0079\x0307"
decomposeChar '\x1E90' = "\x005A\x0302"
decomposeChar '\x1E91' = "\x007A\x0302"
decomposeChar '\x1E92' = "\x005A\x0323"
decomposeChar '\x1E93' = "\x007A\x0323"
decomposeChar '\x1E94' = "\x005A\x0331"
decomposeChar '\x1E95' = "\x007A\x0331"
decomposeChar '\x1E96' = "\x0068\x0331"
decomposeChar '\x1E97' = "\x0074\x0308"
decomposeChar '\x1E98' = "\x0077\x030A"
decomposeChar '\x1E99' = "\x0079\x030A"
decomposeChar '\x1E9B' = "\x017F\x0307"
decomposeChar '\x1EA0' = "\x0041\x0323"
decomposeChar '\x1EA1' = "\x0061\x0323"
decomposeChar '\x1EA2' = "\x0041\x0309"
decomposeChar '\x1EA3' = "\x0061\x0309"
decomposeChar '\x1EA4' = "\x0041\x0302\x0301"
decomposeChar '\x1EA5' = "\x0061\x0302\x0301"
decomposeChar '\x1EA6' = "\x0041\x0302\x0300"
decomposeChar '\x1EA7' = "\x0061\x0302\x0300"
decomposeChar '\x1EA8' = "\x0041\x0302\x0309"
decomposeChar '\x1EA9' = "\x0061\x0302\x0309"
decomposeChar '\x1EAA' = "\x0041\x0302\x0303"
decomposeChar '\x1EAB' = "\x0061\x0302\x0303"
decomposeChar '\x1EAC' = "\x0041\x0323\x0302"
decomposeChar '\x1EAD' = "\x0061\x0323\x0302"
decomposeChar '\x1EAE' = "\x0041\x0306\x0301"
decomposeChar '\x1EAF' = "\x0061\x0306\x0301"
decomposeChar '\x1EB0' = "\x0041\x0306\x0300"
decomposeChar '\x1EB1' = "\x0061\x0306\x0300"
decomposeChar '\x1EB2' = "\x0041\x0306\x0309"
decomposeChar '\x1EB3' = "\x0061\x0306\x0309"
decomposeChar '\x1EB4' = "\x0041\x0306\x0303"
decomposeChar '\x1EB5' = "\x0061\x0306\x0303"
decomposeChar '\x1EB6' = "\x0041\x0323\x0306"
decomposeChar '\x1EB7' = "\x0061\x0323\x0306"
decomposeChar '\x1EB8' = "\x0045\x0323"
decomposeChar '\x1EB9' = "\x0065\x0323"
decomposeChar '\x1EBA' = "\x0045\x0309"
decomposeChar '\x1EBB' = "\x0065\x0309"
decomposeChar '\x1EBC' = "\x0045\x0303"
decomposeChar '\x1EBD' = "\x0065\x0303"
decomposeChar '\x1EBE' = "\x0045\x0302\x0301"
decomposeChar '\x1EBF' = "\x0065\x0302\x0301"
decomposeChar '\x1EC0' = "\x0045\x0302\x0300"
decomposeChar '\x1EC1' = "\x0065\x0302\x0300"
decomposeChar '\x1EC2' = "\x0045\x0302\x0309"
decomposeChar '\x1EC3' = "\x0065\x0302\x0309"
decomposeChar '\x1EC4' = "\x0045\x0302\x0303"
decomposeChar '\x1EC5' = "\x0065\x0302\x0303"
decomposeChar '\x1EC6' = "\x0045\x0323\x0302"
decomposeChar '\x1EC7' = "\x0065\x0323\x0302"
decomposeChar '\x1EC8' = "\x0049\x0309"
decomposeChar '\x1EC9' = "\x0069\x0309"
decomposeChar '\x1ECA' = "\x0049\x0323"
decomposeChar '\x1ECB' = "\x0069\x0323"
decomposeChar '\x1ECC' = "\x004F\x0323"
decomposeChar '\x1ECD' = "\x006F\x0323"
decomposeChar '\x1ECE' = "\x004F\x0309"
decomposeChar '\x1ECF' = "\x006F\x0309"
decomposeChar '\x1ED0' = "\x004F\x0302\x0301"
decomposeChar '\x1ED1' = "\x006F\x0302\x0301"
decomposeChar '\x1ED2' = "\x004F\x0302\x0300"
decomposeChar '\x1ED3' = "\x006F\x0302\x0300"
decomposeChar '\x1ED4' = "\x004F\x0302\x0309"
decomposeChar '\x1ED5' = "\x006F\x0302\x0309"
decomposeChar '\x1ED6' = "\x004F\x0302\x0303"
decomposeChar '\x1ED7' = "\x006F\x0302\x0303"
decomposeChar '\x1ED8' = "\x004F\x0323\x0302"
decomposeChar '\x1ED9' = "\x006F\x0323\x0302"
decomposeChar '\x1EDA' = "\x004F\x031B\x0301"
decomposeChar '\x1EDB' = "\x006F\x031B\x0301"
decomposeChar '\x1EDC' = "\x004F\x031B\x0300"
decomposeChar '\x1EDD' = "\x006F\x031B\x0300"
decomposeChar '\x1EDE' = "\x004F\x031B\x0309"
decomposeChar '\x1EDF' = "\x006F\x031B\x0309"
decomposeChar '\x1EE0' = "\x004F\x031B\x0303"
decomposeChar '\x1EE1' = "\x006F\x031B\x0303"
decomposeChar '\x1EE2' = "\x004F\x031B\x0323"
decomposeChar '\x1EE3' = "\x006F\x031B\x0323"
decomposeChar '\x1EE4' = "\x0055\x0323"
decomposeChar '\x1EE5' = "\x0075\x0323"
decomposeChar '\x1EE6' = "\x0055\x0309"
decomposeChar '\x1EE7' = "\x0075\x0309"
decomposeChar '\x1EE8' = "\x0055\x031B\x0301"
decomposeChar '\x1EE9' = "\x0075\x031B\x0301"
decomposeChar '\x1EEA' = "\x0055\x031B\x0300"
decomposeChar '\x1EEB' = "\x0075\x031B\x0300"
decomposeChar '\x1EEC' = "\x0055\x031B\x0309"
decomposeChar '\x1EED' = "\x0075\x031B\x0309"
decomposeChar '\x1EEE' = "\x0055\x031B\x0303"
decomposeChar '\x1EEF' = "\x0075\x031B\x0303"
decomposeChar '\x1EF0' = "\x0055\x031B\x0323"
decomposeChar '\x1EF1' = "\x0075\x031B\x0323"
decomposeChar '\x1EF2' = "\x0059\x0300"
decomposeChar '\x1EF3' = "\x0079\x0300"
decomposeChar '\x1EF4' = "\x0059\x0323"
decomposeChar '\x1EF5' = "\x0079\x0323"
decomposeChar '\x1EF6' = "\x0059\x0309"
decomposeChar '\x1EF7' = "\x0079\x0309"
decomposeChar '\x1EF8' = "\x0059\x0303"
decomposeChar '\x1EF9' = "\x0079\x0303"
decomposeChar '\x1F00' = "\x03B1\x0313"
decomposeChar '\x1F01' = "\x03B1\x0314"
decomposeChar '\x1F02' = "\x03B1\x0313\x0300"
decomposeChar '\x1F03' = "\x03B1\x0314\x0300"
decomposeChar '\x1F04' = "\x03B1\x0313\x0301"
decomposeChar '\x1F05' = "\x03B1\x0314\x0301"
decomposeChar '\x1F06' = "\x03B1\x0313\x0342"
decomposeChar '\x1F07' = "\x03B1\x0314\x0342"
decomposeChar '\x1F08' = "\x0391\x0313"
decomposeChar '\x1F09' = "\x0391\x0314"
decomposeChar '\x1F0A' = "\x0391\x0313\x0300"
decomposeChar '\x1F0B' = "\x0391\x0314\x0300"
decomposeChar '\x1F0C' = "\x0391\x0313\x0301"
decomposeChar '\x1F0D' = "\x0391\x0314\x0301"
decomposeChar '\x1F0E' = "\x0391\x0313\x0342"
decomposeChar '\x1F0F' = "\x0391\x0314\x0342"
decomposeChar '\x1F10' = "\x03B5\x0313"
decomposeChar '\x1F11' = "\x03B5\x0314"
decomposeChar '\x1F12' = "\x03B5\x0313\x0300"
decomposeChar '\x1F13' = "\x03B5\x0314\x0300"
decomposeChar '\x1F14' = "\x03B5\x0313\x0301"
decomposeChar '\x1F15' = "\x03B5\x0314\x0301"
decomposeChar '\x1F18' = "\x0395\x0313"
decomposeChar '\x1F19' = "\x0395\x0314"
decomposeChar '\x1F1A' = "\x0395\x0313\x0300"
decomposeChar '\x1F1B' = "\x0395\x0314\x0300"
decomposeChar '\x1F1C' = "\x0395\x0313\x0301"
decomposeChar '\x1F1D' = "\x0395\x0314\x0301"
decomposeChar '\x1F20' = "\x03B7\x0313"
decomposeChar '\x1F21' = "\x03B7\x0314"
decomposeChar '\x1F22' = "\x03B7\x0313\x0300"
decomposeChar '\x1F23' = "\x03B7\x0314\x0300"
decomposeChar '\x1F24' = "\x03B7\x0313\x0301"
decomposeChar '\x1F25' = "\x03B7\x0314\x0301"
decomposeChar '\x1F26' = "\x03B7\x0313\x0342"
decomposeChar '\x1F27' = "\x03B7\x0314\x0342"
decomposeChar '\x1F28' = "\x0397\x0313"
decomposeChar '\x1F29' = "\x0397\x0314"
decomposeChar '\x1F2A' = "\x0397\x0313\x0300"
decomposeChar '\x1F2B' = "\x0397\x0314\x0300"
decomposeChar '\x1F2C' = "\x0397\x0313\x0301"
decomposeChar '\x1F2D' = "\x0397\x0314\x0301"
decomposeChar '\x1F2E' = "\x0397\x0313\x0342"
decomposeChar '\x1F2F' = "\x0397\x0314\x0342"
decomposeChar '\x1F30' = "\x03B9\x0313"
decomposeChar '\x1F31' = "\x03B9\x0314"
decomposeChar '\x1F32' = "\x03B9\x0313\x0300"
decomposeChar '\x1F33' = "\x03B9\x0314\x0300"
decomposeChar '\x1F34' = "\x03B9\x0313\x0301"
decomposeChar '\x1F35' = "\x03B9\x0314\x0301"
decomposeChar '\x1F36' = "\x03B9\x0313\x0342"
decomposeChar '\x1F37' = "\x03B9\x0314\x0342"
decomposeChar '\x1F38' = "\x0399\x0313"
decomposeChar '\x1F39' = "\x0399\x0314"
decomposeChar '\x1F3A' = "\x0399\x0313\x0300"
decomposeChar '\x1F3B' = "\x0399\x0314\x0300"
decomposeChar '\x1F3C' = "\x0399\x0313\x0301"
decomposeChar '\x1F3D' = "\x0399\x0314\x0301"
decomposeChar '\x1F3E' = "\x0399\x0313\x0342"
decomposeChar '\x1F3F' = "\x0399\x0314\x0342"
decomposeChar '\x1F40' = "\x03BF\x0313"
decomposeChar '\x1F41' = "\x03BF\x0314"
decomposeChar '\x1F42' = "\x03BF\x0313\x0300"
decomposeChar '\x1F43' = "\x03BF\x0314\x0300"
decomposeChar '\x1F44' = "\x03BF\x0313\x0301"
decomposeChar '\x1F45' = "\x03BF\x0314\x0301"
decomposeChar '\x1F48' = "\x039F\x0313"
decomposeChar '\x1F49' = "\x039F\x0314"
decomposeChar '\x1F4A' = "\x039F\x0313\x0300"
decomposeChar '\x1F4B' = "\x039F\x0314\x0300"
decomposeChar '\x1F4C' = "\x039F\x0313\x0301"
decomposeChar '\x1F4D' = "\x039F\x0314\x0301"
decomposeChar '\x1F50' = "\x03C5\x0313"
decomposeChar '\x1F51' = "\x03C5\x0314"
decomposeChar '\x1F52' = "\x03C5\x0313\x0300"
decomposeChar '\x1F53' = "\x03C5\x0314\x0300"
decomposeChar '\x1F54' = "\x03C5\x0313\x0301"
decomposeChar '\x1F55' = "\x03C5\x0314\x0301"
decomposeChar '\x1F56' = "\x03C5\x0313\x0342"
decomposeChar '\x1F57' = "\x03C5\x0314\x0342"
decomposeChar '\x1F59' = "\x03A5\x0314"
decomposeChar '\x1F5B' = "\x03A5\x0314\x0300"
decomposeChar '\x1F5D' = "\x03A5\x0314\x0301"
decomposeChar '\x1F5F' = "\x03A5\x0314\x0342"
decomposeChar '\x1F60' = "\x03C9\x0313"
decomposeChar '\x1F61' = "\x03C9\x0314"
decomposeChar '\x1F62' = "\x03C9\x0313\x0300"
decomposeChar '\x1F63' = "\x03C9\x0314\x0300"
decomposeChar '\x1F64' = "\x03C9\x0313\x0301"
decomposeChar '\x1F65' = "\x03C9\x0314\x0301"
decomposeChar '\x1F66' = "\x03C9\x0313\x0342"
decomposeChar '\x1F67' = "\x03C9\x0314\x0342"
decomposeChar '\x1F68' = "\x03A9\x0313"
decomposeChar '\x1F69' = "\x03A9\x0314"
decomposeChar '\x1F6A' = "\x03A9\x0313\x0300"
decomposeChar '\x1F6B' = "\x03A9\x0314\x0300"
decomposeChar '\x1F6C' = "\x03A9\x0313\x0301"
decomposeChar '\x1F6D' = "\x03A9\x0314\x0301"
decomposeChar '\x1F6E' = "\x03A9\x0313\x0342"
decomposeChar '\x1F6F' = "\x03A9\x0314\x0342"
decomposeChar '\x1F70' = "\x03B1\x0300"
decomposeChar '\x1F71' = "\x03B1\x0301"
decomposeChar '\x1F72' = "\x03B5\x0300"
decomposeChar '\x1F73' = "\x03B5\x0301"
decomposeChar '\x1F74' = "\x03B7\x0300"
decomposeChar '\x1F75' = "\x03B7\x0301"
decomposeChar '\x1F76' = "\x03B9\x0300"
decomposeChar '\x1F77' = "\x03B9\x0301"
decomposeChar '\x1F78' = "\x03BF\x0300"
decomposeChar '\x1F79' = "\x03BF\x0301"
decomposeChar '\x1F7A' = "\x03C5\x0300"
decomposeChar '\x1F7B' = "\x03C5\x0301"
decomposeChar '\x1F7C' = "\x03C9\x0300"
decomposeChar '\x1F7D' = "\x03C9\x0301"
decomposeChar '\x1F80' = "\x03B1\x0313\x0345"
decomposeChar '\x1F81' = "\x03B1\x0314\x0345"
decomposeChar '\x1F82' = "\x03B1\x0313\x0300\x0345"
decomposeChar '\x1F83' = "\x03B1\x0314\x0300\x0345"
decomposeChar '\x1F84' = "\x03B1\x0313\x0301\x0345"
decomposeChar '\x1F85' = "\x03B1\x0314\x0301\x0345"
decomposeChar '\x1F86' = "\x03B1\x0313\x0342\x0345"
decomposeChar '\x1F87' = "\x03B1\x0314\x0342\x0345"
decomposeChar '\x1F88' = "\x0391\x0313\x0345"
decomposeChar '\x1F89' = "\x0391\x0314\x0345"
decomposeChar '\x1F8A' = "\x0391\x0313\x0300\x0345"
decomposeChar '\x1F8B' = "\x0391\x0314\x0300\x0345"
decomposeChar '\x1F8C' = "\x0391\x0313\x0301\x0345"
decomposeChar '\x1F8D' = "\x0391\x0314\x0301\x0345"
decomposeChar '\x1F8E' = "\x0391\x0313\x0342\x0345"
decomposeChar '\x1F8F' = "\x0391\x0314\x0342\x0345"
decomposeChar '\x1F90' = "\x03B7\x0313\x0345"
decomposeChar '\x1F91' = "\x03B7\x0314\x0345"
decomposeChar '\x1F92' = "\x03B7\x0313\x0300\x0345"
decomposeChar '\x1F93' = "\x03B7\x0314\x0300\x0345"
decomposeChar '\x1F94' = "\x03B7\x0313\x0301\x0345"
decomposeChar '\x1F95' = "\x03B7\x0314\x0301\x0345"
decomposeChar '\x1F96' = "\x03B7\x0313\x0342\x0345"
decomposeChar '\x1F97' = "\x03B7\x0314\x0342\x0345"
decomposeChar '\x1F98' = "\x0397\x0313\x0345"
decomposeChar '\x1F99' = "\x0397\x0314\x0345"
decomposeChar '\x1F9A' = "\x0397\x0313\x0300\x0345"
decomposeChar '\x1F9B' = "\x0397\x0314\x0300\x0345"
decomposeChar '\x1F9C' = "\x0397\x0313\x0301\x0345"
decomposeChar '\x1F9D' = "\x0397\x0314\x0301\x0345"
decomposeChar '\x1F9E' = "\x0397\x0313\x0342\x0345"
decomposeChar '\x1F9F' = "\x0397\x0314\x0342\x0345"
decomposeChar '\x1FA0' = "\x03C9\x0313\x0345"
decomposeChar '\x1FA1' = "\x03C9\x0314\x0345"
decomposeChar '\x1FA2' = "\x03C9\x0313\x0300\x0345"
decomposeChar '\x1FA3' = "\x03C9\x0314\x0300\x0345"
decomposeChar '\x1FA4' = "\x03C9\x0313\x0301\x0345"
decomposeChar '\x1FA5' = "\x03C9\x0314\x0301\x0345"
decomposeChar '\x1FA6' = "\x03C9\x0313\x0342\x0345"
decomposeChar '\x1FA7' = "\x03C9\x0314\x0342\x0345"
decomposeChar '\x1FA8' = "\x03A9\x0313\x0345"
decomposeChar '\x1FA9' = "\x03A9\x0314\x0345"
decomposeChar '\x1FAA' = "\x03A9\x0313\x0300\x0345"
decomposeChar '\x1FAB' = "\x03A9\x0314\x0300\x0345"
decomposeChar '\x1FAC' = "\x03A9\x0313\x0301\x0345"
decomposeChar '\x1FAD' = "\x03A9\x0314\x0301\x0345"
decomposeChar '\x1FAE' = "\x03A9\x0313\x0342\x0345"
decomposeChar '\x1FAF' = "\x03A9\x0314\x0342\x0345"
decomposeChar '\x1FB0' = "\x03B1\x0306"
decomposeChar '\x1FB1' = "\x03B1\x0304"
decomposeChar '\x1FB2' = "\x03B1\x0300\x0345"
decomposeChar '\x1FB3' = "\x03B1\x0345"
decomposeChar '\x1FB4' = "\x03B1\x0301\x0345"
decomposeChar '\x1FB6' = "\x03B1\x0342"
decomposeChar '\x1FB7' = "\x03B1\x0342\x0345"
decomposeChar '\x1FB8' = "\x0391\x0306"
decomposeChar '\x1FB9' = "\x0391\x0304"
decomposeChar '\x1FBA' = "\x0391\x0300"
decomposeChar '\x1FBB' = "\x0391\x0301"
decomposeChar '\x1FBC' = "\x0391\x0345"
decomposeChar '\x1FBE' = "\x03B9"
decomposeChar '\x1FC1' = "\x00A8\x0342"
decomposeChar '\x1FC2' = "\x03B7\x0300\x0345"
decomposeChar '\x1FC3' = "\x03B7\x0345"
decomposeChar '\x1FC4' = "\x03B7\x0301\x0345"
decomposeChar '\x1FC6' = "\x03B7\x0342"
decomposeChar '\x1FC7' = "\x03B7\x0342\x0345"
decomposeChar '\x1FC8' = "\x0395\x0300"
decomposeChar '\x1FC9' = "\x0395\x0301"
decomposeChar '\x1FCA' = "\x0397\x0300"
decomposeChar '\x1FCB' = "\x0397\x0301"
decomposeChar '\x1FCC' = "\x0397\x0345"
decomposeChar '\x1FCD' = "\x1FBF\x0300"
decomposeChar '\x1FCE' = "\x1FBF\x0301"
decomposeChar '\x1FCF' = "\x1FBF\x0342"
decomposeChar '\x1FD0' = "\x03B9\x0306"
decomposeChar '\x1FD1' = "\x03B9\x0304"
decomposeChar '\x1FD2' = "\x03B9\x0308\x0300"
decomposeChar '\x1FD3' = "\x03B9\x0308\x0301"
decomposeChar '\x1FD6' = "\x03B9\x0342"
decomposeChar '\x1FD7' = "\x03B9\x0308\x0342"
decomposeChar '\x1FD8' = "\x0399\x0306"
decomposeChar '\x1FD9' = "\x0399\x0304"
decomposeChar '\x1FDA' = "\x0399\x0300"
decomposeChar '\x1FDB' = "\x0399\x0301"
decomposeChar '\x1FDD' = "\x1FFE\x0300"
decomposeChar '\x1FDE' = "\x1FFE\x0301"
decomposeChar '\x1FDF' = "\x1FFE\x0342"
decomposeChar '\x1FE0' = "\x03C5\x0306"
decomposeChar '\x1FE1' = "\x03C5\x0304"
decomposeChar '\x1FE2' = "\x03C5\x0308\x0300"
decomposeChar '\x1FE3' = "\x03C5\x0308\x0301"
decomposeChar '\x1FE4' = "\x03C1\x0313"
decomposeChar '\x1FE5' = "\x03C1\x0314"
decomposeChar '\x1FE6' = "\x03C5\x0342"
decomposeChar '\x1FE7' = "\x03C5\x0308\x0342"
decomposeChar '\x1FE8' = "\x03A5\x0306"
decomposeChar '\x1FE9' = "\x03A5\x0304"
decomposeChar '\x1FEA' = "\x03A5\x0300"
decomposeChar '\x1FEB' = "\x03A5\x0301"
decomposeChar '\x1FEC' = "\x03A1\x0314"
decomposeChar '\x1FED' = "\x00A8\x0300"
decomposeChar '\x1FEE' = "\x00A8\x0301"
decomposeChar '\x1FEF' = "\x0060"
decomposeChar '\x1FF2' = "\x03C9\x0300\x0345"
decomposeChar '\x1FF3' = "\x03C9\x0345"
decomposeChar '\x1FF4' = "\x03C9\x0301\x0345"
decomposeChar '\x1FF6' = "\x03C9\x0342"
decomposeChar '\x1FF7' = "\x03C9\x0342\x0345"
decomposeChar '\x1FF8' = "\x039F\x0300"
decomposeChar '\x1FF9' = "\x039F\x0301"
decomposeChar '\x1FFA' = "\x03A9\x0300"
decomposeChar '\x1FFB' = "\x03A9\x0301"
decomposeChar '\x1FFC' = "\x03A9\x0345"
decomposeChar '\x1FFD' = "\x00B4"
decomposeChar '\x2000' = "\x2002"
decomposeChar '\x2001' = "\x2003"
decomposeChar '\x2126' = "\x03A9"
decomposeChar '\x212A' = "\x004B"
decomposeChar '\x212B' = "\x0041\x030A"
decomposeChar '\x219A' = "\x2190\x0338"
decomposeChar '\x219B' = "\x2192\x0338"
decomposeChar '\x21AE' = "\x2194\x0338"
decomposeChar '\x21CD' = "\x21D0\x0338"
decomposeChar '\x21CE' = "\x21D4\x0338"
decomposeChar '\x21CF' = "\x21D2\x0338"
decomposeChar '\x2204' = "\x2203\x0338"
decomposeChar '\x2209' = "\x2208\x0338"
decomposeChar '\x220C' = "\x220B\x0338"
decomposeChar '\x2224' = "\x2223\x0338"
decomposeChar '\x2226' = "\x2225\x0338"
decomposeChar '\x2241' = "\x223C\x0338"
decomposeChar '\x2244' = "\x2243\x0338"
decomposeChar '\x2247' = "\x2245\x0338"
decomposeChar '\x2249' = "\x2248\x0338"
decomposeChar '\x2260' = "\x003D\x0338"
decomposeChar '\x2262' = "\x2261\x0338"
decomposeChar '\x226D' = "\x224D\x0338"
decomposeChar '\x226E' = "\x003C\x0338"
decomposeChar '\x226F' = "\x003E\x0338"
decomposeChar '\x2270' = "\x2264\x0338"
decomposeChar '\x2271' = "\x2265\x0338"
decomposeChar '\x2274' = "\x2272\x0338"
decomposeChar '\x2275' = "\x2273\x0338"
decomposeChar '\x2278' = "\x2276\x0338"
decomposeChar '\x2279' = "\x2277\x0338"
decomposeChar '\x2280' = "\x227A\x0338"
decomposeChar '\x2281' = "\x227B\x0338"
decomposeChar '\x2284' = "\x2282\x0338"
decomposeChar '\x2285' = "\x2283\x0338"
decomposeChar '\x2288' = "\x2286\x0338"
decomposeChar '\x2289' = "\x2287\x0338"
decomposeChar '\x22AC' = "\x22A2\x0338"
decomposeChar '\x22AD' = "\x22A8\x0338"
decomposeChar '\x22AE' = "\x22A9\x0338"
decomposeChar '\x22AF' = "\x22AB\x0338"
decomposeChar '\x22E0' = "\x227C\x0338"
decomposeChar '\x22E1' = "\x227D\x0338"
decomposeChar '\x22E2' = "\x2291\x0338"
decomposeChar '\x22E3' = "\x2292\x0338"
decomposeChar '\x22EA' = "\x22B2\x0338"
decomposeChar '\x22EB' = "\x22B3\x0338"
decomposeChar '\x22EC' = "\x22B4\x0338"
decomposeChar '\x22ED' = "\x22B5\x0338"
decomposeChar '\x2329' = "\x3008"
decomposeChar '\x232A' = "\x3009"
decomposeChar '\x2ADC' = "\x2ADD\x0338"
decomposeChar '\x304C' = "\x304B\x3099"
decomposeChar '\x304E' = "\x304D\x3099"
decomposeChar '\x3050' = "\x304F\x3099"
decomposeChar '\x3052' = "\x3051\x3099"
decomposeChar '\x3054' = "\x3053\x3099"
decomposeChar '\x3056' = "\x3055\x3099"
decomposeChar '\x3058' = "\x3057\x3099"
decomposeChar '\x305A' = "\x3059\x3099"
decomposeChar '\x305C' = "\x305B\x3099"
decomposeChar '\x305E' = "\x305D\x3099"
decomposeChar '\x3060' = "\x305F\x3099"
decomposeChar '\x3062' = "\x3061\x3099"
decomposeChar '\x3065' = "\x3064\x3099"
decomposeChar '\x3067' = "\x3066\x3099"
decomposeChar '\x3069' = "\x3068\x3099"
decomposeChar '\x3070' = "\x306F\x3099"
decomposeChar '\x3071' = "\x306F\x309A"
decomposeChar '\x3073' = "\x3072\x3099"
decomposeChar '\x3074' = "\x3072\x309A"
decomposeChar '\x3076' = "\x3075\x3099"
decomposeChar '\x3077' = "\x3075\x309A"
decomposeChar '\x3079' = "\x3078\x3099"
decomposeChar '\x307A' = "\x3078\x309A"
decomposeChar '\x307C' = "\x307B\x3099"
decomposeChar '\x307D' = "\x307B\x309A"
decomposeChar '\x3094' = "\x3046\x3099"
decomposeChar '\x309E' = "\x309D\x3099"
decomposeChar '\x30AC' = "\x30AB\x3099"
decomposeChar '\x30AE' = "\x30AD\x3099"
decomposeChar '\x30B0' = "\x30AF\x3099"
decomposeChar '\x30B2' = "\x30B1\x3099"
decomposeChar '\x30B4' = "\x30B3\x3099"
decomposeChar '\x30B6' = "\x30B5\x3099"
decomposeChar '\x30B8' = "\x30B7\x3099"
decomposeChar '\x30BA' = "\x30B9\x3099"
decomposeChar '\x30BC' = "\x30BB\x3099"
decomposeChar '\x30BE' = "\x30BD\x3099"
decomposeChar '\x30C0' = "\x30BF\x3099"
decomposeChar '\x30C2' = "\x30C1\x3099"
decomposeChar '\x30C5' = "\x30C4\x3099"
decomposeChar '\x30C7' = "\x30C6\x3099"
decomposeChar '\x30C9' = "\x30C8\x3099"
decomposeChar '\x30D0' = "\x30CF\x3099"
decomposeChar '\x30D1' = "\x30CF\x309A"
decomposeChar '\x30D3' = "\x30D2\x3099"
decomposeChar '\x30D4' = "\x30D2\x309A"
decomposeChar '\x30D6' = "\x30D5\x3099"
decomposeChar '\x30D7' = "\x30D5\x309A"
decomposeChar '\x30D9' = "\x30D8\x3099"
decomposeChar '\x30DA' = "\x30D8\x309A"
decomposeChar '\x30DC' = "\x30DB\x3099"
decomposeChar '\x30DD' = "\x30DB\x309A"
decomposeChar '\x30F4' = "\x30A6\x3099"
decomposeChar '\x30F7' = "\x30EF\x3099"
decomposeChar '\x30F8' = "\x30F0\x3099"
decomposeChar '\x30F9' = "\x30F1\x3099"
decomposeChar '\x30FA' = "\x30F2\x3099"
decomposeChar '\x30FE' = "\x30FD\x3099"
decomposeChar '\xF900' = "\x8C48"
decomposeChar '\xF901' = "\x66F4"
decomposeChar '\xF902' = "\x8ECA"
decomposeChar '\xF903' = "\x8CC8"
decomposeChar '\xF904' = "\x6ED1"
decomposeChar '\xF905' = "\x4E32"
decomposeChar '\xF906' = "\x53E5"
decomposeChar '\xF907' = "\x9F9C"
decomposeChar '\xF908' = "\x9F9C"
decomposeChar '\xF909' = "\x5951"
decomposeChar '\xF90A' = "\x91D1"
decomposeChar '\xF90B' = "\x5587"
decomposeChar '\xF90C' = "\x5948"
decomposeChar '\xF90D' = "\x61F6"
decomposeChar '\xF90E' = "\x7669"
decomposeChar '\xF90F' = "\x7F85"
decomposeChar '\xF910' = "\x863F"
decomposeChar '\xF911' = "\x87BA"
decomposeChar '\xF912' = "\x88F8"
decomposeChar '\xF913' = "\x908F"
decomposeChar '\xF914' = "\x6A02"
decomposeChar '\xF915' = "\x6D1B"
decomposeChar '\xF916' = "\x70D9"
decomposeChar '\xF917' = "\x73DE"
decomposeChar '\xF918' = "\x843D"
decomposeChar '\xF919' = "\x916A"
decomposeChar '\xF91A' = "\x99F1"
decomposeChar '\xF91B' = "\x4E82"
decomposeChar '\xF91C' = "\x5375"
decomposeChar '\xF91D' = "\x6B04"
decomposeChar '\xF91E' = "\x721B"
decomposeChar '\xF91F' = "\x862D"
decomposeChar '\xF920' = "\x9E1E"
decomposeChar '\xF921' = "\x5D50"
decomposeChar '\xF922' = "\x6FEB"
decomposeChar '\xF923' = "\x85CD"
decomposeChar '\xF924' = "\x8964"
decomposeChar '\xF925' = "\x62C9"
decomposeChar '\xF926' = "\x81D8"
decomposeChar '\xF927' = "\x881F"
decomposeChar '\xF928' = "\x5ECA"
decomposeChar '\xF929' = "\x6717"
decomposeChar '\xF92A' = "\x6D6A"
decomposeChar '\xF92B' = "\x72FC"
decomposeChar '\xF92C' = "\x90CE"
decomposeChar '\xF92D' = "\x4F86"
decomposeChar '\xF92E' = "\x51B7"
decomposeChar '\xF92F' = "\x52DE"
decomposeChar '\xF930' = "\x64C4"
decomposeChar '\xF931' = "\x6AD3"
decomposeChar '\xF932' = "\x7210"
decomposeChar '\xF933' = "\x76E7"
decomposeChar '\xF934' = "\x8001"
decomposeChar '\xF935' = "\x8606"
decomposeChar '\xF936' = "\x865C"
decomposeChar '\xF937' = "\x8DEF"
decomposeChar '\xF938' = "\x9732"
decomposeChar '\xF939' = "\x9B6F"
decomposeChar '\xF93A' = "\x9DFA"
decomposeChar '\xF93B' = "\x788C"
decomposeChar '\xF93C' = "\x797F"
decomposeChar '\xF93D' = "\x7DA0"
decomposeChar '\xF93E' = "\x83C9"
decomposeChar '\xF93F' = "\x9304"
decomposeChar '\xF940' = "\x9E7F"
decomposeChar '\xF941' = "\x8AD6"
decomposeChar '\xF942' = "\x58DF"
decomposeChar '\xF943' = "\x5F04"
decomposeChar '\xF944' = "\x7C60"
decomposeChar '\xF945' = "\x807E"
decomposeChar '\xF946' = "\x7262"
decomposeChar '\xF947' = "\x78CA"
decomposeChar '\xF948' = "\x8CC2"
decomposeChar '\xF949' = "\x96F7"
decomposeChar '\xF94A' = "\x58D8"
decomposeChar '\xF94B' = "\x5C62"
decomposeChar '\xF94C' = "\x6A13"
decomposeChar '\xF94D' = "\x6DDA"
decomposeChar '\xF94E' = "\x6F0F"
decomposeChar '\xF94F' = "\x7D2F"
decomposeChar '\xF950' = "\x7E37"
decomposeChar '\xF951' = "\x964B"
decomposeChar '\xF952' = "\x52D2"
decomposeChar '\xF953' = "\x808B"
decomposeChar '\xF954' = "\x51DC"
decomposeChar '\xF955' = "\x51CC"
decomposeChar '\xF956' = "\x7A1C"
decomposeChar '\xF957' = "\x7DBE"
decomposeChar '\xF958' = "\x83F1"
decomposeChar '\xF959' = "\x9675"
decomposeChar '\xF95A' = "\x8B80"
decomposeChar '\xF95B' = "\x62CF"
decomposeChar '\xF95C' = "\x6A02"
decomposeChar '\xF95D' = "\x8AFE"
decomposeChar '\xF95E' = "\x4E39"
decomposeChar '\xF95F' = "\x5BE7"
decomposeChar '\xF960' = "\x6012"
decomposeChar '\xF961' = "\x7387"
decomposeChar '\xF962' = "\x7570"
decomposeChar '\xF963' = "\x5317"
decomposeChar '\xF964' = "\x78FB"
decomposeChar '\xF965' = "\x4FBF"
decomposeChar '\xF966' = "\x5FA9"
decomposeChar '\xF967' = "\x4E0D"
decomposeChar '\xF968' = "\x6CCC"
decomposeChar '\xF969' = "\x6578"
decomposeChar '\xF96A' = "\x7D22"
decomposeChar '\xF96B' = "\x53C3"
decomposeChar '\xF96C' = "\x585E"
decomposeChar '\xF96D' = "\x7701"
decomposeChar '\xF96E' = "\x8449"
decomposeChar '\xF96F' = "\x8AAA"
decomposeChar '\xF970' = "\x6BBA"
decomposeChar '\xF971' = "\x8FB0"
decomposeChar '\xF972' = "\x6C88"
decomposeChar '\xF973' = "\x62FE"
decomposeChar '\xF974' = "\x82E5"
decomposeChar '\xF975' = "\x63A0"
decomposeChar '\xF976' = "\x7565"
decomposeChar '\xF977' = "\x4EAE"
decomposeChar '\xF978' = "\x5169"
decomposeChar '\xF979' = "\x51C9"
decomposeChar '\xF97A' = "\x6881"
decomposeChar '\xF97B' = "\x7CE7"
decomposeChar '\xF97C' = "\x826F"
decomposeChar '\xF97D' = "\x8AD2"
decomposeChar '\xF97E' = "\x91CF"
decomposeChar '\xF97F' = "\x52F5"
decomposeChar '\xF980' = "\x5442"
decomposeChar '\xF981' = "\x5973"
decomposeChar '\xF982' = "\x5EEC"
decomposeChar '\xF983' = "\x65C5"
decomposeChar '\xF984' = "\x6FFE"
decomposeChar '\xF985' = "\x792A"
decomposeChar '\xF986' = "\x95AD"
decomposeChar '\xF987' = "\x9A6A"
decomposeChar '\xF988' = "\x9E97"
decomposeChar '\xF989' = "\x9ECE"
decomposeChar '\xF98A' = "\x529B"
decomposeChar '\xF98B' = "\x66C6"
decomposeChar '\xF98C' = "\x6B77"
decomposeChar '\xF98D' = "\x8F62"
decomposeChar '\xF98E' = "\x5E74"
decomposeChar '\xF98F' = "\x6190"
decomposeChar '\xF990' = "\x6200"
decomposeChar '\xF991' = "\x649A"
decomposeChar '\xF992' = "\x6F23"
decomposeChar '\xF993' = "\x7149"
decomposeChar '\xF994' = "\x7489"
decomposeChar '\xF995' = "\x79CA"
decomposeChar '\xF996' = "\x7DF4"
decomposeChar '\xF997' = "\x806F"
decomposeChar '\xF998' = "\x8F26"
decomposeChar '\xF999' = "\x84EE"
decomposeChar '\xF99A' = "\x9023"
decomposeChar '\xF99B' = "\x934A"
decomposeChar '\xF99C' = "\x5217"
decomposeChar '\xF99D' = "\x52A3"
decomposeChar '\xF99E' = "\x54BD"
decomposeChar '\xF99F' = "\x70C8"
decomposeChar '\xF9A0' = "\x88C2"
decomposeChar '\xF9A1' = "\x8AAA"
decomposeChar '\xF9A2' = "\x5EC9"
decomposeChar '\xF9A3' = "\x5FF5"
decomposeChar '\xF9A4' = "\x637B"
decomposeChar '\xF9A5' = "\x6BAE"
decomposeChar '\xF9A6' = "\x7C3E"
decomposeChar '\xF9A7' = "\x7375"
decomposeChar '\xF9A8' = "\x4EE4"
decomposeChar '\xF9A9' = "\x56F9"
decomposeChar '\xF9AA' = "\x5BE7"
decomposeChar '\xF9AB' = "\x5DBA"
decomposeChar '\xF9AC' = "\x601C"
decomposeChar '\xF9AD' = "\x73B2"
decomposeChar '\xF9AE' = "\x7469"
decomposeChar '\xF9AF' = "\x7F9A"
decomposeChar '\xF9B0' = "\x8046"
decomposeChar '\xF9B1' = "\x9234"
decomposeChar '\xF9B2' = "\x96F6"
decomposeChar '\xF9B3' = "\x9748"
decomposeChar '\xF9B4' = "\x9818"
decomposeChar '\xF9B5' = "\x4F8B"
decomposeChar '\xF9B6' = "\x79AE"
decomposeChar '\xF9B7' = "\x91B4"
decomposeChar '\xF9B8' = "\x96B8"
decomposeChar '\xF9B9' = "\x60E1"
decomposeChar '\xF9BA' = "\x4E86"
decomposeChar '\xF9BB' = "\x50DA"
decomposeChar '\xF9BC' = "\x5BEE"
decomposeChar '\xF9BD' = "\x5C3F"
decomposeChar '\xF9BE' = "\x6599"
decomposeChar '\xF9BF' = "\x6A02"
decomposeChar '\xF9C0' = "\x71CE"
decomposeChar '\xF9C1' = "\x7642"
decomposeChar '\xF9C2' = "\x84FC"
decomposeChar '\xF9C3' = "\x907C"
decomposeChar '\xF9C4' = "\x9F8D"
decomposeChar '\xF9C5' = "\x6688"
decomposeChar '\xF9C6' = "\x962E"
decomposeChar '\xF9C7' = "\x5289"
decomposeChar '\xF9C8' = "\x677B"
decomposeChar '\xF9C9' = "\x67F3"
decomposeChar '\xF9CA' = "\x6D41"
decomposeChar '\xF9CB' = "\x6E9C"
decomposeChar '\xF9CC' = "\x7409"
decomposeChar '\xF9CD' = "\x7559"
decomposeChar '\xF9CE' = "\x786B"
decomposeChar '\xF9CF' = "\x7D10"
decomposeChar '\xF9D0' = "\x985E"
decomposeChar '\xF9D1' = "\x516D"
decomposeChar '\xF9D2' = "\x622E"
decomposeChar '\xF9D3' = "\x9678"
decomposeChar '\xF9D4' = "\x502B"
decomposeChar '\xF9D5' = "\x5D19"
decomposeChar '\xF9D6' = "\x6DEA"
decomposeChar '\xF9D7' = "\x8F2A"
decomposeChar '\xF9D8' = "\x5F8B"
decomposeChar '\xF9D9' = "\x6144"
decomposeChar '\xF9DA' = "\x6817"
decomposeChar '\xF9DB' = "\x7387"
decomposeChar '\xF9DC' = "\x9686"
decomposeChar '\xF9DD' = "\x5229"
decomposeChar '\xF9DE' = "\x540F"
decomposeChar '\xF9DF' = "\x5C65"
decomposeChar '\xF9E0' = "\x6613"
decomposeChar '\xF9E1' = "\x674E"
decomposeChar '\xF9E2' = "\x68A8"
decomposeChar '\xF9E3' = "\x6CE5"
decomposeChar '\xF9E4' = "\x7406"
decomposeChar '\xF9E5' = "\x75E2"
decomposeChar '\xF9E6' = "\x7F79"
decomposeChar '\xF9E7' = "\x88CF"
decomposeChar '\xF9E8' = "\x88E1"
decomposeChar '\xF9E9' = "\x91CC"
decomposeChar '\xF9EA' = "\x96E2"
decomposeChar '\xF9EB' = "\x533F"
decomposeChar '\xF9EC' = "\x6EBA"
decomposeChar '\xF9ED' = "\x541D"
decomposeChar '\xF9EE' = "\x71D0"
decomposeChar '\xF9EF' = "\x7498"
decomposeChar '\xF9F0' = "\x85FA"
decomposeChar '\xF9F1' = "\x96A3"
decomposeChar '\xF9F2' = "\x9C57"
decomposeChar '\xF9F3' = "\x9E9F"
decomposeChar '\xF9F4' = "\x6797"
decomposeChar '\xF9F5' = "\x6DCB"
decomposeChar '\xF9F6' = "\x81E8"
decomposeChar '\xF9F7' = "\x7ACB"
decomposeChar '\xF9F8' = "\x7B20"
decomposeChar '\xF9F9' = "\x7C92"
decomposeChar '\xF9FA' = "\x72C0"
decomposeChar '\xF9FB' = "\x7099"
decomposeChar '\xF9FC' = "\x8B58"
decomposeChar '\xF9FD' = "\x4EC0"
decomposeChar '\xF9FE' = "\x8336"
decomposeChar '\xF9FF' = "\x523A"
decomposeChar '\xFA00' = "\x5207"
decomposeChar '\xFA01' = "\x5EA6"
decomposeChar '\xFA02' = "\x62D3"
decomposeChar '\xFA03' = "\x7CD6"
decomposeChar '\xFA04' = "\x5B85"
decomposeChar '\xFA05' = "\x6D1E"
decomposeChar '\xFA06' = "\x66B4"
decomposeChar '\xFA07' = "\x8F3B"
decomposeChar '\xFA08' = "\x884C"
decomposeChar '\xFA09' = "\x964D"
decomposeChar '\xFA0A' = "\x898B"
decomposeChar '\xFA0B' = "\x5ED3"
decomposeChar '\xFA0C' = "\x5140"
decomposeChar '\xFA0D' = "\x55C0"
decomposeChar '\xFA10' = "\x585A"
decomposeChar '\xFA12' = "\x6674"
decomposeChar '\xFA15' = "\x51DE"
decomposeChar '\xFA16' = "\x732A"
decomposeChar '\xFA17' = "\x76CA"
decomposeChar '\xFA18' = "\x793C"
decomposeChar '\xFA19' = "\x795E"
decomposeChar '\xFA1A' = "\x7965"
decomposeChar '\xFA1B' = "\x798F"
decomposeChar '\xFA1C' = "\x9756"
decomposeChar '\xFA1D' = "\x7CBE"
decomposeChar '\xFA1E' = "\x7FBD"
decomposeChar '\xFA20' = "\x8612"
decomposeChar '\xFA22' = "\x8AF8"
decomposeChar '\xFA25' = "\x9038"
decomposeChar '\xFA26' = "\x90FD"
decomposeChar '\xFA2A' = "\x98EF"
decomposeChar '\xFA2B' = "\x98FC"
decomposeChar '\xFA2C' = "\x9928"
decomposeChar '\xFA2D' = "\x9DB4"
decomposeChar '\xFA2E' = "\x90DE"
decomposeChar '\xFA2F' = "\x96B7"
decomposeChar '\xFA30' = "\x4FAE"
decomposeChar '\xFA31' = "\x50E7"
decomposeChar '\xFA32' = "\x514D"
decomposeChar '\xFA33' = "\x52C9"
decomposeChar '\xFA34' = "\x52E4"
decomposeChar '\xFA35' = "\x5351"
decomposeChar '\xFA36' = "\x559D"
decomposeChar '\xFA37' = "\x5606"
decomposeChar '\xFA38' = "\x5668"
decomposeChar '\xFA39' = "\x5840"
decomposeChar '\xFA3A' = "\x58A8"
decomposeChar '\xFA3B' = "\x5C64"
decomposeChar '\xFA3C' = "\x5C6E"
decomposeChar '\xFA3D' = "\x6094"
decomposeChar '\xFA3E' = "\x6168"
decomposeChar '\xFA3F' = "\x618E"
decomposeChar '\xFA40' = "\x61F2"
decomposeChar '\xFA41' = "\x654F"
decomposeChar '\xFA42' = "\x65E2"
decomposeChar '\xFA43' = "\x6691"
decomposeChar '\xFA44' = "\x6885"
decomposeChar '\xFA45' = "\x6D77"
decomposeChar '\xFA46' = "\x6E1A"
decomposeChar '\xFA47' = "\x6F22"
decomposeChar '\xFA48' = "\x716E"
decomposeChar '\xFA49' = "\x722B"
decomposeChar '\xFA4A' = "\x7422"
decomposeChar '\xFA4B' = "\x7891"
decomposeChar '\xFA4C' = "\x793E"
decomposeChar '\xFA4D' = "\x7949"
decomposeChar '\xFA4E' = "\x7948"
decomposeChar '\xFA4F' = "\x7950"
decomposeChar '\xFA50' = "\x7956"
decomposeChar '\xFA51' = "\x795D"
decomposeChar '\xFA52' = "\x798D"
decomposeChar '\xFA53' = "\x798E"
decomposeChar '\xFA54' = "\x7A40"
decomposeChar '\xFA55' = "\x7A81"
decomposeChar '\xFA56' = "\x7BC0"
decomposeChar '\xFA57' = "\x7DF4"
decomposeChar '\xFA58' = "\x7E09"
decomposeChar '\xFA59' = "\x7E41"
decomposeChar '\xFA5A' = "\x7F72"
decomposeChar '\xFA5B' = "\x8005"
decomposeChar '\xFA5C' = "\x81ED"
decomposeChar '\xFA5D' = "\x8279"
decomposeChar '\xFA5E' = "\x8279"
decomposeChar '\xFA5F' = "\x8457"
decomposeChar '\xFA60' = "\x8910"
decomposeChar '\xFA61' = "\x8996"
decomposeChar '\xFA62' = "\x8B01"
decomposeChar '\xFA63' = "\x8B39"
decomposeChar '\xFA64' = "\x8CD3"
decomposeChar '\xFA65' = "\x8D08"
decomposeChar '\xFA66' = "\x8FB6"
decomposeChar '\xFA67' = "\x9038"
decomposeChar '\xFA68' = "\x96E3"
decomposeChar '\xFA69' = "\x97FF"
decomposeChar '\xFA6A' = "\x983B"
decomposeChar '\xFA6B' = "\x6075"
decomposeChar '\xFA6C' = "\x242EE"
decomposeChar '\xFA6D' = "\x8218"
decomposeChar '\xFA70' = "\x4E26"
decomposeChar '\xFA71' = "\x51B5"
decomposeChar '\xFA72' = "\x5168"
decomposeChar '\xFA73' = "\x4F80"
decomposeChar '\xFA74' = "\x5145"
decomposeChar '\xFA75' = "\x5180"
decomposeChar '\xFA76' = "\x52C7"
decomposeChar '\xFA77' = "\x52FA"
decomposeChar '\xFA78' = "\x559D"
decomposeChar '\xFA79' = "\x5555"
decomposeChar '\xFA7A' = "\x5599"
decomposeChar '\xFA7B' = "\x55E2"
decomposeChar '\xFA7C' = "\x585A"
decomposeChar '\xFA7D' = "\x58B3"
decomposeChar '\xFA7E' = "\x5944"
decomposeChar '\xFA7F' = "\x5954"
decomposeChar '\xFA80' = "\x5A62"
decomposeChar '\xFA81' = "\x5B28"
decomposeChar '\xFA82' = "\x5ED2"
decomposeChar '\xFA83' = "\x5ED9"
decomposeChar '\xFA84' = "\x5F69"
decomposeChar '\xFA85' = "\x5FAD"
decomposeChar '\xFA86' = "\x60D8"
decomposeChar '\xFA87' = "\x614E"
decomposeChar '\xFA88' = "\x6108"
decomposeChar '\xFA89' = "\x618E"
decomposeChar '\xFA8A' = "\x6160"
decomposeChar '\xFA8B' = "\x61F2"
decomposeChar '\xFA8C' = "\x6234"
decomposeChar '\xFA8D' = "\x63C4"
decomposeChar '\xFA8E' = "\x641C"
decomposeChar '\xFA8F' = "\x6452"
decomposeChar '\xFA90' = "\x6556"
decomposeChar '\xFA91' = "\x6674"
decomposeChar '\xFA92' = "\x6717"
decomposeChar '\xFA93' = "\x671B"
decomposeChar '\xFA94' = "\x6756"
decomposeChar '\xFA95' = "\x6B79"
decomposeChar '\xFA96' = "\x6BBA"
decomposeChar '\xFA97' = "\x6D41"
decomposeChar '\xFA98' = "\x6EDB"
decomposeChar '\xFA99' = "\x6ECB"
decomposeChar '\xFA9A' = "\x6F22"
decomposeChar '\xFA9B' = "\x701E"
decomposeChar '\xFA9C' = "\x716E"
decomposeChar '\xFA9D' = "\x77A7"
decomposeChar '\xFA9E' = "\x7235"
decomposeChar '\xFA9F' = "\x72AF"
decomposeChar '\xFAA0' = "\x732A"
decomposeChar '\xFAA1' = "\x7471"
decomposeChar '\xFAA2' = "\x7506"
decomposeChar '\xFAA3' = "\x753B"
decomposeChar '\xFAA4' = "\x761D"
decomposeChar '\xFAA5' = "\x761F"
decomposeChar '\xFAA6' = "\x76CA"
decomposeChar '\xFAA7' = "\x76DB"
decomposeChar '\xFAA8' = "\x76F4"
decomposeChar '\xFAA9' = "\x774A"
decomposeChar '\xFAAA' = "\x7740"
decomposeChar '\xFAAB' = "\x78CC"
decomposeChar '\xFAAC' = "\x7AB1"
decomposeChar '\xFAAD' = "\x7BC0"
decomposeChar '\xFAAE' = "\x7C7B"
decomposeChar '\xFAAF' = "\x7D5B"
decomposeChar '\xFAB0' = "\x7DF4"
decomposeChar '\xFAB1' = "\x7F3E"
decomposeChar '\xFAB2' = "\x8005"
decomposeChar '\xFAB3' = "\x8352"
decomposeChar '\xFAB4' = "\x83EF"
decomposeChar '\xFAB5' = "\x8779"
decomposeChar '\xFAB6' = "\x8941"
decomposeChar '\xFAB7' = "\x8986"
decomposeChar '\xFAB8' = "\x8996"
decomposeChar '\xFAB9' = "\x8ABF"
decomposeChar '\xFABA' = "\x8AF8"
decomposeChar '\xFABB' = "\x8ACB"
decomposeChar '\xFABC' = "\x8B01"
decomposeChar '\xFABD' = "\x8AFE"
decomposeChar '\xFABE' = "\x8AED"
decomposeChar '\xFABF' = "\x8B39"
decomposeChar '\xFAC0' = "\x8B8A"
decomposeChar '\xFAC1' = "\x8D08"
decomposeChar '\xFAC2' = "\x8F38"
decomposeChar '\xFAC3' = "\x9072"
decomposeChar '\xFAC4' = "\x9199"
decomposeChar '\xFAC5' = "\x9276"
decomposeChar '\xFAC6' = "\x967C"
decomposeChar '\xFAC7' = "\x96E3"
decomposeChar '\xFAC8' = "\x9756"
decomposeChar '\xFAC9' = "\x97DB"
decomposeChar '\xFACA' = "\x97FF"
decomposeChar '\xFACB' = "\x980B"
decomposeChar '\xFACC' = "\x983B"
decomposeChar '\xFACD' = "\x9B12"
decomposeChar '\xFACE' = "\x9F9C"
decomposeChar '\xFACF' = "\x2284A"
decomposeChar '\xFAD0' = "\x22844"
decomposeChar '\xFAD1' = "\x233D5"
decomposeChar '\xFAD2' = "\x3B9D"
decomposeChar '\xFAD3' = "\x4018"
decomposeChar '\xFAD4' = "\x4039"
decomposeChar '\xFAD5' = "\x25249"
decomposeChar '\xFAD6' = "\x25CD0"
decomposeChar '\xFAD7' = "\x27ED3"
decomposeChar '\xFAD8' = "\x9F43"
decomposeChar '\xFAD9' = "\x9F8E"
decomposeChar '\xFB1D' = "\x05D9\x05B4"
decomposeChar '\xFB1F' = "\x05F2\x05B7"
decomposeChar '\xFB2A' = "\x05E9\x05C1"
decomposeChar '\xFB2B' = "\x05E9\x05C2"
decomposeChar '\xFB2C' = "\x05E9\x05BC\x05C1"
decomposeChar '\xFB2D' = "\x05E9\x05BC\x05C2"
decomposeChar '\xFB2E' = "\x05D0\x05B7"
decomposeChar '\xFB2F' = "\x05D0\x05B8"
decomposeChar '\xFB30' = "\x05D0\x05BC"
decomposeChar '\xFB31' = "\x05D1\x05BC"
decomposeChar '\xFB32' = "\x05D2\x05BC"
decomposeChar '\xFB33' = "\x05D3\x05BC"
decomposeChar '\xFB34' = "\x05D4\x05BC"
decomposeChar '\xFB35' = "\x05D5\x05BC"
decomposeChar '\xFB36' = "\x05D6\x05BC"
decomposeChar '\xFB38' = "\x05D8\x05BC"
decomposeChar '\xFB39' = "\x05D9\x05BC"
decomposeChar '\xFB3A' = "\x05DA\x05BC"
decomposeChar '\xFB3B' = "\x05DB\x05BC"
decomposeChar '\xFB3C' = "\x05DC\x05BC"
decomposeChar '\xFB3E' = "\x05DE\x05BC"
decomposeChar '\xFB40' = "\x05E0\x05BC"
decomposeChar '\xFB41' = "\x05E1\x05BC"
decomposeChar '\xFB43' = "\x05E3\x05BC"
decomposeChar '\xFB44' = "\x05E4\x05BC"
decomposeChar '\xFB46' = "\x05E6\x05BC"
decomposeChar '\xFB47' = "\x05E7\x05BC"
decomposeChar '\xFB48' = "\x05E8\x05BC"
decomposeChar '\xFB49' = "\x05E9\x05BC"
decomposeChar '\xFB4A' = "\x05EA\x05BC"
decomposeChar '\xFB4B' = "\x05D5\x05B9"
decomposeChar '\xFB4C' = "\x05D1\x05BF"
decomposeChar '\xFB4D' = "\x05DB\x05BF"
decomposeChar '\xFB4E' = "\x05E4\x05BF"
decomposeChar '\x1109A' = "\x11099\x110BA"
decomposeChar '\x1109C' = "\x1109B\x110BA"
decomposeChar '\x110AB' = "\x110A5\x110BA"
decomposeChar '\x1112E' = "\x11131\x11127"
decomposeChar '\x1112F' = "\x11132\x11127"
decomposeChar '\x1134B' = "\x11347\x1133E"
decomposeChar '\x1134C' = "\x11347\x11357"
decomposeChar '\x114BB' = "\x114B9\x114BA"
decomposeChar '\x114BC' = "\x114B9\x114B0"
decomposeChar '\x114BE' = "\x114B9\x114BD"
decomposeChar '\x115BA' = "\x115B8\x115AF"
decomposeChar '\x115BB' = "\x115B9\x115AF"
decomposeChar '\x1D15E' = "\x1D157\x1D165"
decomposeChar '\x1D15F' = "\x1D158\x1D165"
decomposeChar '\x1D160' = "\x1D158\x1D165\x1D16E"
decomposeChar '\x1D161' = "\x1D158\x1D165\x1D16F"
decomposeChar '\x1D162' = "\x1D158\x1D165\x1D170"
decomposeChar '\x1D163' = "\x1D158\x1D165\x1D171"
decomposeChar '\x1D164' = "\x1D158\x1D165\x1D172"
decomposeChar '\x1D1BB' = "\x1D1B9\x1D165"
decomposeChar '\x1D1BC' = "\x1D1BA\x1D165"
decomposeChar '\x1D1BD' = "\x1D1B9\x1D165\x1D16E"
decomposeChar '\x1D1BE' = "\x1D1BA\x1D165\x1D16E"
decomposeChar '\x1D1BF' = "\x1D1B9\x1D165\x1D16F"
decomposeChar '\x1D1C0' = "\x1D1BA\x1D165\x1D16F"
decomposeChar '\x2F800' = "\x4E3D"
decomposeChar '\x2F801' = "\x4E38"
decomposeChar '\x2F802' = "\x4E41"
decomposeChar '\x2F803' = "\x20122"
decomposeChar '\x2F804' = "\x4F60"
decomposeChar '\x2F805' = "\x4FAE"
decomposeChar '\x2F806' = "\x4FBB"
decomposeChar '\x2F807' = "\x5002"
decomposeChar '\x2F808' = "\x507A"
decomposeChar '\x2F809' = "\x5099"
decomposeChar '\x2F80A' = "\x50E7"
decomposeChar '\x2F80B' = "\x50CF"
decomposeChar '\x2F80C' = "\x349E"
decomposeChar '\x2F80D' = "\x2063A"
decomposeChar '\x2F80E' = "\x514D"
decomposeChar '\x2F80F' = "\x5154"
decomposeChar '\x2F810' = "\x5164"
decomposeChar '\x2F811' = "\x5177"
decomposeChar '\x2F812' = "\x2051C"
decomposeChar '\x2F813' = "\x34B9"
decomposeChar '\x2F814' = "\x5167"
decomposeChar '\x2F815' = "\x518D"
decomposeChar '\x2F816' = "\x2054B"
decomposeChar '\x2F817' = "\x5197"
decomposeChar '\x2F818' = "\x51A4"
decomposeChar '\x2F819' = "\x4ECC"
decomposeChar '\x2F81A' = "\x51AC"
decomposeChar '\x2F81B' = "\x51B5"
decomposeChar '\x2F81C' = "\x291DF"
decomposeChar '\x2F81D' = "\x51F5"
decomposeChar '\x2F81E' = "\x5203"
decomposeChar '\x2F81F' = "\x34DF"
decomposeChar '\x2F820' = "\x523B"
decomposeChar '\x2F821' = "\x5246"
decomposeChar '\x2F822' = "\x5272"
decomposeChar '\x2F823' = "\x5277"
decomposeChar '\x2F824' = "\x3515"
decomposeChar '\x2F825' = "\x52C7"
decomposeChar '\x2F826' = "\x52C9"
decomposeChar '\x2F827' = "\x52E4"
decomposeChar '\x2F828' = "\x52FA"
decomposeChar '\x2F829' = "\x5305"
decomposeChar '\x2F82A' = "\x5306"
decomposeChar '\x2F82B' = "\x5317"
decomposeChar '\x2F82C' = "\x5349"
decomposeChar '\x2F82D' = "\x5351"
decomposeChar '\x2F82E' = "\x535A"
decomposeChar '\x2F82F' = "\x5373"
decomposeChar '\x2F830' = "\x537D"
decomposeChar '\x2F831' = "\x537F"
decomposeChar '\x2F832' = "\x537F"
decomposeChar '\x2F833' = "\x537F"
decomposeChar '\x2F834' = "\x20A2C"
decomposeChar '\x2F835' = "\x7070"
decomposeChar '\x2F836' = "\x53CA"
decomposeChar '\x2F837' = "\x53DF"
decomposeChar '\x2F838' = "\x20B63"
decomposeChar '\x2F839' = "\x53EB"
decomposeChar '\x2F83A' = "\x53F1"
decomposeChar '\x2F83B' = "\x5406"
decomposeChar '\x2F83C' = "\x549E"
decomposeChar '\x2F83D' = "\x5438"
decomposeChar '\x2F83E' = "\x5448"
decomposeChar '\x2F83F' = "\x5468"
decomposeChar '\x2F840' = "\x54A2"
decomposeChar '\x2F841' = "\x54F6"
decomposeChar '\x2F842' = "\x5510"
decomposeChar '\x2F843' = "\x5553"
decomposeChar '\x2F844' = "\x5563"
decomposeChar '\x2F845' = "\x5584"
decomposeChar '\x2F846' = "\x5584"
decomposeChar '\x2F847' = "\x5599"
decomposeChar '\x2F848' = "\x55AB"
decomposeChar '\x2F849' = "\x55B3"
decomposeChar '\x2F84A' = "\x55C2"
decomposeChar '\x2F84B' = "\x5716"
decomposeChar '\x2F84C' = "\x5606"
decomposeChar '\x2F84D' = "\x5717"
decomposeChar '\x2F84E' = "\x5651"
decomposeChar '\x2F84F' = "\x5674"
decomposeChar '\x2F850' = "\x5207"
decomposeChar '\x2F851' = "\x58EE"
decomposeChar '\x2F852' = "\x57CE"
decomposeChar '\x2F853' = "\x57F4"
decomposeChar '\x2F854' = "\x580D"
decomposeChar '\x2F855' = "\x578B"
decomposeChar '\x2F856' = "\x5832"
decomposeChar '\x2F857' = "\x5831"
decomposeChar '\x2F858' = "\x58AC"
decomposeChar '\x2F859' = "\x214E4"
decomposeChar '\x2F85A' = "\x58F2"
decomposeChar '\x2F85B' = "\x58F7"
decomposeChar '\x2F85C' = "\x5906"
decomposeChar '\x2F85D' = "\x591A"
decomposeChar '\x2F85E' = "\x5922"
decomposeChar '\x2F85F' = "\x5962"
decomposeChar '\x2F860' = "\x216A8"
decomposeChar '\x2F861' = "\x216EA"
decomposeChar '\x2F862' = "\x59EC"
decomposeChar '\x2F863' = "\x5A1B"
decomposeChar '\x2F864' = "\x5A27"
decomposeChar '\x2F865' = "\x59D8"
decomposeChar '\x2F866' = "\x5A66"
decomposeChar '\x2F867' = "\x36EE"
decomposeChar '\x2F868' = "\x36FC"
decomposeChar '\x2F869' = "\x5B08"
decomposeChar '\x2F86A' = "\x5B3E"
decomposeChar '\x2F86B' = "\x5B3E"
decomposeChar '\x2F86C' = "\x219C8"
decomposeChar '\x2F86D' = "\x5BC3"
decomposeChar '\x2F86E' = "\x5BD8"
decomposeChar '\x2F86F' = "\x5BE7"
decomposeChar '\x2F870' = "\x5BF3"
decomposeChar '\x2F871' = "\x21B18"
decomposeChar '\x2F872' = "\x5BFF"
decomposeChar '\x2F873' = "\x5C06"
decomposeChar '\x2F874' = "\x5F53"
decomposeChar '\x2F875' = "\x5C22"
decomposeChar '\x2F876' = "\x3781"
decomposeChar '\x2F877' = "\x5C60"
decomposeChar '\x2F878' = "\x5C6E"
decomposeChar '\x2F879' = "\x5CC0"
decomposeChar '\x2F87A' = "\x5C8D"
decomposeChar '\x2F87B' = "\x21DE4"
decomposeChar '\x2F87C' = "\x5D43"
decomposeChar '\x2F87D' = "\x21DE6"
decomposeChar '\x2F87E' = "\x5D6E"
decomposeChar '\x2F87F' = "\x5D6B"
decomposeChar '\x2F880' = "\x5D7C"
decomposeChar '\x2F881' = "\x5DE1"
decomposeChar '\x2F882' = "\x5DE2"
decomposeChar '\x2F883' = "\x382F"
decomposeChar '\x2F884' = "\x5DFD"
decomposeChar '\x2F885' = "\x5E28"
decomposeChar '\x2F886' = "\x5E3D"
decomposeChar '\x2F887' = "\x5E69"
decomposeChar '\x2F888' = "\x3862"
decomposeChar '\x2F889' = "\x22183"
decomposeChar '\x2F88A' = "\x387C"
decomposeChar '\x2F88B' = "\x5EB0"
decomposeChar '\x2F88C' = "\x5EB3"
decomposeChar '\x2F88D' = "\x5EB6"
decomposeChar '\x2F88E' = "\x5ECA"
decomposeChar '\x2F88F' = "\x2A392"
decomposeChar '\x2F890' = "\x5EFE"
decomposeChar '\x2F891' = "\x22331"
decomposeChar '\x2F892' = "\x22331"
decomposeChar '\x2F893' = "\x8201"
decomposeChar '\x2F894' = "\x5F22"
decomposeChar '\x2F895' = "\x5F22"
decomposeChar '\x2F896' = "\x38C7"
decomposeChar '\x2F897' = "\x232B8"
decomposeChar '\x2F898' = "\x261DA"
decomposeChar '\x2F899' = "\x5F62"
decomposeChar '\x2F89A' = "\x5F6B"
decomposeChar '\x2F89B' = "\x38E3"
decomposeChar '\x2F89C' = "\x5F9A"
decomposeChar '\x2F89D' = "\x5FCD"
decomposeChar '\x2F89E' = "\x5FD7"
decomposeChar '\x2F89F' = "\x5FF9"
decomposeChar '\x2F8A0' = "\x6081"
decomposeChar '\x2F8A1' = "\x393A"
decomposeChar '\x2F8A2' = "\x391C"
decomposeChar '\x2F8A3' = "\x6094"
decomposeChar '\x2F8A4' = "\x226D4"
decomposeChar '\x2F8A5' = "\x60C7"
decomposeChar '\x2F8A6' = "\x6148"
decomposeChar '\x2F8A7' = "\x614C"
decomposeChar '\x2F8A8' = "\x614E"
decomposeChar '\x2F8A9' = "\x614C"
decomposeChar '\x2F8AA' = "\x617A"
decomposeChar '\x2F8AB' = "\x618E"
decomposeChar '\x2F8AC' = "\x61B2"
decomposeChar '\x2F8AD' = "\x61A4"
decomposeChar '\x2F8AE' = "\x61AF"
decomposeChar '\x2F8AF' = "\x61DE"
decomposeChar '\x2F8B0' = "\x61F2"
decomposeChar '\x2F8B1' = "\x61F6"
decomposeChar '\x2F8B2' = "\x6210"
decomposeChar '\x2F8B3' = "\x621B"
decomposeChar '\x2F8B4' = "\x625D"
decomposeChar '\x2F8B5' = "\x62B1"
decomposeChar '\x2F8B6' = "\x62D4"
decomposeChar '\x2F8B7' = "\x6350"
decomposeChar '\x2F8B8' = "\x22B0C"
decomposeChar '\x2F8B9' = "\x633D"
decomposeChar '\x2F8BA' = "\x62FC"
decomposeChar '\x2F8BB' = "\x6368"
decomposeChar '\x2F8BC' = "\x6383"
decomposeChar '\x2F8BD' = "\x63E4"
decomposeChar '\x2F8BE' = "\x22BF1"
decomposeChar '\x2F8BF' = "\x6422"
decomposeChar '\x2F8C0' = "\x63C5"
decomposeChar '\x2F8C1' = "\x63A9"
decomposeChar '\x2F8C2' = "\x3A2E"
decomposeChar '\x2F8C3' = "\x6469"
decomposeChar '\x2F8C4' = "\x647E"
decomposeChar '\x2F8C5' = "\x649D"
decomposeChar '\x2F8C6' = "\x6477"
decomposeChar '\x2F8C7' = "\x3A6C"
decomposeChar '\x2F8C8' = "\x654F"
decomposeChar '\x2F8C9' = "\x656C"
decomposeChar '\x2F8CA' = "\x2300A"
decomposeChar '\x2F8CB' = "\x65E3"
decomposeChar '\x2F8CC' = "\x66F8"
decomposeChar '\x2F8CD' = "\x6649"
decomposeChar '\x2F8CE' = "\x3B19"
decomposeChar '\x2F8CF' = "\x6691"
decomposeChar '\x2F8D0' = "\x3B08"
decomposeChar '\x2F8D1' = "\x3AE4"
decomposeChar '\x2F8D2' = "\x5192"
decomposeChar '\x2F8D3' = "\x5195"
decomposeChar '\x2F8D4' = "\x6700"
decomposeChar '\x2F8D5' = "\x669C"
decomposeChar '\x2F8D6' = "\x80AD"
decomposeChar '\x2F8D7' = "\x43D9"
decomposeChar '\x2F8D8' = "\x6717"
decomposeChar '\x2F8D9' = "\x671B"
decomposeChar '\x2F8DA' = "\x6721"
decomposeChar '\x2F8DB' = "\x675E"
decomposeChar '\x2F8DC' = "\x6753"
decomposeChar '\x2F8DD' = "\x233C3"
decomposeChar '\x2F8DE' = "\x3B49"
decomposeChar '\x2F8DF' = "\x67FA"
decomposeChar '\x2F8E0' = "\x6785"
decomposeChar '\x2F8E1' = "\x6852"
decomposeChar '\x2F8E2' = "\x6885"
decomposeChar '\x2F8E3' = "\x2346D"
decomposeChar '\x2F8E4' = "\x688E"
decomposeChar '\x2F8E5' = "\x681F"
decomposeChar '\x2F8E6' = "\x6914"
decomposeChar '\x2F8E7' = "\x3B9D"
decomposeChar '\x2F8E8' = "\x6942"
decomposeChar '\x2F8E9' = "\x69A3"
decomposeChar '\x2F8EA' = "\x69EA"
decomposeChar '\x2F8EB' = "\x6AA8"
decomposeChar '\x2F8EC' = "\x236A3"
decomposeChar '\x2F8ED' = "\x6ADB"
decomposeChar '\x2F8EE' = "\x3C18"
decomposeChar '\x2F8EF' = "\x6B21"
decomposeChar '\x2F8F0' = "\x238A7"
decomposeChar '\x2F8F1' = "\x6B54"
decomposeChar '\x2F8F2' = "\x3C4E"
decomposeChar '\x2F8F3' = "\x6B72"
decomposeChar '\x2F8F4' = "\x6B9F"
decomposeChar '\x2F8F5' = "\x6BBA"
decomposeChar '\x2F8F6' = "\x6BBB"
decomposeChar '\x2F8F7' = "\x23A8D"
decomposeChar '\x2F8F8' = "\x21D0B"
decomposeChar '\x2F8F9' = "\x23AFA"
decomposeChar '\x2F8FA' = "\x6C4E"
decomposeChar '\x2F8FB' = "\x23CBC"
decomposeChar '\x2F8FC' = "\x6CBF"
decomposeChar '\x2F8FD' = "\x6CCD"
decomposeChar '\x2F8FE' = "\x6C67"
decomposeChar '\x2F8FF' = "\x6D16"
decomposeChar '\x2F900' = "\x6D3E"
decomposeChar '\x2F901' = "\x6D77"
decomposeChar '\x2F902' = "\x6D41"
decomposeChar '\x2F903' = "\x6D69"
decomposeChar '\x2F904' = "\x6D78"
decomposeChar '\x2F905' = "\x6D85"
decomposeChar '\x2F906' = "\x23D1E"
decomposeChar '\x2F907' = "\x6D34"
decomposeChar '\x2F908' = "\x6E2F"
decomposeChar '\x2F909' = "\x6E6E"
decomposeChar '\x2F90A' = "\x3D33"
decomposeChar '\x2F90B' = "\x6ECB"
decomposeChar '\x2F90C' = "\x6EC7"
decomposeChar '\x2F90D' = "\x23ED1"
decomposeChar '\x2F90E' = "\x6DF9"
decomposeChar '\x2F90F' = "\x6F6E"
decomposeChar '\x2F910' = "\x23F5E"
decomposeChar '\x2F911' = "\x23F8E"
decomposeChar '\x2F912' = "\x6FC6"
decomposeChar '\x2F913' = "\x7039"
decomposeChar '\x2F914' = "\x701E"
decomposeChar '\x2F915' = "\x701B"
decomposeChar '\x2F916' = "\x3D96"
decomposeChar '\x2F917' = "\x704A"
decomposeChar '\x2F918' = "\x707D"
decomposeChar '\x2F919' = "\x7077"
decomposeChar '\x2F91A' = "\x70AD"
decomposeChar '\x2F91B' = "\x20525"
decomposeChar '\x2F91C' = "\x7145"
decomposeChar '\x2F91D' = "\x24263"
decomposeChar '\x2F91E' = "\x719C"
decomposeChar '\x2F91F' = "\x243AB"
decomposeChar '\x2F920' = "\x7228"
decomposeChar '\x2F921' = "\x7235"
decomposeChar '\x2F922' = "\x7250"
decomposeChar '\x2F923' = "\x24608"
decomposeChar '\x2F924' = "\x7280"
decomposeChar '\x2F925' = "\x7295"
decomposeChar '\x2F926' = "\x24735"
decomposeChar '\x2F927' = "\x24814"
decomposeChar '\x2F928' = "\x737A"
decomposeChar '\x2F929' = "\x738B"
decomposeChar '\x2F92A' = "\x3EAC"
decomposeChar '\x2F92B' = "\x73A5"
decomposeChar '\x2F92C' = "\x3EB8"
decomposeChar '\x2F92D' = "\x3EB8"
decomposeChar '\x2F92E' = "\x7447"
decomposeChar '\x2F92F' = "\x745C"
decomposeChar '\x2F930' = "\x7471"
decomposeChar '\x2F931' = "\x7485"
decomposeChar '\x2F932' = "\x74CA"
decomposeChar '\x2F933' = "\x3F1B"
decomposeChar '\x2F934' = "\x7524"
decomposeChar '\x2F935' = "\x24C36"
decomposeChar '\x2F936' = "\x753E"
decomposeChar '\x2F937' = "\x24C92"
decomposeChar '\x2F938' = "\x7570"
decomposeChar '\x2F939' = "\x2219F"
decomposeChar '\x2F93A' = "\x7610"
decomposeChar '\x2F93B' = "\x24FA1"
decomposeChar '\x2F93C' = "\x24FB8"
decomposeChar '\x2F93D' = "\x25044"
decomposeChar '\x2F93E' = "\x3FFC"
decomposeChar '\x2F93F' = "\x4008"
decomposeChar '\x2F940' = "\x76F4"
decomposeChar '\x2F941' = "\x250F3"
decomposeChar '\x2F942' = "\x250F2"
decomposeChar '\x2F943' = "\x25119"
decomposeChar '\x2F944' = "\x25133"
decomposeChar '\x2F945' = "\x771E"
decomposeChar '\x2F946' = "\x771F"
decomposeChar '\x2F947' = "\x771F"
decomposeChar '\x2F948' = "\x774A"
decomposeChar '\x2F949' = "\x4039"
decomposeChar '\x2F94A' = "\x778B"
decomposeChar '\x2F94B' = "\x4046"
decomposeChar '\x2F94C' = "\x4096"
decomposeChar '\x2F94D' = "\x2541D"
decomposeChar '\x2F94E' = "\x784E"
decomposeChar '\x2F94F' = "\x788C"
decomposeChar '\x2F950' = "\x78CC"
decomposeChar '\x2F951' = "\x40E3"
decomposeChar '\x2F952' = "\x25626"
decomposeChar '\x2F953' = "\x7956"
decomposeChar '\x2F954' = "\x2569A"
decomposeChar '\x2F955' = "\x256C5"
decomposeChar '\x2F956' = "\x798F"
decomposeChar '\x2F957' = "\x79EB"
decomposeChar '\x2F958' = "\x412F"
decomposeChar '\x2F959' = "\x7A40"
decomposeChar '\x2F95A' = "\x7A4A"
decomposeChar '\x2F95B' = "\x7A4F"
decomposeChar '\x2F95C' = "\x2597C"
decomposeChar '\x2F95D' = "\x25AA7"
decomposeChar '\x2F95E' = "\x25AA7"
decomposeChar '\x2F95F' = "\x7AEE"
decomposeChar '\x2F960' = "\x4202"
decomposeChar '\x2F961' = "\x25BAB"
decomposeChar '\x2F962' = "\x7BC6"
decomposeChar '\x2F963' = "\x7BC9"
decomposeChar '\x2F964' = "\x4227"
decomposeChar '\x2F965' = "\x25C80"
decomposeChar '\x2F966' = "\x7CD2"
decomposeChar '\x2F967' = "\x42A0"
decomposeChar '\x2F968' = "\x7CE8"
decomposeChar '\x2F969' = "\x7CE3"
decomposeChar '\x2F96A' = "\x7D00"
decomposeChar '\x2F96B' = "\x25F86"
decomposeChar '\x2F96C' = "\x7D63"
decomposeChar '\x2F96D' = "\x4301"
decomposeChar '\x2F96E' = "\x7DC7"
decomposeChar '\x2F96F' = "\x7E02"
decomposeChar '\x2F970' = "\x7E45"
decomposeChar '\x2F971' = "\x4334"
decomposeChar '\x2F972' = "\x26228"
decomposeChar '\x2F973' = "\x26247"
decomposeChar '\x2F974' = "\x4359"
decomposeChar '\x2F975' = "\x262D9"
decomposeChar '\x2F976' = "\x7F7A"
decomposeChar '\x2F977' = "\x2633E"
decomposeChar '\x2F978' = "\x7F95"
decomposeChar '\x2F979' = "\x7FFA"
decomposeChar '\x2F97A' = "\x8005"
decomposeChar '\x2F97B' = "\x264DA"
decomposeChar '\x2F97C' = "\x26523"
decomposeChar '\x2F97D' = "\x8060"
decomposeChar '\x2F97E' = "\x265A8"
decomposeChar '\x2F97F' = "\x8070"
decomposeChar '\x2F980' = "\x2335F"
decomposeChar '\x2F981' = "\x43D5"
decomposeChar '\x2F982' = "\x80B2"
decomposeChar '\x2F983' = "\x8103"
decomposeChar '\x2F984' = "\x440B"
decomposeChar '\x2F985' = "\x813E"
decomposeChar '\x2F986' = "\x5AB5"
decomposeChar '\x2F987' = "\x267A7"
decomposeChar '\x2F988' = "\x267B5"
decomposeChar '\x2F989' = "\x23393"
decomposeChar '\x2F98A' = "\x2339C"
decomposeChar '\x2F98B' = "\x8201"
decomposeChar '\x2F98C' = "\x8204"
decomposeChar '\x2F98D' = "\x8F9E"
decomposeChar '\x2F98E' = "\x446B"
decomposeChar '\x2F98F' = "\x8291"
decomposeChar '\x2F990' = "\x828B"
decomposeChar '\x2F991' = "\x829D"
decomposeChar '\x2F992' = "\x52B3"
decomposeChar '\x2F993' = "\x82B1"
decomposeChar '\x2F994' = "\x82B3"
decomposeChar '\x2F995' = "\x82BD"
decomposeChar '\x2F996' = "\x82E6"
decomposeChar '\x2F997' = "\x26B3C"
decomposeChar '\x2F998' = "\x82E5"
decomposeChar '\x2F999' = "\x831D"
decomposeChar '\x2F99A' = "\x8363"
decomposeChar '\x2F99B' = "\x83AD"
decomposeChar '\x2F99C' = "\x8323"
decomposeChar '\x2F99D' = "\x83BD"
decomposeChar '\x2F99E' = "\x83E7"
decomposeChar '\x2F99F' = "\x8457"
decomposeChar '\x2F9A0' = "\x8353"
decomposeChar '\x2F9A1' = "\x83CA"
decomposeChar '\x2F9A2' = "\x83CC"
decomposeChar '\x2F9A3' = "\x83DC"
decomposeChar '\x2F9A4' = "\x26C36"
decomposeChar '\x2F9A5' = "\x26D6B"
decomposeChar '\x2F9A6' = "\x26CD5"
decomposeChar '\x2F9A7' = "\x452B"
decomposeChar '\x2F9A8' = "\x84F1"
decomposeChar '\x2F9A9' = "\x84F3"
decomposeChar '\x2F9AA' = "\x8516"
decomposeChar '\x2F9AB' = "\x273CA"
decomposeChar '\x2F9AC' = "\x8564"
decomposeChar '\x2F9AD' = "\x26F2C"
decomposeChar '\x2F9AE' = "\x455D"
decomposeChar '\x2F9AF' = "\x4561"
decomposeChar '\x2F9B0' = "\x26FB1"
decomposeChar '\x2F9B1' = "\x270D2"
decomposeChar '\x2F9B2' = "\x456B"
decomposeChar '\x2F9B3' = "\x8650"
decomposeChar '\x2F9B4' = "\x865C"
decomposeChar '\x2F9B5' = "\x8667"
decomposeChar '\x2F9B6' = "\x8669"
decomposeChar '\x2F9B7' = "\x86A9"
decomposeChar '\x2F9B8' = "\x8688"
decomposeChar '\x2F9B9' = "\x870E"
decomposeChar '\x2F9BA' = "\x86E2"
decomposeChar '\x2F9BB' = "\x8779"
decomposeChar '\x2F9BC' = "\x8728"
decomposeChar '\x2F9BD' = "\x876B"
decomposeChar '\x2F9BE' = "\x8786"
decomposeChar '\x2F9BF' = "\x45D7"
decomposeChar '\x2F9C0' = "\x87E1"
decomposeChar '\x2F9C1' = "\x8801"
decomposeChar '\x2F9C2' = "\x45F9"
decomposeChar '\x2F9C3' = "\x8860"
decomposeChar '\x2F9C4' = "\x8863"
decomposeChar '\x2F9C5' = "\x27667"
decomposeChar '\x2F9C6' = "\x88D7"
decomposeChar '\x2F9C7' = "\x88DE"
decomposeChar '\x2F9C8' = "\x4635"
decomposeChar '\x2F9C9' = "\x88FA"
decomposeChar '\x2F9CA' = "\x34BB"
decomposeChar '\x2F9CB' = "\x278AE"
decomposeChar '\x2F9CC' = "\x27966"
decomposeChar '\x2F9CD' = "\x46BE"
decomposeChar '\x2F9CE' = "\x46C7"
decomposeChar '\x2F9CF' = "\x8AA0"
decomposeChar '\x2F9D0' = "\x8AED"
decomposeChar '\x2F9D1' = "\x8B8A"
decomposeChar '\x2F9D2' = "\x8C55"
decomposeChar '\x2F9D3' = "\x27CA8"
decomposeChar '\x2F9D4' = "\x8CAB"
decomposeChar '\x2F9D5' = "\x8CC1"
decomposeChar '\x2F9D6' = "\x8D1B"
decomposeChar '\x2F9D7' = "\x8D77"
decomposeChar '\x2F9D8' = "\x27F2F"
decomposeChar '\x2F9D9' = "\x20804"
decomposeChar '\x2F9DA' = "\x8DCB"
decomposeChar '\x2F9DB' = "\x8DBC"
decomposeChar '\x2F9DC' = "\x8DF0"
decomposeChar '\x2F9DD' = "\x208DE"
decomposeChar '\x2F9DE' = "\x8ED4"
decomposeChar '\x2F9DF' = "\x8F38"
decomposeChar '\x2F9E0' = "\x285D2"
decomposeChar '\x2F9E1' = "\x285ED"
decomposeChar '\x2F9E2' = "\x9094"
decomposeChar '\x2F9E3' = "\x90F1"
decomposeChar '\x2F9E4' = "\x9111"
decomposeChar '\x2F9E5' = "\x2872E"
decomposeChar '\x2F9E6' = "\x911B"
decomposeChar '\x2F9E7' = "\x9238"
decomposeChar '\x2F9E8' = "\x92D7"
decomposeChar '\x2F9E9' = "\x92D8"
decomposeChar '\x2F9EA' = "\x927C"
decomposeChar '\x2F9EB' = "\x93F9"
decomposeChar '\x2F9EC' = "\x9415"
decomposeChar '\x2F9ED' = "\x28BFA"
decomposeChar '\x2F9EE' = "\x958B"
decomposeChar '\x2F9EF' = "\x4995"
decomposeChar '\x2F9F0' = "\x95B7"
decomposeChar '\x2F9F1' = "\x28D77"
decomposeChar '\x2F9F2' = "\x49E6"
decomposeChar '\x2F9F3' = "\x96C3"
decomposeChar '\x2F9F4' = "\x5DB2"
decomposeChar '\x2F9F5' = "\x9723"
decomposeChar '\x2F9F6' = "\x29145"
decomposeChar '\x2F9F7' = "\x2921A"
decomposeChar '\x2F9F8' = "\x4A6E"
decomposeChar '\x2F9F9' = "\x4A76"
decomposeChar '\x2F9FA' = "\x97E0"
decomposeChar '\x2F9FB' = "\x2940A"
decomposeChar '\x2F9FC' = "\x4AB2"
decomposeChar '\x2F9FD' = "\x29496"
decomposeChar '\x2F9FE' = "\x980B"
decomposeChar '\x2F9FF' = "\x980B"
decomposeChar '\x2FA00' = "\x9829"
decomposeChar '\x2FA01' = "\x295B6"
decomposeChar '\x2FA02' = "\x98E2"
decomposeChar '\x2FA03' = "\x4B33"
decomposeChar '\x2FA04' = "\x9929"
decomposeChar '\x2FA05' = "\x99A7"
decomposeChar '\x2FA06' = "\x99C2"
decomposeChar '\x2FA07' = "\x99FE"
decomposeChar '\x2FA08' = "\x4BCE"
decomposeChar '\x2FA09' = "\x29B30"
decomposeChar '\x2FA0A' = "\x9B12"
decomposeChar '\x2FA0B' = "\x9C40"
decomposeChar '\x2FA0C' = "\x9CFD"
decomposeChar '\x2FA0D' = "\x4CCE"
decomposeChar '\x2FA0E' = "\x4CED"
decomposeChar '\x2FA0F' = "\x9D67"
decomposeChar '\x2FA10' = "\x2A0CE"
decomposeChar '\x2FA11' = "\x4CF8"
decomposeChar '\x2FA12' = "\x2A105"
decomposeChar '\x2FA13' = "\x2A20E"
decomposeChar '\x2FA14' = "\x2A291"
decomposeChar '\x2FA15' = "\x9EBB"
decomposeChar '\x2FA16' = "\x4D56"
decomposeChar '\x2FA17' = "\x9EF9"
decomposeChar '\x2FA18' = "\x9EFE"
decomposeChar '\x2FA19' = "\x9F05"
decomposeChar '\x2FA1A' = "\x9F0F"
decomposeChar '\x2FA1B' = "\x9F16"
decomposeChar '\x2FA1C' = "\x9F3B"
decomposeChar '\x2FA1D' = "\x2A600"
decomposeChar c = Text.singleton c
| ancientlanguage/haskell-analysis | prepare/src/Prepare/Decompose.hs | mit | 79,162 | 0 | 6 | 8,288 | 16,547 | 8,279 | 8,268 | 2,067 | 1 |
-- | Primality test by trial division up to the integer square root.
--
-- Fix: the original returned True for every x < 2 (including 1, 0 and
-- negatives), because the divisor list [2..floor(sqrt x)] is empty there.
-- All numbers below 2 are non-prime by definition.
isPrime :: Integer -> Bool
isPrime x
  | x < 2     = False
  | otherwise = null [y | y <- [2 .. isqrt x], x `mod` y == 0]
  where
    -- Integer square root via Double; adequate as a trial-division bound
    -- for the magnitudes this helper is used with.
    isqrt = floor . sqrt . fromIntegral
-- | All ordered pairs (a, b) with n >= a > b >= 1 whose sum is prime,
-- returned as triples (a, b, a + b).
primeSumPairs :: Integer -> [(Integer, Integer, Integer)]
primeSumPairs n =
    [ (a, b, a + b)
    | a <- [1 .. n]
    , b <- [1 .. a - 1]
    , isPrime (a + b)
    ]
| shouya/thinking-dumps | sicp/chap2/2.40.hs | mit | 343 | 0 | 14 | 74 | 203 | 110 | 93 | 7 | 1 |
module Bot.Component.Combinator (
(+++)
, (>>+)
, combine
) where
import Bot.Component
import Control.Applicative
import Control.Monad.Trans
-- | Combine two generalized process methods into a single process method
-- that feeds the same message to each, in order.
(+++) :: BotMonad b
      => (String -> b ())
      -> (String -> b ())
      -> String -> b ()
(+++) runFirst runSecond = \message -> do
    runFirst message
    runSecond message
-- | Combine a list of generalized process methods into a single process
-- method; every method receives the same message, in list order.
combine :: BotMonad b
        => [String -> b ()]
        -> String -> b ()
combine methods message = sequence_ [method message | method <- methods]
-- | A convenient combinator for stacking multiple `ComponentPart`s: the
-- inner component's extractor seeds the outer component, and the combined
-- action runs the (lifted) inner action before the outer one on each message.
(>>+) :: (BotMonad b, BotMonad (d b), Applicative (d b), MonadTrans d)
      => Bot (ComponentPart b)
      -> (BotExtractor b -> Bot (ComponentPart (d b)))
      -> Bot (ComponentPart (d b))
innerComponent >>+ outerComponentCreator = do
    (innerExtractor, innerAction) <- innerComponent
    (outerExtractor, outerAction) <- outerComponentCreator innerExtractor
    -- Equivalent to: (*>) <$> lift . innerAction <*> outerAction, written
    -- pointed for readability.
    let combinedAction message =
            lift (innerAction message) *> outerAction message
    return (outerExtractor, combinedAction)
| numberten/zhenya_bot | Bot/Component/Combinator.hs | mit | 1,212 | 0 | 14 | 310 | 354 | 187 | 167 | 26 | 1 |
module Main where
import qualified Tests.Store.LevelDBExternal as LevelDB
import Test.Framework (defaultMain, testGroup)
-- | Test-suite entry point: hand the LevelDB external-store test group
-- to test-framework's default runner.
main :: IO ()
main = defaultMain [testGroup "Tests.Store.LevelDB" LevelDB.externalTests]
| danstiner/clod | test/ExternalTestSuite.hs | mit | 250 | 0 | 9 | 46 | 61 | 36 | 25 | 6 | 1 |
import Data.Char
-- | Capitalise the first character of a string; the empty string is
-- returned unchanged.
ucfirst :: String -> String
ucfirst s =
    case s of
        ""       -> ""
        (c : cs) -> toUpper c : cs
-- | Upper-case every character of the string.
toUpperCase :: String -> String
toUpperCase s = [toUpper c | c <- s]
-- | Upper-case the first character of a string.
-- Partial: errors on the empty string (inherited from 'head').
uchead :: String -> Char
uchead = toUpper . head
module Parser where
import Text.ParserCombinators.Parsec ( Parser, string, many, char, alphaNum
, letter, (<|>), digit, space, many1
, oneOf, skipMany1, parse, sepBy, try
, endBy)
import Text.ParserCombinators.Parsec.Token (identifier)
import System.Environment (getArgs)
import Control.Monad (liftM)
-- | The core Scheme value type produced by the parser.
data LispVal = Atom String                  -- ^ bare identifier
             | List [LispVal]               -- ^ proper list
             | DottedList [LispVal] LispVal -- ^ improper list: elements plus a tail
             | Number Integer
             | String String
             | Bool Bool

-- | Render values in a Lisp-like textual form.
--
-- Fix: 'DottedList' previously rendered as @"Dotted list [Atom a] . x"@,
-- leaking Haskell list syntax into otherwise Lisp-style output.  It now
-- uses the conventional parenthesised @(a b . c)@ form, consistent with
-- how 'List' is shown.
instance Show LispVal where
  show (Atom s) = "Atom " ++ s
  show (List xs) = "(" ++ unwords (map show xs) ++ ")"
  show (DottedList xs x) =
    "(" ++ unwords (map show xs) ++ " . " ++ show x ++ ")"
  show (Number x) = show x
  show (String s) = "\"" ++ s ++ "\""
  show (Bool b) = show b
-- | Parse one character that is legal in a Scheme symbol but is neither
-- a letter nor a digit.
symbol :: Parser Char
symbol = oneOf "!#$%&|*+-/:<=>?@^_~"
-- | Skip one or more whitespace characters.  This deliberately shadows
-- Parsec's own 'spaces' (which also accepts zero characters): requiring
-- at least one lets it serve as a separator in 'sepBy'/'endBy'.
spaces :: Parser ()
spaces = skipMany1 space
-- | Parse the two-character escape sequence \" inside a string literal,
-- yielding the quote character itself.
escapedQuote :: Parser Char
escapedQuote = do
    _ <- char '\\'
    char '"'
-- | Parse a double-quoted string literal.  The body may contain
-- alphanumerics, spaces, symbol characters, and escaped quotes (\").
parseString :: Parser LispVal
parseString = do
    _ <- char '"'
    contents <- many stringChar
    _ <- char '"'
    return (String contents)
  where
    -- A single character of the string body; the escape form comes last
    -- so plain characters are tried first (their first-character sets
    -- are disjoint, so the order is not semantically significant).
    stringChar = alphaNum <|> space <|> symbol <|> escapedQuote
-- | Parse an atom: a letter or symbol followed by any number of letters,
-- digits, or symbols.  The literals #t and #f are recognised as booleans
-- rather than atoms.
parseAtom :: Parser LispVal
parseAtom = do
    initial <- letter <|> symbol
    remainder <- many (letter <|> digit <|> symbol)
    return (classify (initial : remainder))
  where
    classify "#t" = Bool True
    classify "#f" = Bool False
    classify name = Atom name
-- | Parse a non-negative integer literal: one or more digits.
-- 'read' is safe here because 'many1' 'digit' guarantees the input
-- is a non-empty digit string.
parseNumber :: Parser LispVal
parseNumber = do
    digits <- many1 digit
    return (Number (read digits))
-- | Parse the elements of a proper list: expressions separated by
-- whitespace.  The surrounding parentheses are handled by 'parseExpr'.
parseList :: Parser LispVal
parseList = do
    elements <- sepBy parseExpr spaces
    return (List elements)
-- | Parse a dotted (improper) list: one or more expressions, then a dot
-- and a final tail expression.  Parentheses are handled by 'parseExpr'.
-- Local names avoid shadowing Prelude's 'head' and 'tail'.
parseDottedList :: Parser LispVal
parseDottedList = do
    initial <- endBy parseExpr spaces
    final <- char '.' >> spaces >> parseExpr
    return (DottedList initial final)
-- | Parse the quote abbreviation: 'expr expands to (quote expr).
parseQuoted :: Parser LispVal
parseQuoted = do
    _ <- char '\''
    quoted <- parseExpr
    return (List [Atom "quote", quoted])
-- | Parse any expression: an atom, string, number, quoted form, or a
-- parenthesised (possibly dotted) list.
parseExpr :: Parser LispVal
parseExpr = parseAtom
        <|> parseString
        <|> parseNumber
        <|> parseQuoted
        <|> parenthesised
  where
    -- 'try' is required because a proper list and a dotted list share a
    -- common prefix; on failure the input must be rewound.
    parenthesised = do
        _ <- char '('
        inner <- try parseList <|> parseDottedList
        _ <- char ')'
        return inner
-- | Parse a single expression from the input, packaging any parse
-- failure as a String value that describes the error.
readExpr :: String -> LispVal
readExpr =
    either (String . ("No match: " ++) . show) id . parse parseExpr "lisp"
-- @Prefix.hs
{-# LANGUAGE TemplateHaskell #-}
module Prefix where
import Data.Text.Lazy (Text)
import Database.Persist.TH
-- | A text prefix together with an occurrence count (presumably the
-- n-gram counts backing the Markov-chain store — confirm against callers).
data Prefix = Prefix {pre :: Text, count :: Int}
    deriving (Show, Read, Eq)

-- Derive a persistent PersistField instance so 'Prefix' can be stored as
-- a single serialised database column.
-- Fix: removed trailing dataset-metadata residue that had been fused onto
-- this line, which made the module fail to compile.
derivePersistField "Prefix"
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html
module Stratosphere.Resources.OpsWorksCMServer where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.OpsWorksCMServerEngineAttribute
-- | Full data type definition for OpsWorksCMServer. See 'opsWorksCMServer'
-- for a more convenient constructor.
--
-- Generated mapping of the AWS::OpsWorksCM::Server resource: optional
-- CloudFormation properties are 'Maybe' fields, required ones are bare.
data OpsWorksCMServer =
  OpsWorksCMServer
  { _opsWorksCMServerAssociatePublicIpAddress :: Maybe (Val Bool)
  , _opsWorksCMServerBackupId :: Maybe (Val Text)
  , _opsWorksCMServerBackupRetentionCount :: Maybe (Val Integer)
  , _opsWorksCMServerDisableAutomatedBackup :: Maybe (Val Bool)
  , _opsWorksCMServerEngine :: Maybe (Val Text)
  , _opsWorksCMServerEngineAttributes :: Maybe [OpsWorksCMServerEngineAttribute]
  , _opsWorksCMServerEngineModel :: Maybe (Val Text)
  , _opsWorksCMServerEngineVersion :: Maybe (Val Text)
  , _opsWorksCMServerInstanceProfileArn :: Val Text
  , _opsWorksCMServerInstanceType :: Val Text
  , _opsWorksCMServerKeyPair :: Maybe (Val Text)
  , _opsWorksCMServerPreferredBackupWindow :: Maybe (Val Text)
  , _opsWorksCMServerPreferredMaintenanceWindow :: Maybe (Val Text)
  , _opsWorksCMServerSecurityGroupIds :: Maybe (ValList Text)
  , _opsWorksCMServerServerName :: Maybe (Val Text)
  , _opsWorksCMServerServiceRoleArn :: Val Text
  , _opsWorksCMServerSubnetIds :: Maybe (ValList Text)
  } deriving (Show, Eq)
-- Serialise to CloudFormation JSON: unset optional properties are dropped
-- by 'catMaybes'; required properties are always emitted via 'Just'.
instance ToResourceProperties OpsWorksCMServer where
  toResourceProperties OpsWorksCMServer{..} =
    ResourceProperties
    { resourcePropertiesType = "AWS::OpsWorksCM::Server"
    , resourcePropertiesProperties =
        hashMapFromList $ catMaybes
        [ fmap (("AssociatePublicIpAddress",) . toJSON) _opsWorksCMServerAssociatePublicIpAddress
        , fmap (("BackupId",) . toJSON) _opsWorksCMServerBackupId
        , fmap (("BackupRetentionCount",) . toJSON) _opsWorksCMServerBackupRetentionCount
        , fmap (("DisableAutomatedBackup",) . toJSON) _opsWorksCMServerDisableAutomatedBackup
        , fmap (("Engine",) . toJSON) _opsWorksCMServerEngine
        , fmap (("EngineAttributes",) . toJSON) _opsWorksCMServerEngineAttributes
        , fmap (("EngineModel",) . toJSON) _opsWorksCMServerEngineModel
        , fmap (("EngineVersion",) . toJSON) _opsWorksCMServerEngineVersion
        , (Just . ("InstanceProfileArn",) . toJSON) _opsWorksCMServerInstanceProfileArn
        , (Just . ("InstanceType",) . toJSON) _opsWorksCMServerInstanceType
        , fmap (("KeyPair",) . toJSON) _opsWorksCMServerKeyPair
        , fmap (("PreferredBackupWindow",) . toJSON) _opsWorksCMServerPreferredBackupWindow
        , fmap (("PreferredMaintenanceWindow",) . toJSON) _opsWorksCMServerPreferredMaintenanceWindow
        , fmap (("SecurityGroupIds",) . toJSON) _opsWorksCMServerSecurityGroupIds
        , fmap (("ServerName",) . toJSON) _opsWorksCMServerServerName
        , (Just . ("ServiceRoleArn",) . toJSON) _opsWorksCMServerServiceRoleArn
        , fmap (("SubnetIds",) . toJSON) _opsWorksCMServerSubnetIds
        ]
    }
-- | Constructor for 'OpsWorksCMServer' containing required fields as
-- arguments.
--
-- All optional properties start as 'Nothing'; set them via the lenses below.
opsWorksCMServer
  :: Val Text -- ^ 'owcmsInstanceProfileArn'
  -> Val Text -- ^ 'owcmsInstanceType'
  -> Val Text -- ^ 'owcmsServiceRoleArn'
  -> OpsWorksCMServer
opsWorksCMServer instanceProfileArnarg instanceTypearg serviceRoleArnarg =
  OpsWorksCMServer
  { _opsWorksCMServerAssociatePublicIpAddress = Nothing
  , _opsWorksCMServerBackupId = Nothing
  , _opsWorksCMServerBackupRetentionCount = Nothing
  , _opsWorksCMServerDisableAutomatedBackup = Nothing
  , _opsWorksCMServerEngine = Nothing
  , _opsWorksCMServerEngineAttributes = Nothing
  , _opsWorksCMServerEngineModel = Nothing
  , _opsWorksCMServerEngineVersion = Nothing
  , _opsWorksCMServerInstanceProfileArn = instanceProfileArnarg
  , _opsWorksCMServerInstanceType = instanceTypearg
  , _opsWorksCMServerKeyPair = Nothing
  , _opsWorksCMServerPreferredBackupWindow = Nothing
  , _opsWorksCMServerPreferredMaintenanceWindow = Nothing
  , _opsWorksCMServerSecurityGroupIds = Nothing
  , _opsWorksCMServerServerName = Nothing
  , _opsWorksCMServerServiceRoleArn = serviceRoleArnarg
  , _opsWorksCMServerSubnetIds = Nothing
  }
-- Lenses, one per property; each links to the CloudFormation documentation
-- for the property it focuses.

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-associatepublicipaddress
owcmsAssociatePublicIpAddress :: Lens' OpsWorksCMServer (Maybe (Val Bool))
owcmsAssociatePublicIpAddress = lens _opsWorksCMServerAssociatePublicIpAddress (\s a -> s { _opsWorksCMServerAssociatePublicIpAddress = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-backupid
owcmsBackupId :: Lens' OpsWorksCMServer (Maybe (Val Text))
owcmsBackupId = lens _opsWorksCMServerBackupId (\s a -> s { _opsWorksCMServerBackupId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-backupretentioncount
owcmsBackupRetentionCount :: Lens' OpsWorksCMServer (Maybe (Val Integer))
owcmsBackupRetentionCount = lens _opsWorksCMServerBackupRetentionCount (\s a -> s { _opsWorksCMServerBackupRetentionCount = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-disableautomatedbackup
owcmsDisableAutomatedBackup :: Lens' OpsWorksCMServer (Maybe (Val Bool))
owcmsDisableAutomatedBackup = lens _opsWorksCMServerDisableAutomatedBackup (\s a -> s { _opsWorksCMServerDisableAutomatedBackup = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-engine
owcmsEngine :: Lens' OpsWorksCMServer (Maybe (Val Text))
owcmsEngine = lens _opsWorksCMServerEngine (\s a -> s { _opsWorksCMServerEngine = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-engineattributes
owcmsEngineAttributes :: Lens' OpsWorksCMServer (Maybe [OpsWorksCMServerEngineAttribute])
owcmsEngineAttributes = lens _opsWorksCMServerEngineAttributes (\s a -> s { _opsWorksCMServerEngineAttributes = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-enginemodel
owcmsEngineModel :: Lens' OpsWorksCMServer (Maybe (Val Text))
owcmsEngineModel = lens _opsWorksCMServerEngineModel (\s a -> s { _opsWorksCMServerEngineModel = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-engineversion
owcmsEngineVersion :: Lens' OpsWorksCMServer (Maybe (Val Text))
owcmsEngineVersion = lens _opsWorksCMServerEngineVersion (\s a -> s { _opsWorksCMServerEngineVersion = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-instanceprofilearn
owcmsInstanceProfileArn :: Lens' OpsWorksCMServer (Val Text)
owcmsInstanceProfileArn = lens _opsWorksCMServerInstanceProfileArn (\s a -> s { _opsWorksCMServerInstanceProfileArn = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-instancetype
owcmsInstanceType :: Lens' OpsWorksCMServer (Val Text)
owcmsInstanceType = lens _opsWorksCMServerInstanceType (\s a -> s { _opsWorksCMServerInstanceType = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-keypair
owcmsKeyPair :: Lens' OpsWorksCMServer (Maybe (Val Text))
owcmsKeyPair = lens _opsWorksCMServerKeyPair (\s a -> s { _opsWorksCMServerKeyPair = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-preferredbackupwindow
owcmsPreferredBackupWindow :: Lens' OpsWorksCMServer (Maybe (Val Text))
owcmsPreferredBackupWindow = lens _opsWorksCMServerPreferredBackupWindow (\s a -> s { _opsWorksCMServerPreferredBackupWindow = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-preferredmaintenancewindow
owcmsPreferredMaintenanceWindow :: Lens' OpsWorksCMServer (Maybe (Val Text))
owcmsPreferredMaintenanceWindow = lens _opsWorksCMServerPreferredMaintenanceWindow (\s a -> s { _opsWorksCMServerPreferredMaintenanceWindow = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-securitygroupids
owcmsSecurityGroupIds :: Lens' OpsWorksCMServer (Maybe (ValList Text))
owcmsSecurityGroupIds = lens _opsWorksCMServerSecurityGroupIds (\s a -> s { _opsWorksCMServerSecurityGroupIds = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-servername
owcmsServerName :: Lens' OpsWorksCMServer (Maybe (Val Text))
owcmsServerName = lens _opsWorksCMServerServerName (\s a -> s { _opsWorksCMServerServerName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-servicerolearn
owcmsServiceRoleArn :: Lens' OpsWorksCMServer (Val Text)
owcmsServiceRoleArn = lens _opsWorksCMServerServiceRoleArn (\s a -> s { _opsWorksCMServerServiceRoleArn = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-opsworkscm-server.html#cfn-opsworkscm-server-subnetids
owcmsSubnetIds :: Lens' OpsWorksCMServer (Maybe (ValList Text))
owcmsSubnetIds = lens _opsWorksCMServerSubnetIds (\s a -> s { _opsWorksCMServerSubnetIds = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/OpsWorksCMServer.hs | mit | 9,833 | 0 | 15 | 1,044 | 1,644 | 927 | 717 | 108 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
-- | Controlling execution
module Control.Biegunka.Execute.Settings
( Executor
, runExecutor
, forkExecutor
-- * Executor environment
, Execution
, HasExecution(..)
-- * Initialize 'Execution'
, withExecution
-- * Auxiliary types
, Work(..)
) where
import Control.Exception (bracket)
import Control.Concurrent (forkFinally)
import Control.Concurrent.STM.TVar (TVar, newTVarIO)
import Control.Lens
import Control.Monad.Reader (ReaderT, runReaderT, ask)
import Control.Monad.IO.Class (liftIO)
import Data.List.NonEmpty (NonEmpty)
import Data.Meep (Meep)
import qualified Data.Meep as Meep
import Data.Set (Set)
import qualified Data.Set as Set
import Prelude hiding (lookup, null)
import qualified System.IO.Temp as IO
import qualified System.Posix as Posix
import Control.Biegunka.Logger (HasLogger(logger))
import Control.Biegunka.Execute.Watcher (Watcher)
import qualified Control.Biegunka.Execute.Watcher as Watcher
import Control.Biegunka.Settings (HasSettings(settings), Settings)
-- | Convenient type alias for the executor monad: IO with read access to
-- the shared cross-task 'Execution' environment.
type Executor a = ReaderT Execution IO a
-- | Run an 'Executor' action against the given shared environment.
runExecutor :: Execution -> Executor a -> IO a
runExecutor env action = runReaderT action env
-- | Fork an 'Executor' action on a new thread. The thread is registered
-- with the watcher *before* forking and unregistered in the thread's
-- finalizer, so 'Watcher.wait' cannot miss it.
forkExecutor :: Executor a -> Executor ()
forkExecutor io = do
  e <- ask
  Watcher.register (view watch e)
  liftIO (forkFinally (runExecutor e io)
                      (\_ -> Watcher.unregister (view watch e)))
  return ()
-- | Multithread accessible parts: shared mutable state ('TVar's) plus
-- immutable per-run configuration.
data Execution = Execution
  { _watch :: Watcher
  , _user :: TVar (Meep Posix.CUid Int) -- ^ Current user id and sessions counter
  , _repos :: TVar (Set String) -- ^ Already updated repositories
  , _activeSource
      :: TVar (Maybe (NonEmpty String))
  , _settings :: Settings
  , _onlyDiff :: Bool
  , _tempDir :: FilePath
  }
-- | Workload message passed to worker threads.
data Work =
    Do (IO ()) -- ^ Task to come
  | Stop       -- ^ Task is done
-- | Types that carry an 'Execution'; the default method bodies derive a
-- field lens for each component from the 'execution' lens.
--
-- Fix: the INLINE pragmas for 'onlyDiff' and 'activeSource' were attached
-- next to the wrong definitions (swapped); each pragma now follows the
-- method it names. (Pragmas are name-based, so behavior is unchanged.)
class HasExecution t where
  execution :: Lens' t Execution

  watch :: Lens' t Watcher
  watch = execution . \f x -> f (_watch x) <&> \y -> x { _watch = y }
  {-# INLINE watch #-}

  user :: Lens' t (TVar (Meep Posix.CUid Int))
  user = execution . \f x -> f (_user x) <&> \y -> x { _user = y }
  {-# INLINE user #-}

  repos :: Lens' t (TVar (Set String))
  repos = execution . \f x -> f (_repos x) <&> \y -> x { _repos = y }
  {-# INLINE repos #-}

  activeSource :: Lens' t (TVar (Maybe (NonEmpty String)))
  activeSource = execution . \f x -> f (_activeSource x) <&> \y -> x { _activeSource = y }
  {-# INLINE activeSource #-}

  onlyDiff :: Lens' t Bool
  onlyDiff = execution . \f x -> f (_onlyDiff x) <&> \y -> x { _onlyDiff = y }
  {-# INLINE onlyDiff #-}

  tempDir :: Lens' t FilePath
  tempDir = execution . \f x -> f (_tempDir x) <&> \y -> x { _tempDir = y }
  {-# INLINE tempDir #-}
-- 'Execution' trivially contains itself.
instance HasExecution Execution where
  execution = id
  {-# INLINE execution #-}
-- Settings live inside 'Execution' as a plain field.
instance HasSettings Execution where
  settings f x = f (_settings x) <&> \y -> x { _settings = y }
  {-# INLINE settings #-}
-- The logger is reached through the settings.
instance HasLogger Applicative Execution where
  logger = settings.logger
  {-# INLINE logger #-}
-- | Set up an 'Execution' to be used by 'Executor'.
--
-- The temp directory is removed when the action finishes; 'bracket'
-- uses 'Watcher.wait' as the release step, so all forked executor
-- threads are waited for before tearing down.
withExecution :: Settings -> (Execution -> IO a) -> IO a
withExecution s f =
  IO.withSystemTempDirectory "biegunka" $ \dir ->
    bracket Watcher.new Watcher.wait $ \watcher -> do
      e <-
        Execution watcher <$> newTVarIO Meep.empty
                          <*> newTVarIO Set.empty
                          <*> newTVarIO Nothing
                          <*> pure s
                          <*> pure False
                          <*> pure dir
      f e
| biegunka/biegunka | src/Control/Biegunka/Execute/Settings.hs | mit | 3,855 | 0 | 19 | 1,023 | 1,091 | 611 | 480 | 90 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cognito-identitypool-cognitostreams.html
module Stratosphere.ResourceProperties.CognitoIdentityPoolCognitoStreams where
import Stratosphere.ResourceImports
-- | Full data type definition for CognitoIdentityPoolCognitoStreams. See
-- 'cognitoIdentityPoolCognitoStreams' for a more convenient constructor.
--
-- All three CloudFormation properties are optional.
data CognitoIdentityPoolCognitoStreams =
  CognitoIdentityPoolCognitoStreams
  { _cognitoIdentityPoolCognitoStreamsRoleArn :: Maybe (Val Text)
  , _cognitoIdentityPoolCognitoStreamsStreamName :: Maybe (Val Text)
  , _cognitoIdentityPoolCognitoStreamsStreamingStatus :: Maybe (Val Text)
  } deriving (Show, Eq)
-- Serialise to JSON, dropping unset ('Nothing') properties via 'catMaybes'.
instance ToJSON CognitoIdentityPoolCognitoStreams where
  toJSON CognitoIdentityPoolCognitoStreams{..} =
    object $
    catMaybes
    [ fmap (("RoleArn",) . toJSON) _cognitoIdentityPoolCognitoStreamsRoleArn
    , fmap (("StreamName",) . toJSON) _cognitoIdentityPoolCognitoStreamsStreamName
    , fmap (("StreamingStatus",) . toJSON) _cognitoIdentityPoolCognitoStreamsStreamingStatus
    ]
-- | Constructor for 'CognitoIdentityPoolCognitoStreams' containing required
-- fields as arguments.
--
-- There are no required fields; every property starts as 'Nothing'.
cognitoIdentityPoolCognitoStreams
  :: CognitoIdentityPoolCognitoStreams
cognitoIdentityPoolCognitoStreams =
  CognitoIdentityPoolCognitoStreams
  { _cognitoIdentityPoolCognitoStreamsRoleArn = Nothing
  , _cognitoIdentityPoolCognitoStreamsStreamName = Nothing
  , _cognitoIdentityPoolCognitoStreamsStreamingStatus = Nothing
  }
-- Lenses, one per property; each links to the CloudFormation documentation.

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cognito-identitypool-cognitostreams.html#cfn-cognito-identitypool-cognitostreams-rolearn
cipcsRoleArn :: Lens' CognitoIdentityPoolCognitoStreams (Maybe (Val Text))
cipcsRoleArn = lens _cognitoIdentityPoolCognitoStreamsRoleArn (\s a -> s { _cognitoIdentityPoolCognitoStreamsRoleArn = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cognito-identitypool-cognitostreams.html#cfn-cognito-identitypool-cognitostreams-streamname
cipcsStreamName :: Lens' CognitoIdentityPoolCognitoStreams (Maybe (Val Text))
cipcsStreamName = lens _cognitoIdentityPoolCognitoStreamsStreamName (\s a -> s { _cognitoIdentityPoolCognitoStreamsStreamName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cognito-identitypool-cognitostreams.html#cfn-cognito-identitypool-cognitostreams-streamingstatus
cipcsStreamingStatus :: Lens' CognitoIdentityPoolCognitoStreams (Maybe (Val Text))
cipcsStreamingStatus = lens _cognitoIdentityPoolCognitoStreamsStreamingStatus (\s a -> s { _cognitoIdentityPoolCognitoStreamsStreamingStatus = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/CognitoIdentityPoolCognitoStreams.hs | mit | 2,819 | 0 | 12 | 253 | 355 | 202 | 153 | 32 | 1 |
-----------------------------------------------------------------------------
--
-- Module : WAM.Compile
-- Copyright :
-- License : GPL Nothing
--
-- Maintainer : Angelos Charalambidis <a.charalambidis@di.uoa.gr>
-- Stability : Experimental
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module WAM.Compile (
wamCompileProg
, wamCompileGoal
) where
import Prolog
import WAM
import System.IO
import Data.List (nub, delete, (\\))
import Data.Maybe (fromJust)
import Debug.Trace
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Identity
-- | The compilation monad: a read-only per-clause environment over
-- mutable per-clause state; pure underneath ('Identity').
type WamCompile a = ReaderT WamCompEnv (StateT WamCompState Identity) a
-- | Mapping from clause variables to the WAM registers assigned to them.
type WamSymbolTable = [(VarId,WamRegister)]
data WamCompEnv = WamCompEnv
   { perms' :: [VarId] -- ^ permanent variables
   }
data WamCompState = WamCompState
   { symbolTbl :: WamSymbolTable -- ^ mapping between clause variables and wamregisters
   , unsafe' :: [VarId] -- ^ unsafe variables
   }
-- | Returns the permanent variables from a clause.
-- Permanent variables are those that occur in more than one body literal,
-- where the head literal and the first body literal are counted together
-- as a single literal.
perms :: Clause -> [VarId]
perms (t, ts) =
    let varsHead = varsTerm t
        varsBody = map varsTerm ts
        -- merge head vars into the first body literal's vars
        lst = case varsBody of
                [] -> [varsHead]
                (v:vs) -> (nub (varsHead ++ v)):vs
        -- keep vars of each literal that reappear in a later literal
        aux [] = []
        aux (l:ls) = (filter (`elem` l') l) ++ aux ls
           where l' = concat ls
    in nub $ aux lst
-- | Returns the "safe" variables. A variable is safe when it
--   * occurs in the head literal,
--   * occurs inside a compound term (a structure argument), or
--   * occurs somewhere other than only in the last body literal.
--
-- Fix: removed the unused binding @b' = init b@ and the unused pattern
-- variable for the functor name in 'inCompound'.
safe :: Clause -> [VarId]
safe (h, []) = varsTerm h
safe (h, b) = nub $ varsTerm h ++ varsNotL ++ varsInCompound
    where l = last b                 -- last body literal
          vl = varsTerm l
          varsNotL = varsClause (h,b) \\ vl
          varsInCompound = concat $ inCompound $ concatMap args (h:b)
          -- variables occurring inside structure arguments
          inCompound [] = []
          inCompound (t:ts) =
             case t of
                T (_, args) ->
                   nub $ map varsTerm args ++ inCompound ts
                V _ ->
                   inCompound ts
-- | Returns the "unsafe" variables: every clause variable that is not safe.
--
-- Fix: dropped the unused @(h,b)@ pattern bindings.
unsafe :: Clause -> [VarId]
unsafe cl = varsClause cl \\ safe cl
-- Extract the numeric index of a WAM register.
unWrapVar (Temp n) = n
unWrapVar (Perm n) = n

-- Register classification helpers.
isPerm (Perm _) = True
isPerm _        = False

isTemp v = not (isPerm v)
-- Allocate a register for clause variable v: a permanent register if v is
-- in the clause's permanent set, otherwise a fresh temporary above both
-- the in-use registers and the reserved arguments; records v in the table.
newVar r n v = do
    pe <- asks perms'
    p <- if v `elem` pe then
            newPerm
         else
            newTemp r n
    tbl <- gets symbolTbl
    let tbl' = (v,p):tbl
    modify (\st -> st { symbolTbl = tbl' })
    return p
-- Next unused permanent register: one past the highest allocated so far.
newPerm = do
    tbl <- gets symbolTbl
    let p = map unWrapVar $ filter isPerm $ map snd tbl
    let n = case p of
              [] -> 0
              _ -> maximum p
    return $ Perm (n + 1)
-- Next unused temporary register above both the symbol table's temporaries,
-- the registers in r, and the reserved lower bound m.
newTemp r m = do
    tbl <- gets symbolTbl
    let t = filter isTemp $ map snd tbl
    let p = map unWrapVar $ t ++ r
    let n = case p of
             [] -> m
             _ -> max m (maximum p)
    return $ Temp (n + 1)
-- | Converts a term into a predicate label, e.g. p(a,b,c) becomes p/3.
-- Partial: a plain variable has no label.
termToLabel :: Term -> WamLabel
termToLabel (T (name, as)) = (name, length as)
termToLabel _ = error "cannot convert to wamlabel"
-- WAM Compilation
-- | Compiles a literal, either a head literal or a body literal.
-- Head ("get") mode emits Get*/Unify* instructions; body ("put") mode
-- emits Put* instructions. Each literal is paired with one register.
wamCompileLit :: Bool -- ^ h is a bool - if true then compilation is a "get" else is a "put" mode
              -> [Term] -- ^ a list of literals to compile
              -> [WamRegister] -- ^ a list of wam registers to assign to literals (one register for one literal)
              -> Int -- ^ a maximum integer used to assign new variables
              -> WamCompile WamInstrSeq -- ^ the output sequence of wam instructions
wamCompileLit h [] _ _ = return []
wamCompileLit h (t:ts) (r:rs) n =
   let
       -- pick the get- or put-flavour of each instruction
       opValue = if h then GetValue else PutValue
       opConstant = if h then GetConstant else PutConstant
       opStructure = if h then GetStructure else PutStructure
       opVariable = if h then GetVariable else PutVariable
   in case t of
        T (s, []) -> do
           -- atom: a constant instruction
           rest <- wamCompileLit h ts rs n
           return $ (opConstant s, [r]) : rest
        T (_, args) -> do
           str <- case r of
                    Temp i -> do
                      if i > n -- not an argument Temp 1...Temp n is reserved for procedural calls
                        then return (GetStructure (termToLabel t), [r])
                        else return (opStructure (termToLabel t), [r])
                    _ -> do
                      return (opStructure (termToLabel t), [r])
           -- compile the structure's arguments as unify instructions
           rest <- wamCompileTerm h args ts rs n
           return (str:rest)
        V v -> do
           tbl <- gets symbolTbl
           case lookup v tbl of
             -- variable already has a register: value instruction,
             -- or PutUnsafeValue on its first unsafe occurrence
             Just z -> do
                u <- gets unsafe'
                if v `elem` u
                  then do
                    modify (\st -> st{unsafe' = (delete v u)})
                    rest <- wamCompileLit h ts rs n
                    return $ (PutUnsafeValue, [z,r]) : rest
                  else do
                    rest <- wamCompileLit h ts rs n
                    return $ (opValue, [z,r]) : rest
             -- first occurrence: allocate a register, emit a variable instr
             Nothing -> do
                z <- newVar (r:rs) n v
                rest <- wamCompileLit h ts rs n
                return $ (opVariable, [z,r]) : rest
-- | Compiles head literals in "get" mode; argument i lives in register
-- Temp i (the list is built reversed to pair with the argument list).
wamCompileHeadLit ts n = wamCompileLit True ts xs n
    where n' = length ts
          xs = map Temp $ reverse [1..n']
-- | Compiles a goal literal in "put" mode, same register convention.
wamCompileGoalLit ts = wamCompileLit False ts xs n
    where n = length ts
          xs = map Temp $ reverse [1..n]
-- | Compiles structure arguments into Unify* instructions; nested
-- structures are deferred onto the literal list with a fresh register.
wamCompileTerm :: Bool -- ^ h is a bool - if true then compilation is a "get" else is a "put" mode
               -> [Term] -- ^ a list of terms to compile
               -> [Term] -- ^ a list of literals to continue compilation after the compilation of the first argument
               -> [WamRegister] -- ^ a list of wam registers to assign to literals (one register for one literal)
               -> Int -- ^ a minimum lower bound integer used to assign new variables
               -> WamCompile WamInstrSeq -- ^ the output sequence of wam instructions
wamCompileTerm h [] ts rs n = wamCompileLit h ts rs n
wamCompileTerm h (a:as) ts rs n =
    case a of
      T (s,[]) -> do
         -- atom argument
         rest <- wamCompileTerm h as ts rs n
         return $ (UnifyConstant s, []) : rest
      T (s, args) -> do
         -- nested structure: bind it to a fresh temporary and queue it
         r' <- newTemp rs n
         rest <- wamCompileTerm h as (a:ts) (r':rs) n
         return $ (UnifyVariable, [r']) : rest
      V v -> do
         tbl <- gets symbolTbl
         case lookup v tbl of
           Just z -> do
              rest <- wamCompileTerm h as ts rs n
              return $ (UnifyValue, [z]) : rest
           Nothing -> do
              z <- newVar rs n v
              rest <- wamCompileTerm h as ts rs n
              return $ (UnifyVariable, [z]):rest
-- | Compiles a clause body. An empty body just proceeds; the last literal
-- becomes an Execute (preceded by Deallocate when a frame was allocated,
-- i.e. when e is True); earlier literals become Calls, after which only
-- permanent registers survive in the symbol table.
wamCompileBody [] _ = return [(Proceed, [])]
wamCompileBody [g] e =
   let c' = if e
            then (Deallocate,[]):c
            else c
         where c = [(Execute (termToLabel g), [])]
   in do
      cc <- wamCompileGoalLit (args g)
      return $ cc ++ c'
wamCompileBody (g:gs) e = do
   c <- wamCompileGoalLit (args g)
   let c' = c ++ [(Call (termToLabel g), [])]
   -- temporaries do not survive a Call; drop them from the table
   modify (\st -> st{symbolTbl = filter (isPerm.snd) (symbolTbl st)})
   cc <- wamCompileBody gs e
   return (c' ++ cc)
-- | Compiles a clause: head unification code, an Allocate frame when the
-- body has more than one literal (so permanent variables can survive a
-- Call), then the body code.
--
-- Fix: the @length b < 1@ test plus guarded partial 'head' is replaced by
-- a single total case analysis; the redundant outer @in do@ wrapper is
-- dropped.
wamCompileClause :: Clause -> WamCompile WamInstrSeq
wamCompileClause cl@(h, b) =
   let -- registers reserved for the arguments of the first body literal
       n = case b of
             []      -> 0
             (g : _) -> length (args g)
       notSingleton = length b > 1
       permans = perms cl
       unsafes = unsafe cl
   in local (\r -> r{ perms' = permans }) $ do
        -- fresh per-clause compilation state
        modify (\st -> st { symbolTbl = []
                          , unsafe' = unsafes
                          })
        g <- wamCompileHeadLit (args h) n
        let g' = if notSingleton then (Allocate (length permans), []):g else g
        gb <- wamCompileBody b notSingleton
        return $ g' ++ gb
-- wamCompileGoal
-- | Compiles the alternative (second and later) clauses of a predicate.
-- i is the code offset of the current clause; each RetryMeElse points at
-- the next alternative, the last alternative gets TrustMe.
wamCompileAlters (l:ls) i = do
   c <- wamCompileClause l
   case ls of
     [] -> do
        return $ (TrustMe,[]):c
     _ -> do
        let j = i + length c + 1
        let c' = (RetryMeElse j,[]):c
        alters <- wamCompileAlters ls j
        return $ c' ++ alters
-- | Compiles a whole predicate consisting of none or many alternatives.
-- No clauses: always Backtrack; one clause: no choice point; otherwise a
-- TryMeElse chain over the alternatives.
wamCompilePredicate [] i = return [(Backtrack, [])]
wamCompilePredicate [d] i = wamCompileClause d
wamCompilePredicate (d:ds) i = do
   c <- wamCompileClause d
   let j = i + length c + 1
   let c' = (TryMeElse j, []):c
   alters <- wamCompileAlters ds j
   return $ c' ++ alters
-- | Compiles the definitions of the predicates
wamCompileDefs :: [WamLabel] -- ^ list of predicate names to compile
               -> [Clause] -- ^ clauses of program
               -> Int -- ^ offset to start
               -> WamCompile [WamInstrSeq] -- ^ returns a list of instruction sequence, one for each predicate
wamCompileDefs [] p i = return []
wamCompileDefs (q:qs) p i = do
   c <- wamCompilePredicate (defs p q) i
   let j = i + length c
   -- NOTE: this local 'defs' shadows the 'defs' selector used above for
   -- the rest of the do-block (harmless here, but easy to misread)
   defs <- wamCompileDefs qs p j
   return $ c : defs
-- Run a compilation action from empty environment and state.
wamCompile m = runIdentity (evalStateT (runReaderT m emptyEnv) emptyState)
    where emptyState = WamCompState { symbolTbl = [], unsafe' = [] }
          emptyEnv = WamCompEnv { perms' = [] }
-- | Compiles a logic program consisting of many definitions, producing a
-- database keyed by predicate label.
wamCompileProg :: [Clause]
               -> WamProgram
wamCompileProg p =
   let ps = preds p
       i = 1
       cs = wamCompile (wamCompileDefs ps p i)
   in mkDB $ zip ps cs
-- | Compiles a goal by wrapping it as a clause with synthetic head
-- ?(V1,...,Vn) over the goal's variables; returns those variables
-- (reversed to match register order) and the compiled code.
wamCompileGoal :: Goal
               -> Int
               -> WamGoal
wamCompileGoal g i =
   let g' = (T ("?", vg'), g)
       vg' = map V $ vg
       vg = varsGoal g
   in (reverse vg, wamCompile (wamCompilePredicate [g'] i))
| acharal/wam | src/WAM/Compile.hs | gpl-2.0 | 11,042 | 3 | 26 | 4,035 | 3,198 | 1,653 | 1,545 | 234 | 11 |
import Data.Data
import Language.C.Pretty
import Language.C.Syntax.AST
import Language.C.Syntax.Constants
import Language.C.Data.Node
import Language.C.Parser (parseC)
import Data.Generics
import Data.ByteString.Char8 (pack)
import Language.C.Data.Position (position)
import Control.Monad.State
-- | One for an if-statement, zero for any other statement.
count :: CStat -> Int
count stmt =
  case stmt of
    CIf _ _ _ _ -> 1
    _           -> 0

-- | Count every if-statement anywhere in a translation unit.
query :: CTranslUnit -> Int
query = everything (+) (mkQ 0 count)
-- Replace each `return <constant>` with `return <idx>`, where idx is a
-- counter threaded through the State monad; other statements pass through.
rewriteReturn :: CStat -> State Int CStat
rewriteReturn (CReturn (Just (CConst _ )) _) = do
  idx <- get
  put (idx + 1)
  return (CReturn (Just (CConst (CIntConst (cInteger (fromIntegral idx)) undefNode))) undefNode)
rewriteReturn x = return x
-- | Number every constant return expression in a single left-to-right
-- traversal, starting from 0.
--
-- Fix: @fst $ runState@ replaced with the idiomatic 'evalState'.
rewrite :: CTranslUnit -> CTranslUnit
rewrite ast = evalState (everywhereM (mkM rewriteReturn) ast) 0
-- Strip local declarations from every compound statement in the tree.
remove ast = everywhere (mkT removeDecl) ast

removeDecl :: CStat -> CStat
removeDecl stmt =
  case stmt of
    CCompound labels items annot -> CCompound labels (concatMap tr items) annot
    other                        -> other

-- Drop declarations; keep every other block item.
tr :: CBlockItem -> [CBlockItem]
tr (CBlockDecl _) = []
tr item           = [item]
-- Parse a C source string; dies with the parser error on failure.
parse :: String -> CTranslUnit
parse code = case parseC code' pos of
  Left e -> error $ show e
  Right ast -> ast
  where
    code' = pack code
    pos = position 0 "<<test>>" 0 0  -- synthetic position for the fake file
-- Fixed test input: nested ifs, labelled blocks, local declarations.
code = parse "int x() {if (1) {int i; if(2) { foo: {int i2;} return 9;} return 23;}} int y() {if (1) {return 42;}}"
-- Demonstrate the three traversals: count ifs, renumber returns,
-- strip declarations.
main = do
  print $ query code
  print "<<<<<<<<<<<<<<<<<<<<<<<<"
  print $ pretty $ rewrite code
  print "<<<<<<<<<<<<<<<<<<<<<<<<"
  print $ pretty $ remove code
| copton/ocram | try/syb/SybTest.hs | gpl-2.0 | 1,496 | 0 | 19 | 302 | 554 | 282 | 272 | 43 | 2 |
-- | Umbrella module for the available domains.
module Domains where
-- NOTE(review): with no export list mentioning these modules, importing
-- 'Domains' re-exports nothing from them — only their typeclass instances
-- propagate. Confirm whether explicit re-exports were intended.
import Domains.Arithmetic
import Domains.PropLogic
import Domains.English
| arnizamani/aiw | Domains.hs | gpl-2.0 | 96 | 0 | 4 | 10 | 19 | 12 | 7 | 4 | 0 |
{-# LANGUAGE OverloadedStrings #-}
import Data.Maybe
import Data.Attoparsec.ByteString.Char8
import qualified Data.ByteString.Char8 as BS
import Control.Applicative
-- Extract every (possibly signed) integer from the input, skipping any
-- character that is neither a digit nor a sign between numbers.
-- NOTE(review): a sign character not followed by digits ends the 'many'
-- without being skipped — fine for this puzzle input, presumably.
allDigits = skipWhile notSigned *> many (signed decimal <* skipWhile notSigned)
  where
    notSigned c = (c < '0' || c > '9') && c /= '-' && c /= '+'
-- Print the sum of all numbers (Right total), or Left with a parse error.
main = do
  input <- BS.getContents
  print $ sum <$> parseOnly allDigits input
| lorem-ipsum/adventofcode2015 | day12.hs | gpl-2.0 | 402 | 0 | 12 | 74 | 127 | 67 | 60 | 10 | 1 |
import qualified Data.List as L
-- | Return the k-th (0-based) lexicographic permutation of a list of
-- distinct, already-sorted elements, via the factorial number system:
-- the leading element is chosen by dividing k by (n-1)!, and the
-- remainder indexes the permutation of what is left.
nthPermutation :: Int -> [a] -> [a]
nthPermutation _ [] = []
nthPermutation k xs = chosen : nthPermutation r rest
  where
    f = product [1 .. length xs - 1]     -- (n-1)!
    (q, r) = k `divMod` f
    chosen = xs !! q
    rest = take q xs ++ drop (q + 1) xs

-- Project Euler 24: the millionth lexicographic permutation of 0-9.
-- Fix: the previous version generated and sorted all 10! permutations;
-- this computes the answer directly in O(n^2) with identical output.
main = putStrLn (nthPermutation 999999 "0123456789")
| NaevaTheCat/Project-Euler-Haskell | P24.hs | gpl-2.0 | 109 | 1 | 11 | 21 | 41 | 23 | 18 | 3 | 1 |
module Robatira.Model.Cards
where
import Data.Char (chr)
-- | Suits of a French-suited deck.
data Suit = Spades | Hearts | Diamonds | Clubs deriving (Show, Eq, Enum)
-- | Ranks Ace through King (Unicode's "knight" rank is not modelled).
data Rank = Ace | Two | Three | Four | Five | Six | Seven | Eight | Nine |
            Ten | Jack | Queen | King deriving (Show, Eq, Enum)
-- | The three joker variants.
data Joker = RedJoker | BlackJoker | WhiteJoker deriving (Show, Eq, Enum)
-- | A playing card: a regular suit/rank card, or a joker.
data Card = Regular Suit Rank | Other Joker deriving (Eq)

-- A card shows as its single Unicode playing-card glyph.
instance Show Card where
  show = (: []) . chr . cardToUnicode

-- | Map a card to the code point of its glyph in the Unicode
-- playing-card block (U+1F0A0..U+1F0DF).
cardToUnicode :: Card -> Int
cardToUnicode card =
  case card of
    Regular suit rank -> suitBase suit + rankToHex rank
    Other RedJoker    -> 0x1F0BF
    Other BlackJoker  -> 0x1F0CF
    Other WhiteJoker  -> 0x1F0DF
  where
    -- first code point of each suit's row
    suitBase Spades   = 0x1F0A0
    suitBase Hearts   = 0x1F0B0
    suitBase Diamonds = 0x1F0C0
    suitBase Clubs    = 0x1F0D0

-- | Offset of a rank within a suit row. Queen and King are special
-- because Unicode inserts an extra "knight" rank at offset 0xC.
rankToHex :: Rank -> Int
rankToHex King  = 0xE
rankToHex Queen = 0xD
rankToHex rank  = fromEnum rank + 1

-- | Card-back glyph: countable but face-down (other players' cards).
backOfCardUnicode :: Int
backOfCardUnicode = 0x1F0A0

allSuits = [Spades ..]
allRanks = [Ace ..]
allJokers = [RedJoker ..]

-- | A standard 52-card deck followed by the three jokers.
frenchStandard52Plus3Jokers :: [Card]
frenchStandard52Plus3Jokers =
  [Regular s r | s <- allSuits, r <- allRanks] ++ map Other allJokers
| pbrandwijk/robatira | src/Robatira/Model/Cards.hs | gpl-3.0 | 1,543 | 0 | 9 | 295 | 484 | 268 | 216 | 29 | 1 |
{-# LANGUAGE FlexibleInstances #-}
module Gis.Saga.Doc (renderTable, renderDot, renderNodes)
where
import qualified Data.Map as M
import Gis.Saga.Types
import Data.List (intercalate)
import Text.Printf (printf)
-- | Things renderable as a plain-text table row or table.
class TableView a where renderTable :: a -> String
-- Whole command database: header line plus one row per command.
instance TableView SagaIoCmdDB where
  renderTable db =
    "Command (cmdPar,sagaPar,default) sagaLib sagaModule defaultSuffix\n" ++
    (unlines . map renderTable . M.toList $ db)
instance TableView (String, SagaIoCmdExt) where
  renderTable = renderTableSagaIoCmd
-- One row: instantiate the command with dummy in/out files to read off
-- its library, module and parameters. (Local 'mod' shadows Prelude.mod.)
renderTableSagaIoCmd :: (String, SagaIoCmdExt) -> String
renderTableSagaIoCmd (cmdName, (cmd, ext)) =
  let SagaCmd {sLib = lib, sMod = mod, sParas = ps } = cmd "" ""
  in unwords [cmdName, renderTable ps, lib, mod, ext]
-- Parameter map: "NA" when empty, otherwise colon-separated triples.
instance TableView ParaMap where
  renderTable pm
    | M.size pm == 0 = "NA"
    | otherwise = intercalate ":" (map renderTable . M.toList $ pm)
instance TableView (String, (String,String)) where
  renderTable (cmdArg, (sArg,def)) =
    "(" ++ intercalate "," [cmdArg,sArg,def] ++ ")"
-- | Things renderable as Graphviz DOT source.
class DotGraphics a where renderDot :: a -> String
-- Whole graph: left-to-right digraph of module records plus chain edges.
instance DotGraphics (SagaIoCmdDB,NodeMap) where
  renderDot (cmds,chains) = unlines [
    "digraph chains {"
    ," graph [rankdir = LR];"
    ," node [shape = ellipse, fontsize = 8];"
    ,""
    ,unlines . map renderDot . M.toList $ cmds -- implemented modules
    ,renderDot chains -- implemented chains
    ,"}"
    ]
instance DotGraphics (String, SagaIoCmdExt) where
  renderDot = renderDotSagaIoCmd
-- One record-shaped node per command, labelled with its parameters,
-- suffix, library and module (dummy files instantiate the command).
renderDotSagaIoCmd :: (String, SagaIoCmdExt) -> String
renderDotSagaIoCmd (cmdName, (cmd,ext)) =
  let SagaCmd {sLib = lib, sMod = mod, sParas = ps } = cmd "" ""
  in printf " %s [shape = record, label = \"%s|%s|%s|%s %s\"];"
       cmdName cmdName (renderDot ps) ext lib mod
-- Parameter map as a nested record: names | saga names | defaults,
-- each column newline-separated.
renderDotParaMap :: ParaMap -> String
renderDotParaMap pm = "{" ++ ss ++ "}"
   where
     ps = M.toList pm
     cmdArgs = intercalate "\\n" (map fst ps)
     sArgs = intercalate "\\n" (map (fst . snd) ps)
     defs = intercalate "\\n" (map (snd . snd) ps)
     ss = intercalate "|" [cmdArgs,sArgs,defs]
instance DotGraphics ParaMap where
  renderDot = renderDotParaMap
instance DotGraphics NodeMap where
  renderDot = unlines . map renderDot . M.toList
-- A node's edges: one edge from each input, one edge to each output.
instance DotGraphics (String, ([String],[String])) where
  renderDot (name, (ins, outs)) = unlines $ map unlines [
    map (`edge` name) ins
    ,map (name `edge`) outs
    ]
-- Render one DOT edge between two quoted node names.
edge :: String -> String -> String
edge = printf " \"%s\" -> \"%s\";"
-- | Plain-text listing of chain nodes and their inputs/outputs.
class NodeView a where renderNodes :: a -> String
instance NodeView NodeMap where
  renderNodes nm = unlines (map renderNodes (M.toList nm))
instance NodeView (String, ([String], [String])) where
  renderNodes (name, (inputs, outputs)) =
    concat [name, ": ", show inputs, show outputs]
| michelk/bindings-saga-cmd.hs | src/Gis/Saga/Doc.hs | gpl-3.0 | 2,825 | 0 | 11 | 611 | 927 | 509 | 418 | 63 | 1 |
module Paths_Akkerman_func (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
-- | 'Exception.catch' restricted to 'Exception.IOException', so only IO
-- errors (e.g. a missing environment variable) reach the handler.
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO action handler = Exception.catch action handler
-- | Package version as declared in the cabal file.
-- NOTE(review): 'versionTags' is the deprecated tag field of 'Data.Version';
-- it is kept empty here by the generator.
version :: Version
version = Version {versionBranch = [0,1,0,0], versionTags = []}
-- Compile-time install locations inside the cabal sandbox. Each may be
-- overridden at runtime through the corresponding environment variable
-- (see the getters below).
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/home/anastasia/Haskell/Akkerman-func/.cabal-sandbox/bin"
libdir = "/home/anastasia/Haskell/Akkerman-func/.cabal-sandbox/lib/x86_64-linux-ghc-7.8.3/Akkerman-func-0.1.0.0"
datadir = "/home/anastasia/Haskell/Akkerman-func/.cabal-sandbox/share/x86_64-linux-ghc-7.8.3/Akkerman-func-0.1.0.0"
libexecdir = "/home/anastasia/Haskell/Akkerman-func/.cabal-sandbox/libexec"
sysconfdir = "/home/anastasia/Haskell/Akkerman-func/.cabal-sandbox/etc"
-- Runtime accessors for the install directories: each consults its
-- @Akkerman_func_*@ environment variable first and falls back to the
-- compiled-in sandbox path when the variable is unset (getEnv throws an
-- IOException, which 'catchIO' turns into the default).
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "Akkerman_func_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "Akkerman_func_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "Akkerman_func_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "Akkerman_func_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "Akkerman_func_sysconfdir") (\_ -> return sysconfdir)
-- | Absolute path of a data file: the data directory joined with the given
-- relative name by a literal \'/\'.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = fmap (\dir -> dir ++ "/" ++ name) getDataDir
| nastya13/Akkerman-func | dist/build/autogen/Paths_Akkerman_func.hs | gpl-3.0 | 1,557 | 0 | 10 | 182 | 371 | 213 | 158 | 28 | 1 |
module Slidecoding
(
-- Browser
browse
-- CabalHelper
, loadExposedModules
-- GHCI
, run
, ioStream
-- Indexer
, indexIO
-- Presentation
, load
-- ReplSession
, evalInSession
, startSession
, endSession
-- SlidesWriter
, processSlides
-- WebSockets
, start
) where
import Slidecoding.Browser
import Slidecoding.CabalHelper
import Slidecoding.GHCI
import Slidecoding.Indexer
import Slidecoding.Presentation
import Slidecoding.ReplSession
import Slidecoding.SlidesWriter
import Slidecoding.WebSockets
| ptitfred/slidecoding | src/Slidecoding.hs | gpl-3.0 | 593 | 0 | 4 | 152 | 87 | 58 | 29 | 21 | 0 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings, RecordWildCards #-}
module Lamdu.GUI.ParamEdit
( Info(..), make
, eventMapAddFirstParam
) where
import Control.Lens.Operators
import Control.Lens.Tuple
import qualified Data.Map as Map
import Data.Store.Transaction (Transaction)
import qualified Graphics.UI.Bottle.EventMap as E
import Graphics.UI.Bottle.ModKey (ModKey)
import qualified Graphics.UI.Bottle.Widget as Widget
import Lamdu.Config (Config)
import qualified Lamdu.Config as Config
import Lamdu.GUI.ExpressionGui (ExpressionGui)
import qualified Lamdu.GUI.ExpressionGui as ExpressionGui
import Lamdu.GUI.ExpressionGui.Monad (ExprGuiM)
import qualified Lamdu.GUI.ExpressionGui.Monad as ExprGuiM
import qualified Lamdu.GUI.ExpressionGui.Types as ExprGuiT
import qualified Lamdu.GUI.WidgetIds as WidgetIds
import qualified Lamdu.Sugar.Types as Sugar
import Prelude.Compat
-- | Shorthand for the store 'Transaction' monad used throughout this module.
type T = Transaction
-- | One-entry widget-id remapping: both key and value are the widget ids
-- derived from the given sugar entity ids.
singletonIdMap ::
    Sugar.EntityId -> Sugar.EntityId ->
    Map.Map Widget.Id Widget.Id
singletonIdMap key val =
    Map.singleton (toWidgetId key) (toWidgetId val)
    where
        toWidgetId = WidgetIds.fromEntityId
-- | Turn the result of a parameter-add action into the event result that
-- moves the cursor to the freshly created entity's name editor.
-- For the var-to-tags case the replaced var's widget id is additionally
-- remapped onto the replacing tag's widget id.
chooseAddResultEntityId :: Sugar.ParamAddResult -> Widget.EventResult
chooseAddResultEntityId (Sugar.ParamAddResultVarToTags Sugar.VarToTags {..}) =
    eventResultFromEntityId (vttNewTag ^. Sugar.tagInstance)
    & Widget.applyIdMapping widgetIdMap
    where
        widgetIdMap =
            singletonIdMap vttReplacedVarEntityId
            (vttReplacedByTag ^. Sugar.tagInstance)
chooseAddResultEntityId (Sugar.ParamAddResultNewVar entityId _) =
    eventResultFromEntityId entityId
chooseAddResultEntityId (Sugar.ParamAddResultNewTag newParamTag) =
    eventResultFromEntityId $ newParamTag ^. Sugar.tagInstance
-- | Event result that moves the cursor to the given entity's name editor.
eventResultFromEntityId :: Sugar.EntityId -> Widget.EventResult
eventResultFromEntityId entityId =
    Widget.eventResultFromCursor (cursorFromEntityId entityId)
-- | Widget id of the name editor belonging to the given entity.
cursorFromEntityId :: Sugar.EntityId -> Widget.Id
cursorFromEntityId entityId =
    WidgetIds.nameEditOf (WidgetIds.fromEntityId entityId)
-- | Event map binding the add-parameter keys to adding the first parameter,
-- moving the cursor to the new parameter afterwards.
-- NOTE(review): this reuses 'Config.addNextParamKeys' (there is no separate
-- first-param key config visible here) — confirm that is intentional.
eventMapAddFirstParam ::
    Functor m => Config -> T m Sugar.ParamAddResult ->
    Widget.EventMap (T m Widget.EventResult)
eventMapAddFirstParam config addFirstParam =
    addFirstParam
    <&> chooseAddResultEntityId
    & E.keyPresses (Config.addNextParamKeys config)
        (E.Doc ["Edit", "Add parameter"])
-- | Event map binding the add-parameter keys to adding a parameter after
-- the current one, moving the cursor to the new parameter afterwards.
eventMapAddNextParam ::
    Functor m =>
    Config -> T m Sugar.ParamAddResult ->
    Widget.EventMap (T m Widget.EventResult)
eventMapAddNextParam config fpAdd =
    fpAdd
    <&> chooseAddResultEntityId
    & E.keyPresses (Config.addNextParamKeys config)
        (E.Doc ["Edit", "Add next parameter"])
-- | Event map binding the given keys to a parameter-reordering action;
-- @docSuffix@ (\"before\"\/\"after\") names the direction in the doc entry.
eventMapOrderParam ::
    Monad m =>
    [ModKey] -> String -> m () -> Widget.EventMap (m Widget.EventResult)
eventMapOrderParam keys docSuffix action =
    Widget.keysEventMap keys docEntry action
    where
        docEntry = E.Doc ["Edit", "Parameter", "Move " ++ docSuffix]
-- | Event map binding the given keys to deleting a parameter. After the
-- delete, the cursor moves to @dstPosId@; when the delete collapses tags
-- back into a var, the replaced tag's widget id is remapped onto the
-- replacing var so the cursor mapping stays coherent.
eventParamDelEventMap ::
    Monad m =>
    m Sugar.ParamDelResult -> [ModKey] -> String -> Widget.Id ->
    Widget.EventMap (m Widget.EventResult)
eventParamDelEventMap fpDel keys docSuffix dstPosId =
    do
        res <- fpDel
        let widgetIdMap =
                case res of
                Sugar.ParamDelResultTagsToVar Sugar.TagsToVar {..} ->
                    singletonIdMap (ttvReplacedTag ^. Sugar.tagInstance)
                    ttvReplacedByVarEntityId
                _ -> Map.empty
        Widget.eventResultFromCursor dstPosId
            & Widget.applyIdMapping widgetIdMap
            & return
    & E.keyPresses keys
        (E.Doc ["Edit", "Delete parameter" ++ docSuffix])
-- | Everything a parameter edit needs from its context: how to build the
-- name editor widget, the delete action, and the optional add-next /
-- reorder actions (Nothing when the operation is unavailable).
data Info m = Info
    { iMakeNameEdit :: Widget.Id -> ExprGuiM m (ExpressionGui m)
    , iDel :: T m Sugar.ParamDelResult
    , iMAddNext :: Maybe (T m Sugar.ParamAddResult)
    , iMOrderBefore :: Maybe (T m ())
    , iMOrderAfter :: Maybe (T m ())
    }
-- exported for use in definition sugaring.
-- | Build the GUI for one function parameter: a name editor (annotated per
-- 'ShowAnnotation') with delete / add-next / reorder key bindings attached.
-- @prevId@\/@nextId@ are the cursor destinations for backward\/forward delete.
make ::
    Monad m =>
    ExpressionGui.EvalAnnotationOptions ->
    ExprGuiT.ShowAnnotation -> Widget.Id -> Widget.Id ->
    Sugar.FuncParam (Info m) -> ExprGuiM m (ExpressionGui m)
make annotationOpts showAnnotation prevId nextId param =
    assignCursor $
    do
        config <- ExprGuiM.readConfig
        -- All parameter-level key bindings, merged into one event map.
        let paramEventMap = mconcat
                [ eventParamDelEventMap (iDel info) (Config.delForwardKeys config) "" nextId
                , eventParamDelEventMap (iDel info) (Config.delBackwardKeys config) " backwards" prevId
                , maybe mempty (eventMapAddNextParam config) (iMAddNext info)
                , maybe mempty (eventMapOrderParam (Config.paramOrderBeforeKeys config) "before") (iMOrderBefore info)
                , maybe mempty (eventMapOrderParam (Config.paramOrderAfterKeys config) "after") (iMOrderAfter info)
                ]
        ExpressionGui.maybeAddAnnotationWith annotationOpts
            ExpressionGui.KeepWideAnnotation showAnnotation
            (param ^. Sugar.fpAnnotation)
            entityId
            <*>
            ( iMakeNameEdit info myId
                <&> ExpressionGui.egWidget %~ Widget.weakerEvents paramEventMap
                <&> ExpressionGui.egAlignment . _1 .~ 0.5
            )
    where
        entityId = param ^. Sugar.fpId
        myId = WidgetIds.fromEntityId entityId
        info = param ^. Sugar.fpInfo
        -- Cursors left on any of the parameter's hidden ids are redirected
        -- to this parameter's widget.
        hiddenIds = map WidgetIds.fromEntityId $ param ^. Sugar.fpHiddenIds
        assignCursor x =
            foldr (`ExprGuiM.assignCursorPrefix` const myId) x hiddenIds
| da-x/lamdu | Lamdu/GUI/ParamEdit.hs | gpl-3.0 | 5,497 | 0 | 18 | 1,236 | 1,356 | 718 | 638 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module WebParsing.ReqParser where
import qualified Text.Parsec as Parsec
import Text.Parsec.String (Parser)
import Text.Parsec ((<|>))
import Database.Requirement
-- define separators
-- | Separator between a credit count and the course list it applies to,
-- e.g. \"5.0 FCEs from: ...\".
-- The credit-unit alternatives are ordered longest-first: since each literal
-- is wrapped in 'Parsec.try', a successful match never backtracks, so with
-- \"FCEs\" listed before \"FCEs:\" the colon variants could never match
-- (the prefix would succeed and leave the \':\' unconsumed).
fromSeparator :: Parser ()
fromSeparator = Parsec.spaces >> (Parsec.choice $ map (Parsec.try . Parsec.string) [
        "full course or its equivalent",
        "FCEs:",
        "FCEs",
        "FCE:",
        "FCE"
        ]) >> (Parsec.choice $ map (Parsec.try . Parsec.string) [
        " of any of the following:",
        " from the following: ",
        " from:",
        " from",
        " at"
        ]) >> Parsec.spaces
-- | A literal opening parenthesis.
lParen :: Parser Char
lParen = Parsec.char '('
-- | A literal closing parenthesis.
rParen :: Parser Char
rParen = Parsec.char ')'
-- | Separator between alternative requirements.
-- Each literal is wrapped in 'Parsec.try': without it, input like \"Or\"
-- would make the \"OR\" alternative consume the \'O\' and then fail, which
-- fails the whole 'Parsec.choice' with input consumed, so \"Or\"\/\"or\"
-- could never be reached.
orSeparator :: Parser String
orSeparator = Parsec.choice $ map (Parsec.try . Parsec.string) [
    "/",
    "OR",
    "Or",
    "or"
    ]
-- | Separator between conjoined requirements.
-- Each literal is wrapped in 'Parsec.try' for the same reason as in
-- 'orSeparator': otherwise \"AND\" would consume the \'A\' of \"And\" and
-- fail the whole 'Parsec.choice' with input consumed.
andSeparator :: Parser String
andSeparator = Parsec.choice $ map (Parsec.try . Parsec.string) [
    ",",
    "AND",
    "And",
    "and",
    ";"
    ]
-- | A literal semicolon, used between top-level requirement groups.
semicolon :: Parser Char
semicolon = Parsec.char ';'
-- | A credit amount: digits with an optional \".\" and fractional digits
-- (e.g. \"4\" or \"4.0\"). A dot must be followed by at least one digit.
creditsParser :: Parser String
creditsParser = do
    Parsec.spaces
    whole <- Parsec.many1 Parsec.digit
    dot <- Parsec.option "" $ Parsec.string "."
    frac <- if null dot then return "" else Parsec.many1 Parsec.digit
    return $ concat [whole, dot, frac]
-- | Helpers for parsing grades
-- A numeric grade: one or more digits with an optional trailing \'%\'.
percentParser :: Parser String
percentParser = do
    digits <- Parsec.many1 Parsec.digit
    Parsec.optional (Parsec.char '%')
    return digits
-- | A letter grade A-F with an optional trailing \'+\' or \'-\'.
letterParser :: Parser String
letterParser = do
    gradeLetter <- Parsec.oneOf "ABCDEF"
    modifier <- Parsec.option "" (Parsec.string "+" <|> Parsec.string "-")
    return (gradeLetter : modifier)
-- | Arbitrary text up to (but not consuming) the next closing parenthesis.
infoParser :: Parser String
infoParser =
    Parsec.manyTill Parsec.anyChar (Parsec.try (Parsec.lookAhead (Parsec.string ")")))
-- | Parser for a grade, which can be in one of the following forms:
-- a number with or without a percent symbol, or a letter A-F followed by a +/-.
-- The trailing lookahead insists the grade is followed by a separator,
-- whitespace, end of input, or punctuation — so a grade is not mistaken
-- for the start of a longer token.
gradeParser :: Parser String
gradeParser = do
    grade <- Parsec.try ((Parsec.between lParen rParen percentParser <|> letterParser) <|> (percentParser <|> letterParser))
    _ <- Parsec.lookAhead $ Parsec.choice $ map Parsec.try [
        andSeparator,
        orSeparator,
        Parsec.space >> return "",
        Parsec.eof >> return "",
        Parsec.oneOf "(),/;" >> return ""
        ]
    return grade
-- | Parse a grade cutoff written before a course, e.g.
-- \"minimum grade of 70% in CSC108H1\". Text between the grade and the
-- course code is skipped up to the first parsable course id.
coBefParser :: Parser Req
coBefParser = do
    _ <- Parsec.choice $ map (Parsec.try . (>> Parsec.space) . Parsec.string) ["minimum grade of", "minimum mark of", "minimum of", "minimum"]
    Parsec.spaces
    grade <- gradeParser
    Parsec.spaces
    _ <- Parsec.manyTill Parsec.anyChar (Parsec.try $ Parsec.lookAhead singleParser)
    req <- singleParser
    return $ GRADE grade req
-- | Parse a grade cutoff written after a course, e.g.
-- \"CSC108H1 (70%)\" or \"CSC108H1 with at least 70\".
-- cutoffHelper skips non-parenthesis filler text until a lookahead sees
-- either a separator or a grade that ends a token, then reads the grade.
coAftParser :: Parser Req
coAftParser = do
    req <- singleParser
    Parsec.spaces
    grade <- Parsec.between lParen rParen cutoffHelper <|> cutoffHelper
    return $ GRADE grade req
    where
    cutoffHelper = Parsec.between Parsec.spaces Parsec.spaces $ do
        _ <- Parsec.manyTill (Parsec.noneOf "()")
            (Parsec.try $ Parsec.lookAhead (orSeparator <|> andSeparator <|> (do
                _ <- gradeParser
                Parsec.spaces
                Parsec.notFollowedBy $ Parsec.alphaNum
                return "")))
        gradeParser
-- | Parser for a grade cutoff on a course.
-- This is tricky because the cutoff can come before or after the course code.
-- The after-course form is tried first (wrapped in 'Parsec.try' so it can
-- backtrack cleanly) because both forms may start differently.
cutoffParser :: Parser Req
cutoffParser = Parsec.try coAftParser <|> coBefParser
-- | Parser for requirements written within parentheses
parParser :: Parser Req
parParser = Parsec.between lParen rParen andParser
-- | Free-form prerequisite text, e.g. \"proficiency in C\/C++\".
-- Any course code embedded in such text is deliberately kept as plain
-- text rather than parsed out. Stops at \';\' or a line break.
rawTextParser :: Parser Req
rawTextParser = fmap RAW (Parsec.many (Parsec.noneOf ";\r\n"))
-- | A single course code: 3 letters (department), 3 digits (number) and
-- two alphanumerics (weight\/session suffix), e.g. \"CSC108H1\".
courseIDParser :: Parser String
courseIDParser = do
    dept <- Parsec.count 3 Parsec.letter
    number <- Parsec.count 3 Parsec.digit
    -- TODO: Make the last two letters more restricted.
    suffix <- Parsec.count 2 Parsec.alphaNum
    return $ concat [dept, number, suffix]
-- | A bare course code as an atomic requirement with no extra info.
singleParser :: Parser Req
singleParser = fmap (\code -> J code "") courseIDParser
-- | Parser for single courses or "atomic" Reqs represented by a J.
-- An optional parenthesised suffix is interpreted either as a grade cutoff
-- (numeric or letter form, yielding GRADE) or as free-form info attached
-- to the course (yielding J with that info).
justParser :: Parser Req
justParser = do
    Parsec.spaces
    courseID <- courseIDParser
    Parsec.spaces
    meta <- Parsec.option (Right "") $ Parsec.between lParen rParen markInfoParser
    return $ case meta of
        Left mark -> GRADE mark $ J courseID ""
        Right info -> J courseID info
    where
    markInfoParser :: Parser (Either String String)
    markInfoParser = do
        grade <- Parsec.try (fmap Left percentParser <|> fmap Left letterParser <|> fmap Right infoParser)
        return grade
-- | Parse a single course with or without a cutoff, a parenthesised
-- sub-requirement, or — as a last resort — raw text. Alternatives are
-- tried in order, each wrapped in 'Parsec.try' so a partial match
-- backtracks to the next alternative.
courseParser :: Parser Req
courseParser = Parsec.between Parsec.spaces Parsec.spaces $ Parsec.choice $ map Parsec.try [
    parParser,
    cutoffParser,
    justParser,
    rawTextParser
    ]
-- | Requirements related through an OR separator. A single alternative is
-- returned unwrapped; two or more are wrapped in an OR node.
orParser :: Parser Req
orParser = do
    alternatives <- Parsec.sepBy courseParser orSeparator
    case alternatives of
        []       -> fail "Empty Req."
        [single] -> return single
        several  -> return $ OR several
-- | Requirements related through an AND separator. A single conjunct is
-- returned unwrapped; two or more are wrapped in an AND node.
andParser :: Parser Req
andParser = do
    conjuncts <- Parsec.sepBy orParser andSeparator
    case conjuncts of
        []       -> fail "Empty Req."
        [single] -> return single
        several  -> return $ AND several
-- | Requirements in credit-count form, e.g.
-- \"4.0 FCEs from CSC108H1, CSC148H1, ...\".
fcesParser :: Parser Req
fcesParser = do
    credits <- creditsParser
    _ <- fromSeparator
    Parsec.spaces
    courses <- andParser
    return $ FCES credits courses
-- | Parser for requirements separated by a semicolon.
-- Tries the credit-count (\"FCEs from\") form first, falling back to a
-- plain AND/OR requirement list.
categoryParser :: Parser Req
categoryParser = Parsec.try fcesParser <|> Parsec.try andParser
-- | Top-level entry point: parse a raw prerequisite string into a Req.
-- A parse failure is preserved as a J node holding the rendered error.
parseReqs :: String -> Req
parseReqs reqString =
    case Parsec.parse categoryParser "" reqString of
        Right parsed -> parsed
        Left err -> J (show err) ""
| christinem/courseography | app/WebParsing/ReqParser.hs | gpl-3.0 | 6,504 | 0 | 21 | 1,561 | 1,738 | 862 | 876 | 159 | 3 |
{-# OPTIONS -O2 -Wall -Werror -Wwarn -XRankNTypes #-}
{- |
Module : Network.GoTextProtocol2.Types
Copyright : Copyright (C) 2010 Fabian Linzberger
License : GNU GPL, version 3 or above
Maintainer : Fabian Linzberger <e@lefant.net>
Stability : experimental
Portability: probably
Shared types for Go Text Protocol implementations.
-}
module Network.GoTextProtocol2.Types ( Id
, Command(..)
, Argument(..)
) where
import Data.Goban.Types (Move, Color)
-- | GTP command/response id used to pair responses with commands.
type Id = Int
-- | A GTP command: its name together with its already-parsed arguments.
data Command = Command String [Argument]
deriving (Show, Eq)
-- | The kinds of argument a GTP command may carry.
data Argument = IntArgument Int
| StringArgument String
| MoveArgument Move
| ColorArgument Color
| FloatArgument Float
| TimeLeftArgument Int Int
| TimeSettingsArgument Int Int Int
| MaybeKeyValueArgument (Maybe (String, Int))
deriving (Show, Eq)
| lefant/kurt | src/Network/GoTextProtocol2/Types.hs | gpl-3.0 | 1,048 | 0 | 9 | 383 | 146 | 89 | 57 | 17 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ToolResults.Projects.Histories.Executions.Clusters.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists Screenshot Clusters Returns the list of screenshot clusters
-- corresponding to an execution. Screenshot clusters are created after the
-- execution is finished. Clusters are created from a set of screenshots.
-- Between any two screenshots, a matching score is calculated based off
-- their metadata that determines how similar they are. Screenshots are
-- placed in the cluster that has screens which have the highest matching
-- scores.
--
-- /See:/ <https://firebase.google.com/docs/test-lab/ Cloud Tool Results API Reference> for @toolresults.projects.histories.executions.clusters.list@.
module Network.Google.Resource.ToolResults.Projects.Histories.Executions.Clusters.List
(
-- * REST Resource
ProjectsHistoriesExecutionsClustersListResource
-- * Creating a Request
, projectsHistoriesExecutionsClustersList
, ProjectsHistoriesExecutionsClustersList
-- * Request Lenses
, pheclExecutionId
, pheclHistoryId
, pheclProjectId
) where
import Network.Google.Prelude
import Network.Google.ToolResults.Types
-- | A resource alias for @toolresults.projects.histories.executions.clusters.list@ method which the
-- 'ProjectsHistoriesExecutionsClustersList' request conforms to.
-- (Servant-style path:
-- @toolresults\/v1beta3\/projects\/{projectId}\/histories\/{historyId}\/executions\/{executionId}\/clusters@.)
type ProjectsHistoriesExecutionsClustersListResource
     =
     "toolresults" :>
       "v1beta3" :>
         "projects" :>
           Capture "projectId" Text :>
             "histories" :>
               Capture "historyId" Text :>
                 "executions" :>
                   Capture "executionId" Text :>
                     "clusters" :>
                       QueryParam "alt" AltJSON :>
                         Get '[JSON] ListScreenshotClustersResponse
-- | Lists Screenshot Clusters Returns the list of screenshot clusters
-- corresponding to an execution. Screenshot clusters are created after the
-- execution is finished. Clusters are created from a set of screenshots.
-- Between any two screenshots, a matching score is calculated based off
-- their metadata that determines how similar they are. Screenshots are
-- placed in the cluster that has screens which have the highest matching
-- scores.
--
-- /See:/ 'projectsHistoriesExecutionsClustersList' smart constructor.
data ProjectsHistoriesExecutionsClustersList =
  ProjectsHistoriesExecutionsClustersList'
    { _pheclExecutionId :: !Text
    , _pheclHistoryId :: !Text
    , _pheclProjectId :: !Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsHistoriesExecutionsClustersList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pheclExecutionId'
--
-- * 'pheclHistoryId'
--
-- * 'pheclProjectId'
projectsHistoriesExecutionsClustersList
    :: Text -- ^ 'pheclExecutionId'
    -> Text -- ^ 'pheclHistoryId'
    -> Text -- ^ 'pheclProjectId'
    -> ProjectsHistoriesExecutionsClustersList
projectsHistoriesExecutionsClustersList pPheclExecutionId_ pPheclHistoryId_ pPheclProjectId_ =
  ProjectsHistoriesExecutionsClustersList'
    { _pheclExecutionId = pPheclExecutionId_
    , _pheclHistoryId = pPheclHistoryId_
    , _pheclProjectId = pPheclProjectId_
    }
-- Lenses over the three required path parameters of the request.
-- | An Execution id. Required.
pheclExecutionId :: Lens' ProjectsHistoriesExecutionsClustersList Text
pheclExecutionId
  = lens _pheclExecutionId
      (\ s a -> s{_pheclExecutionId = a})
-- | A History id. Required.
pheclHistoryId :: Lens' ProjectsHistoriesExecutionsClustersList Text
pheclHistoryId
  = lens _pheclHistoryId
      (\ s a -> s{_pheclHistoryId = a})
-- | A Project id. Required.
pheclProjectId :: Lens' ProjectsHistoriesExecutionsClustersList Text
pheclProjectId
  = lens _pheclProjectId
      (\ s a -> s{_pheclProjectId = a})
-- Wires the request record onto the REST resource: no OAuth scopes are
-- required, and the response decodes as 'ListScreenshotClustersResponse'.
instance GoogleRequest
           ProjectsHistoriesExecutionsClustersList
         where
        type Rs ProjectsHistoriesExecutionsClustersList =
             ListScreenshotClustersResponse
        type Scopes ProjectsHistoriesExecutionsClustersList =
             '[]
        requestClient
          ProjectsHistoriesExecutionsClustersList'{..}
          = go _pheclProjectId _pheclHistoryId
              _pheclExecutionId
              (Just AltJSON)
              toolResultsService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           ProjectsHistoriesExecutionsClustersListResource)
                      mempty
| brendanhay/gogol | gogol-toolresults/gen/Network/Google/Resource/ToolResults/Projects/Histories/Executions/Clusters/List.hs | mpl-2.0 | 5,284 | 0 | 17 | 1,170 | 478 | 290 | 188 | 86 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.CloudSearch.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.CloudSearch.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
-- | The reason for interpretation of the query. This field will not be
-- UNSPECIFIED if the interpretation type is not NONE.
data QueryInterpretationReason
  = Unspecified
    -- ^ @UNSPECIFIED@
  | QueryHasNATuralLanguageIntent
    -- ^ @QUERY_HAS_NATURAL_LANGUAGE_INTENT@
    -- Natural language interpretation of the query is used to fetch the search
    -- results.
  | NotEnoughResultsFoundForUserQuery
    -- ^ @NOT_ENOUGH_RESULTS_FOUND_FOR_USER_QUERY@
    -- Query and document terms similarity is used to selectively broaden the
    -- query to retrieve additional search results since enough results were
    -- not found for the user query. Interpreted query will be empty for this
    -- case.
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable QueryInterpretationReason
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData QueryInterpretationReason where
    parseQueryParam = \case
        "UNSPECIFIED" -> Right Unspecified
        "QUERY_HAS_NATURAL_LANGUAGE_INTENT" -> Right QueryHasNATuralLanguageIntent
        "NOT_ENOUGH_RESULTS_FOUND_FOR_USER_QUERY" -> Right NotEnoughResultsFoundForUserQuery
        x -> Left ("Unable to parse QueryInterpretationReason from: " <> x)
instance ToHttpApiData QueryInterpretationReason where
    toQueryParam = \case
        Unspecified -> "UNSPECIFIED"
        QueryHasNATuralLanguageIntent -> "QUERY_HAS_NATURAL_LANGUAGE_INTENT"
        NotEnoughResultsFoundForUserQuery -> "NOT_ENOUGH_RESULTS_FOUND_FOR_USER_QUERY"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON QueryInterpretationReason where
    parseJSON = parseJSONText "QueryInterpretationReason"
instance ToJSON QueryInterpretationReason where
    toJSON = toJSONText
-- | Drive mime-type restriction kinds used by search restricts.
data DriveMimeTypeRestrictType
  = DMTRTUnspecified
    -- ^ @UNSPECIFIED@
  | DMTRTPdf
    -- ^ @PDF@
  | DMTRTDocument
    -- ^ @DOCUMENT@
  | DMTRTPresentation
    -- ^ @PRESENTATION@
  | DMTRTSpreadsheet
    -- ^ @SPREADSHEET@
  | DMTRTForm
    -- ^ @FORM@
  | DMTRTDrawing
    -- ^ @DRAWING@
  | DMTRTScript
    -- ^ @SCRIPT@
  | DMTRTMap
    -- ^ @MAP@
  | DMTRTImage
    -- ^ @IMAGE@
  | DMTRTAudio
    -- ^ @AUDIO@
  | DMTRTVideo
    -- ^ @VIDEO@
  | DMTRTFolder
    -- ^ @FOLDER@
  | DMTRTArchive
    -- ^ @ARCHIVE@
  | DMTRTSite
    -- ^ @SITE@
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DriveMimeTypeRestrictType
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData DriveMimeTypeRestrictType where
    parseQueryParam = \case
        "UNSPECIFIED" -> Right DMTRTUnspecified
        "PDF" -> Right DMTRTPdf
        "DOCUMENT" -> Right DMTRTDocument
        "PRESENTATION" -> Right DMTRTPresentation
        "SPREADSHEET" -> Right DMTRTSpreadsheet
        "FORM" -> Right DMTRTForm
        "DRAWING" -> Right DMTRTDrawing
        "SCRIPT" -> Right DMTRTScript
        "MAP" -> Right DMTRTMap
        "IMAGE" -> Right DMTRTImage
        "AUDIO" -> Right DMTRTAudio
        "VIDEO" -> Right DMTRTVideo
        "FOLDER" -> Right DMTRTFolder
        "ARCHIVE" -> Right DMTRTArchive
        "SITE" -> Right DMTRTSite
        x -> Left ("Unable to parse DriveMimeTypeRestrictType from: " <> x)
instance ToHttpApiData DriveMimeTypeRestrictType where
    toQueryParam = \case
        DMTRTUnspecified -> "UNSPECIFIED"
        DMTRTPdf -> "PDF"
        DMTRTDocument -> "DOCUMENT"
        DMTRTPresentation -> "PRESENTATION"
        DMTRTSpreadsheet -> "SPREADSHEET"
        DMTRTForm -> "FORM"
        DMTRTDrawing -> "DRAWING"
        DMTRTScript -> "SCRIPT"
        DMTRTMap -> "MAP"
        DMTRTImage -> "IMAGE"
        DMTRTAudio -> "AUDIO"
        DMTRTVideo -> "VIDEO"
        DMTRTFolder -> "FOLDER"
        DMTRTArchive -> "ARCHIVE"
        DMTRTSite -> "SITE"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON DriveMimeTypeRestrictType where
    parseJSON = parseJSONText "DriveMimeTypeRestrictType"
instance ToJSON DriveMimeTypeRestrictType where
    toJSON = toJSONText
-- | Drive follow-up restriction kinds (suggestions vs. action items).
data DriveFollowUpRestrictType
  = DFURTUnspecified
    -- ^ @UNSPECIFIED@
  | DFURTFollowupSuggestions
    -- ^ @FOLLOWUP_SUGGESTIONS@
  | DFURTFollowupActionItems
    -- ^ @FOLLOWUP_ACTION_ITEMS@
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DriveFollowUpRestrictType
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData DriveFollowUpRestrictType where
    parseQueryParam = \case
        "UNSPECIFIED" -> Right DFURTUnspecified
        "FOLLOWUP_SUGGESTIONS" -> Right DFURTFollowupSuggestions
        "FOLLOWUP_ACTION_ITEMS" -> Right DFURTFollowupActionItems
        x -> Left ("Unable to parse DriveFollowUpRestrictType from: " <> x)
instance ToHttpApiData DriveFollowUpRestrictType where
    toQueryParam = \case
        DFURTUnspecified -> "UNSPECIFIED"
        DFURTFollowupSuggestions -> "FOLLOWUP_SUGGESTIONS"
        DFURTFollowupActionItems -> "FOLLOWUP_ACTION_ITEMS"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON DriveFollowUpRestrictType where
    parseJSON = parseJSONText "DriveFollowUpRestrictType"
instance ToJSON DriveFollowUpRestrictType where
    toJSON = toJSONText
-- | Status code.
data ItemStatusCode
  = CodeUnspecified
    -- ^ @CODE_UNSPECIFIED@
    -- Input-only value. Used with Items.list to list all items in the queue,
    -- regardless of status.
  | Error'
    -- ^ @ERROR@
    -- Error encountered by Cloud Search while processing this item. Details of
    -- the error are in repositoryError.
  | Modified
    -- ^ @MODIFIED@
    -- Item has been modified in the repository, and is out of date with the
    -- version previously accepted into Cloud Search.
  | NewItem
    -- ^ @NEW_ITEM@
    -- Item is known to exist in the repository, but is not yet accepted by
    -- Cloud Search. An item can be in this state when Items.push has been
    -- called for an item of this name that did not exist previously.
  | Accepted
    -- ^ @ACCEPTED@
    -- API has accepted the up-to-date data of this item.
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ItemStatusCode
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData ItemStatusCode where
    parseQueryParam = \case
        "CODE_UNSPECIFIED" -> Right CodeUnspecified
        "ERROR" -> Right Error'
        "MODIFIED" -> Right Modified
        "NEW_ITEM" -> Right NewItem
        "ACCEPTED" -> Right Accepted
        x -> Left ("Unable to parse ItemStatusCode from: " <> x)
instance ToHttpApiData ItemStatusCode where
    toQueryParam = \case
        CodeUnspecified -> "CODE_UNSPECIFIED"
        Error' -> "ERROR"
        Modified -> "MODIFIED"
        NewItem -> "NEW_ITEM"
        Accepted -> "ACCEPTED"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON ItemStatusCode where
    parseJSON = parseJSONText "ItemStatusCode"
instance ToJSON ItemStatusCode where
    toJSON = toJSONText
-- | How (if at all) the original query's results were altered by the
-- natural-language interpretation.
data QueryInterpretationInterpretationType
  = None
    -- ^ @NONE@
    -- Neither the natural language interpretation, nor a broader version of
    -- the query is used to fetch the search results.
  | Blend
    -- ^ @BLEND@
    -- The results from original query are blended with other results. The
    -- reason for blending these other results with the results from original
    -- query is populated in the \'Reason\' field below.
  | Replace
    -- ^ @REPLACE@
    -- The results from original query are replaced. The reason for replacing
    -- the results from original query is populated in the \'Reason\' field
    -- below.
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable QueryInterpretationInterpretationType
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData QueryInterpretationInterpretationType where
    parseQueryParam = \case
        "NONE" -> Right None
        "BLEND" -> Right Blend
        "REPLACE" -> Right Replace
        x -> Left ("Unable to parse QueryInterpretationInterpretationType from: " <> x)
instance ToHttpApiData QueryInterpretationInterpretationType where
    toQueryParam = \case
        None -> "NONE"
        Blend -> "BLEND"
        Replace -> "REPLACE"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON QueryInterpretationInterpretationType where
    parseJSON = parseJSONText "QueryInterpretationInterpretationType"
instance ToJSON QueryInterpretationInterpretationType where
    toJSON = toJSONText
-- | Kind of user interaction recorded against an item.
data InteractionType
  = ITUnspecified
    -- ^ @UNSPECIFIED@
    -- Invalid value.
  | ITView
    -- ^ @VIEW@
    -- This interaction indicates the user viewed the item.
  | ITEdit
    -- ^ @EDIT@
    -- This interaction indicates the user edited the item.
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable InteractionType
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData InteractionType where
    parseQueryParam = \case
        "UNSPECIFIED" -> Right ITUnspecified
        "VIEW" -> Right ITView
        "EDIT" -> Right ITEdit
        x -> Left ("Unable to parse InteractionType from: " <> x)
instance ToHttpApiData InteractionType where
    toQueryParam = \case
        ITUnspecified -> "UNSPECIFIED"
        ITView -> "VIEW"
        ITEdit -> "EDIT"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON InteractionType where
    parseJSON = parseJSONText "InteractionType"
instance ToJSON InteractionType where
    toJSON = toJSONText
-- | Type of the operator.
data QueryOperatorType
  = QOTUnknown
    -- ^ @UNKNOWN@
    -- Invalid value.
  | QOTInteger
    -- ^ @INTEGER@
  | QOTDouble
    -- ^ @DOUBLE@
  | QOTTimestamp
    -- ^ @TIMESTAMP@
  | QOTBoolean
    -- ^ @BOOLEAN@
  | QOTEnum'
    -- ^ @ENUM@
  | QOTDate
    -- ^ @DATE@
  | QOTText
    -- ^ @TEXT@
  | QOTHTML
    -- ^ @HTML@
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable QueryOperatorType
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData QueryOperatorType where
    parseQueryParam = \case
        "UNKNOWN" -> Right QOTUnknown
        "INTEGER" -> Right QOTInteger
        "DOUBLE" -> Right QOTDouble
        "TIMESTAMP" -> Right QOTTimestamp
        "BOOLEAN" -> Right QOTBoolean
        "ENUM" -> Right QOTEnum'
        "DATE" -> Right QOTDate
        "TEXT" -> Right QOTText
        "HTML" -> Right QOTHTML
        x -> Left ("Unable to parse QueryOperatorType from: " <> x)
instance ToHttpApiData QueryOperatorType where
    toQueryParam = \case
        QOTUnknown -> "UNKNOWN"
        QOTInteger -> "INTEGER"
        QOTDouble -> "DOUBLE"
        QOTTimestamp -> "TIMESTAMP"
        QOTBoolean -> "BOOLEAN"
        QOTEnum' -> "ENUM"
        QOTDate -> "DATE"
        QOTText -> "TEXT"
        QOTHTML -> "HTML"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON QueryOperatorType where
    parseJSON = parseJSONText "QueryOperatorType"
instance ToJSON QueryOperatorType where
    toJSON = toJSONText
-- | Used to specify the ordered ranking for the enumeration that determines
-- how the integer values provided in the possible EnumValuePairs are used
-- to rank results. If specified, integer values must be provided for all
-- possible EnumValuePair values given for this property. Can only be used
-- if isRepeatable is false.
data EnumPropertyOptionsOrderedRanking
  = NoOrder
    -- ^ @NO_ORDER@
    -- There is no ranking order for the property. Results aren\'t adjusted by
    -- this property\'s value.
  | Ascending
    -- ^ @ASCENDING@
    -- This property is ranked in ascending order. Lower values indicate lower
    -- ranking.
  | Descending
    -- ^ @DESCENDING@
    -- This property is ranked in descending order. Lower values indicate
    -- higher ranking.
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable EnumPropertyOptionsOrderedRanking
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData EnumPropertyOptionsOrderedRanking where
    parseQueryParam = \case
        "NO_ORDER" -> Right NoOrder
        "ASCENDING" -> Right Ascending
        "DESCENDING" -> Right Descending
        x -> Left ("Unable to parse EnumPropertyOptionsOrderedRanking from: " <> x)
instance ToHttpApiData EnumPropertyOptionsOrderedRanking where
    toQueryParam = \case
        NoOrder -> "NO_ORDER"
        Ascending -> "ASCENDING"
        Descending -> "DESCENDING"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON EnumPropertyOptionsOrderedRanking where
    parseJSON = parseJSONText "EnumPropertyOptionsOrderedRanking"
instance ToJSON EnumPropertyOptionsOrderedRanking where
    toJSON = toJSONText
-- | Used to specify the ordered ranking for the integer. Can only be used if
-- isRepeatable is false.
data IntegerPropertyOptionsOrderedRanking
  = IPOORNoOrder
    -- ^ @NO_ORDER@
    -- There is no ranking order for the property. Results are not adjusted by
    -- this property\'s value.
  | IPOORAscending
    -- ^ @ASCENDING@
    -- This property is ranked in ascending order. Lower values indicate lower
    -- ranking.
  | IPOORDescending
    -- ^ @DESCENDING@
    -- This property is ranked in descending order. Lower values indicate
    -- higher ranking.
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable IntegerPropertyOptionsOrderedRanking
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData IntegerPropertyOptionsOrderedRanking where
    parseQueryParam = \case
        "NO_ORDER" -> Right IPOORNoOrder
        "ASCENDING" -> Right IPOORAscending
        "DESCENDING" -> Right IPOORDescending
        x -> Left ("Unable to parse IntegerPropertyOptionsOrderedRanking from: " <> x)
instance ToHttpApiData IntegerPropertyOptionsOrderedRanking where
    toQueryParam = \case
        IPOORNoOrder -> "NO_ORDER"
        IPOORAscending -> "ASCENDING"
        IPOORDescending -> "DESCENDING"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON IntegerPropertyOptionsOrderedRanking where
    parseJSON = parseJSONText "IntegerPropertyOptionsOrderedRanking"
instance ToJSON IntegerPropertyOptionsOrderedRanking where
    toJSON = toJSONText
-- | The type of the push operation that defines the push behavior.
data PushItemType
  = PITUnspecified
    -- ^ @UNSPECIFIED@
    -- Default UNSPECIFIED. Specifies that the push operation should not modify
    -- ItemStatus
  | PITModified
    -- ^ @MODIFIED@
    -- Indicates that the repository document has been modified or updated
    -- since the previous update call. This changes status to MODIFIED state
    -- for an existing item. If this is called on a non existing item, the
    -- status is changed to NEW_ITEM.
  | PITNotModified
    -- ^ @NOT_MODIFIED@
    -- Item in the repository has not been modified since the last update call.
    -- This push operation will set status to ACCEPTED state.
  | PITRepositoryError
    -- ^ @REPOSITORY_ERROR@
    -- Connector is facing a repository error regarding this item. Change
    -- status to REPOSITORY_ERROR state. Item is unreserved and rescheduled at
    -- a future time determined by exponential backoff.
  | PITReQueue
    -- ^ @REQUEUE@
    -- Call push with REQUEUE only for items that have been reserved. This
    -- action unreserves the item and resets its available time to the wall
    -- clock time.
  deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable PushItemType
-- Wire decoding; inverse of 'toQueryParam' below.
instance FromHttpApiData PushItemType where
    parseQueryParam = \case
        "UNSPECIFIED" -> Right PITUnspecified
        "MODIFIED" -> Right PITModified
        "NOT_MODIFIED" -> Right PITNotModified
        "REPOSITORY_ERROR" -> Right PITRepositoryError
        "REQUEUE" -> Right PITReQueue
        x -> Left ("Unable to parse PushItemType from: " <> x)
instance ToHttpApiData PushItemType where
    toQueryParam = \case
        PITUnspecified -> "UNSPECIFIED"
        PITModified -> "MODIFIED"
        PITNotModified -> "NOT_MODIFIED"
        PITRepositoryError -> "REPOSITORY_ERROR"
        PITReQueue -> "REQUEUE"
-- JSON uses the same textual encoding as the HTTP query parameter.
instance FromJSON PushItemType where
    parseJSON = parseJSONText "PushItemType"
instance ToJSON PushItemType where
    toJSON = toJSONText
-- | The logic operator of the sub filter.
data CompositeFilterLogicOperator
    = And
      -- ^ @AND@
      -- Logical operators, which can only be applied to sub filters.
    | OR
      -- ^ @OR@
    | Not
      -- ^ @NOT@
      -- NOT can only be applied on a single sub filter.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable CompositeFilterLogicOperator

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData CompositeFilterLogicOperator where
    parseQueryParam t = case t of
        "AND" -> Right And
        "OR" -> Right OR
        "NOT" -> Right Not
        other -> Left ("Unable to parse CompositeFilterLogicOperator from: " <> other)

instance ToHttpApiData CompositeFilterLogicOperator where
    toQueryParam And = "AND"
    toQueryParam OR = "OR"
    toQueryParam Not = "NOT"

instance FromJSON CompositeFilterLogicOperator where
    parseJSON = parseJSONText "CompositeFilterLogicOperator"

instance ToJSON CompositeFilterLogicOperator where
    toJSON = toJSONText
-- | Importance of the source.
data SourceScoringConfigSourceImportance
    = Default
      -- ^ @DEFAULT@
    | Low
      -- ^ @LOW@
    | High
      -- ^ @HIGH@
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable SourceScoringConfigSourceImportance

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData SourceScoringConfigSourceImportance where
    parseQueryParam t = case t of
        "DEFAULT" -> Right Default
        "LOW" -> Right Low
        "HIGH" -> Right High
        other -> Left ("Unable to parse SourceScoringConfigSourceImportance from: " <> other)

instance ToHttpApiData SourceScoringConfigSourceImportance where
    toQueryParam Default = "DEFAULT"
    toQueryParam Low = "LOW"
    toQueryParam High = "HIGH"

instance FromJSON SourceScoringConfigSourceImportance where
    parseJSON = parseJSONText "SourceScoringConfigSourceImportance"

instance ToJSON SourceScoringConfigSourceImportance where
    toJSON = toJSONText
-- | Indicates the ranking importance given to property when it is matched
-- during retrieval. Once set, the token importance of a property cannot be
-- changed.
data RetrievalImportanceImportance
    = RIIDefault
      -- ^ @DEFAULT@
      -- Treat the match like a body text match.
    | RIIHighest
      -- ^ @HIGHEST@
      -- Treat the match like a match against title of the item.
    | RIIHigh
      -- ^ @HIGH@
      -- Treat the match with higher importance than body text.
    | RIILow
      -- ^ @LOW@
      -- Treat the match with lower importance than body text.
    | RIINone
      -- ^ @NONE@
      -- Do not match against this field during retrieval. The property can
      -- still be used for operator matching, faceting, and suggest if desired.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable RetrievalImportanceImportance

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData RetrievalImportanceImportance where
    parseQueryParam t = case t of
        "DEFAULT" -> Right RIIDefault
        "HIGHEST" -> Right RIIHighest
        "HIGH" -> Right RIIHigh
        "LOW" -> Right RIILow
        "NONE" -> Right RIINone
        other -> Left ("Unable to parse RetrievalImportanceImportance from: " <> other)

instance ToHttpApiData RetrievalImportanceImportance where
    toQueryParam RIIDefault = "DEFAULT"
    toQueryParam RIIHighest = "HIGHEST"
    toQueryParam RIIHigh = "HIGH"
    toQueryParam RIILow = "LOW"
    toQueryParam RIINone = "NONE"

instance FromJSON RetrievalImportanceImportance where
    parseJSON = parseJSONText "RetrievalImportanceImportance"

instance ToJSON RetrievalImportanceImportance where
    toJSON = toJSONText
-- | Error code indicating the nature of the error.
data ProcessingErrorCode
    = ProcessingErrorCodeUnspecified
      -- ^ @PROCESSING_ERROR_CODE_UNSPECIFIED@
      -- Input only value. Use this value in Items.
    | MalformedRequest
      -- ^ @MALFORMED_REQUEST@
      -- Item\'s ACL, metadata, or content is malformed or in invalid state.
      -- FieldViolations contains more details on where the problem is.
    | UnsupportedContentFormat
      -- ^ @UNSUPPORTED_CONTENT_FORMAT@
      -- Content format is unsupported.
    | IndirectBrokenACL
      -- ^ @INDIRECT_BROKEN_ACL@
      -- Items with incomplete ACL information due to inheriting other items
      -- with broken ACL or having groups with unmapped descendants.
    | ACLCycle
      -- ^ @ACL_CYCLE@
      -- ACL inheritance graph formed a cycle.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable ProcessingErrorCode

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData ProcessingErrorCode where
    parseQueryParam t = case t of
        "PROCESSING_ERROR_CODE_UNSPECIFIED" -> Right ProcessingErrorCodeUnspecified
        "MALFORMED_REQUEST" -> Right MalformedRequest
        "UNSUPPORTED_CONTENT_FORMAT" -> Right UnsupportedContentFormat
        "INDIRECT_BROKEN_ACL" -> Right IndirectBrokenACL
        "ACL_CYCLE" -> Right ACLCycle
        other -> Left ("Unable to parse ProcessingErrorCode from: " <> other)

instance ToHttpApiData ProcessingErrorCode where
    toQueryParam ProcessingErrorCodeUnspecified = "PROCESSING_ERROR_CODE_UNSPECIFIED"
    toQueryParam MalformedRequest = "MALFORMED_REQUEST"
    toQueryParam UnsupportedContentFormat = "UNSUPPORTED_CONTENT_FORMAT"
    toQueryParam IndirectBrokenACL = "INDIRECT_BROKEN_ACL"
    toQueryParam ACLCycle = "ACL_CYCLE"

instance FromJSON ProcessingErrorCode where
    parseJSON = parseJSONText "ProcessingErrorCode"

instance ToJSON ProcessingErrorCode where
    toJSON = toJSONText
-- | Ascending is the default sort order
data SortOptionsSortOrder
    = SOSOAscending
      -- ^ @ASCENDING@
    | SOSODescending
      -- ^ @DESCENDING@
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable SortOptionsSortOrder

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData SortOptionsSortOrder where
    parseQueryParam t = case t of
        "ASCENDING" -> Right SOSOAscending
        "DESCENDING" -> Right SOSODescending
        other -> Left ("Unable to parse SortOptionsSortOrder from: " <> other)

instance ToHttpApiData SortOptionsSortOrder where
    toQueryParam SOSOAscending = "ASCENDING"
    toQueryParam SOSODescending = "DESCENDING"

instance FromJSON SortOptionsSortOrder where
    parseJSON = parseJSONText "SortOptionsSortOrder"

instance ToJSON SortOptionsSortOrder where
    toJSON = toJSONText
-- | Required. The RequestMode for this request.
data IndexingDatasourcesItemsDeleteMode
    = IDIDMUnspecified
      -- ^ @UNSPECIFIED@
      -- Priority is not specified in the update request. Leaving priority
      -- unspecified results in an update failure.
    | IDIDMSynchronous
      -- ^ @SYNCHRONOUS@
      -- For real-time updates.
    | IDIDMAsynchronous
      -- ^ @ASYNCHRONOUS@
      -- For changes that are executed after the response is sent back to the
      -- caller.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable IndexingDatasourcesItemsDeleteMode

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData IndexingDatasourcesItemsDeleteMode where
    parseQueryParam t = case t of
        "UNSPECIFIED" -> Right IDIDMUnspecified
        "SYNCHRONOUS" -> Right IDIDMSynchronous
        "ASYNCHRONOUS" -> Right IDIDMAsynchronous
        other -> Left ("Unable to parse IndexingDatasourcesItemsDeleteMode from: " <> other)

instance ToHttpApiData IndexingDatasourcesItemsDeleteMode where
    toQueryParam IDIDMUnspecified = "UNSPECIFIED"
    toQueryParam IDIDMSynchronous = "SYNCHRONOUS"
    toQueryParam IDIDMAsynchronous = "ASYNCHRONOUS"

instance FromJSON IndexingDatasourcesItemsDeleteMode where
    parseJSON = parseJSONText "IndexingDatasourcesItemsDeleteMode"

instance ToJSON IndexingDatasourcesItemsDeleteMode where
    toJSON = toJSONText
-- | Limit users selection to this status.
data DebugIdentitysourcesUnmAppedidsListResolutionStatusCode
    = DIUALRSCCodeUnspecified
      -- ^ @CODE_UNSPECIFIED@
      -- Input-only value. Used to list all unmapped identities regardless of
      -- status.
    | DIUALRSCNotFound
      -- ^ @NOT_FOUND@
      -- The unmapped identity was not found in IDaaS, and needs to be
      -- provided by the user.
    | DIUALRSCIdentitySourceNotFound
      -- ^ @IDENTITY_SOURCE_NOT_FOUND@
      -- The identity source associated with the identity was either not found
      -- or deleted.
    | DIUALRSCIdentitySourceMisConfigured
      -- ^ @IDENTITY_SOURCE_MISCONFIGURED@
      -- IDaaS does not understand the identity source, probably because the
      -- schema was modified in a non compatible way.
    | DIUALRSCTooManyMAppingsFound
      -- ^ @TOO_MANY_MAPPINGS_FOUND@
      -- The number of users associated with the external identity is too
      -- large.
    | DIUALRSCInternalError
      -- ^ @INTERNAL_ERROR@
      -- Internal error.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable DebugIdentitysourcesUnmAppedidsListResolutionStatusCode

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData DebugIdentitysourcesUnmAppedidsListResolutionStatusCode where
    parseQueryParam t = case t of
        "CODE_UNSPECIFIED" -> Right DIUALRSCCodeUnspecified
        "NOT_FOUND" -> Right DIUALRSCNotFound
        "IDENTITY_SOURCE_NOT_FOUND" -> Right DIUALRSCIdentitySourceNotFound
        "IDENTITY_SOURCE_MISCONFIGURED" -> Right DIUALRSCIdentitySourceMisConfigured
        "TOO_MANY_MAPPINGS_FOUND" -> Right DIUALRSCTooManyMAppingsFound
        "INTERNAL_ERROR" -> Right DIUALRSCInternalError
        other -> Left ("Unable to parse DebugIdentitysourcesUnmAppedidsListResolutionStatusCode from: " <> other)

instance ToHttpApiData DebugIdentitysourcesUnmAppedidsListResolutionStatusCode where
    toQueryParam DIUALRSCCodeUnspecified = "CODE_UNSPECIFIED"
    toQueryParam DIUALRSCNotFound = "NOT_FOUND"
    toQueryParam DIUALRSCIdentitySourceNotFound = "IDENTITY_SOURCE_NOT_FOUND"
    toQueryParam DIUALRSCIdentitySourceMisConfigured = "IDENTITY_SOURCE_MISCONFIGURED"
    toQueryParam DIUALRSCTooManyMAppingsFound = "TOO_MANY_MAPPINGS_FOUND"
    toQueryParam DIUALRSCInternalError = "INTERNAL_ERROR"

instance FromJSON DebugIdentitysourcesUnmAppedidsListResolutionStatusCode where
    parseJSON = parseJSONText "DebugIdentitysourcesUnmAppedidsListResolutionStatusCode"

instance ToJSON DebugIdentitysourcesUnmAppedidsListResolutionStatusCode where
    toJSON = toJSONText
data PollItemsRequestStatusCodesItem
    = PIRSCICodeUnspecified
      -- ^ @CODE_UNSPECIFIED@
      -- Input-only value. Used with Items.list to list all items in the
      -- queue, regardless of status.
    | PIRSCIError'
      -- ^ @ERROR@
      -- Error encountered by Cloud Search while processing this item. Details
      -- of the error are in repositoryError.
    | PIRSCIModified
      -- ^ @MODIFIED@
      -- Item has been modified in the repository, and is out of date with the
      -- version previously accepted into Cloud Search.
    | PIRSCINewItem
      -- ^ @NEW_ITEM@
      -- Item is known to exist in the repository, but is not yet accepted by
      -- Cloud Search. An item can be in this state when Items.push has been
      -- called for an item of this name that did not exist previously.
    | PIRSCIAccepted
      -- ^ @ACCEPTED@
      -- API has accepted the up-to-date data of this item.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable PollItemsRequestStatusCodesItem

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData PollItemsRequestStatusCodesItem where
    parseQueryParam t = case t of
        "CODE_UNSPECIFIED" -> Right PIRSCICodeUnspecified
        "ERROR" -> Right PIRSCIError'
        "MODIFIED" -> Right PIRSCIModified
        "NEW_ITEM" -> Right PIRSCINewItem
        "ACCEPTED" -> Right PIRSCIAccepted
        other -> Left ("Unable to parse PollItemsRequestStatusCodesItem from: " <> other)

instance ToHttpApiData PollItemsRequestStatusCodesItem where
    toQueryParam PIRSCICodeUnspecified = "CODE_UNSPECIFIED"
    toQueryParam PIRSCIError' = "ERROR"
    toQueryParam PIRSCIModified = "MODIFIED"
    toQueryParam PIRSCINewItem = "NEW_ITEM"
    toQueryParam PIRSCIAccepted = "ACCEPTED"

instance FromJSON PollItemsRequestStatusCodesItem where
    parseJSON = parseJSONText "PollItemsRequestStatusCodesItem"

instance ToJSON PollItemsRequestStatusCodesItem where
    toJSON = toJSONText
-- | The resolution status for the external identity.
data UnmAppedIdentityResolutionStatusCode
    = UAIRSCCodeUnspecified
      -- ^ @CODE_UNSPECIFIED@
      -- Input-only value. Used to list all unmapped identities regardless of
      -- status.
    | UAIRSCNotFound
      -- ^ @NOT_FOUND@
      -- The unmapped identity was not found in IDaaS, and needs to be
      -- provided by the user.
    | UAIRSCIdentitySourceNotFound
      -- ^ @IDENTITY_SOURCE_NOT_FOUND@
      -- The identity source associated with the identity was either not found
      -- or deleted.
    | UAIRSCIdentitySourceMisConfigured
      -- ^ @IDENTITY_SOURCE_MISCONFIGURED@
      -- IDaaS does not understand the identity source, probably because the
      -- schema was modified in a non compatible way.
    | UAIRSCTooManyMAppingsFound
      -- ^ @TOO_MANY_MAPPINGS_FOUND@
      -- The number of users associated with the external identity is too
      -- large.
    | UAIRSCInternalError
      -- ^ @INTERNAL_ERROR@
      -- Internal error.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable UnmAppedIdentityResolutionStatusCode

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData UnmAppedIdentityResolutionStatusCode where
    parseQueryParam t = case t of
        "CODE_UNSPECIFIED" -> Right UAIRSCCodeUnspecified
        "NOT_FOUND" -> Right UAIRSCNotFound
        "IDENTITY_SOURCE_NOT_FOUND" -> Right UAIRSCIdentitySourceNotFound
        "IDENTITY_SOURCE_MISCONFIGURED" -> Right UAIRSCIdentitySourceMisConfigured
        "TOO_MANY_MAPPINGS_FOUND" -> Right UAIRSCTooManyMAppingsFound
        "INTERNAL_ERROR" -> Right UAIRSCInternalError
        other -> Left ("Unable to parse UnmAppedIdentityResolutionStatusCode from: " <> other)

instance ToHttpApiData UnmAppedIdentityResolutionStatusCode where
    toQueryParam UAIRSCCodeUnspecified = "CODE_UNSPECIFIED"
    toQueryParam UAIRSCNotFound = "NOT_FOUND"
    toQueryParam UAIRSCIdentitySourceNotFound = "IDENTITY_SOURCE_NOT_FOUND"
    toQueryParam UAIRSCIdentitySourceMisConfigured = "IDENTITY_SOURCE_MISCONFIGURED"
    toQueryParam UAIRSCTooManyMAppingsFound = "TOO_MANY_MAPPINGS_FOUND"
    toQueryParam UAIRSCInternalError = "INTERNAL_ERROR"

instance FromJSON UnmAppedIdentityResolutionStatusCode where
    parseJSON = parseJSONText "UnmAppedIdentityResolutionStatusCode"

instance ToJSON UnmAppedIdentityResolutionStatusCode where
    toJSON = toJSONText
-- | V1 error format.
data Xgafv
    = X1
      -- ^ @1@
      -- v1 error format
    | X2
      -- ^ @2@
      -- v2 error format
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable Xgafv

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData Xgafv where
    parseQueryParam t = case t of
        "1" -> Right X1
        "2" -> Right X2
        other -> Left ("Unable to parse Xgafv from: " <> other)

instance ToHttpApiData Xgafv where
    toQueryParam X1 = "1"
    toQueryParam X2 = "2"

instance FromJSON Xgafv where
    parseJSON = parseJSONText "Xgafv"

instance ToJSON Xgafv where
    toJSON = toJSONText
-- | Type of error.
data RepositoryErrorType
    = Unknown
      -- ^ @UNKNOWN@
      -- Unknown error.
    | NetworkError
      -- ^ @NETWORK_ERROR@
      -- Unknown or unreachable host.
    | DNSError
      -- ^ @DNS_ERROR@
      -- DNS problem, such as the DNS server is not responding.
    | ConnectionError
      -- ^ @CONNECTION_ERROR@
      -- Cannot connect to the repository server.
    | AuthenticationError
      -- ^ @AUTHENTICATION_ERROR@
      -- Failed authentication due to incorrect credentials.
    | AuthorizationError
      -- ^ @AUTHORIZATION_ERROR@
      -- Service account is not authorized for the repository.
    | ServerError
      -- ^ @SERVER_ERROR@
      -- Repository server error.
    | QuotaExceeded
      -- ^ @QUOTA_EXCEEDED@
      -- Quota exceeded.
    | ServiceUnavailable
      -- ^ @SERVICE_UNAVAILABLE@
      -- Server temporarily unavailable.
    | ClientError
      -- ^ @CLIENT_ERROR@
      -- Client-related error, such as an invalid request from the connector
      -- to the repository server.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable RepositoryErrorType

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData RepositoryErrorType where
    parseQueryParam t = case t of
        "UNKNOWN" -> Right Unknown
        "NETWORK_ERROR" -> Right NetworkError
        "DNS_ERROR" -> Right DNSError
        "CONNECTION_ERROR" -> Right ConnectionError
        "AUTHENTICATION_ERROR" -> Right AuthenticationError
        "AUTHORIZATION_ERROR" -> Right AuthorizationError
        "SERVER_ERROR" -> Right ServerError
        "QUOTA_EXCEEDED" -> Right QuotaExceeded
        "SERVICE_UNAVAILABLE" -> Right ServiceUnavailable
        "CLIENT_ERROR" -> Right ClientError
        other -> Left ("Unable to parse RepositoryErrorType from: " <> other)

instance ToHttpApiData RepositoryErrorType where
    toQueryParam Unknown = "UNKNOWN"
    toQueryParam NetworkError = "NETWORK_ERROR"
    toQueryParam DNSError = "DNS_ERROR"
    toQueryParam ConnectionError = "CONNECTION_ERROR"
    toQueryParam AuthenticationError = "AUTHENTICATION_ERROR"
    toQueryParam AuthorizationError = "AUTHORIZATION_ERROR"
    toQueryParam ServerError = "SERVER_ERROR"
    toQueryParam QuotaExceeded = "QUOTA_EXCEEDED"
    toQueryParam ServiceUnavailable = "SERVICE_UNAVAILABLE"
    toQueryParam ClientError = "CLIENT_ERROR"

instance FromJSON RepositoryErrorType where
    parseJSON = parseJSONText "RepositoryErrorType"

instance ToJSON RepositoryErrorType where
    toJSON = toJSONText
data DriveLocationRestrictType
    = DLRTUnspecified
      -- ^ @UNSPECIFIED@
    | DLRTTrashed
      -- ^ @TRASHED@
    | DLRTStarred
      -- ^ @STARRED@
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable DriveLocationRestrictType

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData DriveLocationRestrictType where
    parseQueryParam t = case t of
        "UNSPECIFIED" -> Right DLRTUnspecified
        "TRASHED" -> Right DLRTTrashed
        "STARRED" -> Right DLRTStarred
        other -> Left ("Unable to parse DriveLocationRestrictType from: " <> other)

instance ToHttpApiData DriveLocationRestrictType where
    toQueryParam DLRTUnspecified = "UNSPECIFIED"
    toQueryParam DLRTTrashed = "TRASHED"
    toQueryParam DLRTStarred = "STARRED"

instance FromJSON DriveLocationRestrictType where
    parseJSON = parseJSONText "DriveLocationRestrictType"

instance ToJSON DriveLocationRestrictType where
    toJSON = toJSONText
-- | Type for this item.
data ItemItemType
    = IITUnspecified
      -- ^ @UNSPECIFIED@
    | IITContentItem
      -- ^ @CONTENT_ITEM@
      -- An item that is indexed for the only purpose of serving information.
      -- These items cannot be referred in containerName or inheritAclFrom
      -- fields.
    | IITContainerItem
      -- ^ @CONTAINER_ITEM@
      -- An item that gets indexed and whose purpose is to supply other items
      -- with ACLs and\/or contain other items.
    | IITVirtualContainerItem
      -- ^ @VIRTUAL_CONTAINER_ITEM@
      -- An item that does not get indexed, but otherwise has the same purpose
      -- as CONTAINER_ITEM.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable ItemItemType

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData ItemItemType where
    parseQueryParam t = case t of
        "UNSPECIFIED" -> Right IITUnspecified
        "CONTENT_ITEM" -> Right IITContentItem
        "CONTAINER_ITEM" -> Right IITContainerItem
        "VIRTUAL_CONTAINER_ITEM" -> Right IITVirtualContainerItem
        other -> Left ("Unable to parse ItemItemType from: " <> other)

instance ToHttpApiData ItemItemType where
    toQueryParam IITUnspecified = "UNSPECIFIED"
    toQueryParam IITContentItem = "CONTENT_ITEM"
    toQueryParam IITContainerItem = "CONTAINER_ITEM"
    toQueryParam IITVirtualContainerItem = "VIRTUAL_CONTAINER_ITEM"

instance FromJSON ItemItemType where
    parseJSON = parseJSONText "ItemItemType"

instance ToJSON ItemItemType where
    toJSON = toJSONText
-- | Status of the items.
data ItemCountByStatusStatusCode
    = ICBSSCCodeUnspecified
      -- ^ @CODE_UNSPECIFIED@
      -- Input-only value. Used with Items.list to list all items in the
      -- queue, regardless of status.
    | ICBSSCError'
      -- ^ @ERROR@
      -- Error encountered by Cloud Search while processing this item. Details
      -- of the error are in repositoryError.
    | ICBSSCModified
      -- ^ @MODIFIED@
      -- Item has been modified in the repository, and is out of date with the
      -- version previously accepted into Cloud Search.
    | ICBSSCNewItem
      -- ^ @NEW_ITEM@
      -- Item is known to exist in the repository, but is not yet accepted by
      -- Cloud Search. An item can be in this state when Items.push has been
      -- called for an item of this name that did not exist previously.
    | ICBSSCAccepted
      -- ^ @ACCEPTED@
      -- API has accepted the up-to-date data of this item.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable ItemCountByStatusStatusCode

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData ItemCountByStatusStatusCode where
    parseQueryParam t = case t of
        "CODE_UNSPECIFIED" -> Right ICBSSCCodeUnspecified
        "ERROR" -> Right ICBSSCError'
        "MODIFIED" -> Right ICBSSCModified
        "NEW_ITEM" -> Right ICBSSCNewItem
        "ACCEPTED" -> Right ICBSSCAccepted
        other -> Left ("Unable to parse ItemCountByStatusStatusCode from: " <> other)

instance ToHttpApiData ItemCountByStatusStatusCode where
    toQueryParam ICBSSCCodeUnspecified = "CODE_UNSPECIFIED"
    toQueryParam ICBSSCError' = "ERROR"
    toQueryParam ICBSSCModified = "MODIFIED"
    toQueryParam ICBSSCNewItem = "NEW_ITEM"
    toQueryParam ICBSSCAccepted = "ACCEPTED"

instance FromJSON ItemCountByStatusStatusCode where
    parseJSON = parseJSONText "ItemCountByStatusStatusCode"

instance ToJSON ItemCountByStatusStatusCode where
    toJSON = toJSONText
data ItemContentContentFormat
    = ICCFUnspecified
      -- ^ @UNSPECIFIED@
      -- Invalid value.
    | ICCFHTML
      -- ^ @HTML@
      -- contentFormat is HTML.
    | ICCFText
      -- ^ @TEXT@
      -- contentFormat is free text.
    | ICCFRaw
      -- ^ @RAW@
      -- contentFormat is raw bytes.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable ItemContentContentFormat

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData ItemContentContentFormat where
    parseQueryParam t = case t of
        "UNSPECIFIED" -> Right ICCFUnspecified
        "HTML" -> Right ICCFHTML
        "TEXT" -> Right ICCFText
        "RAW" -> Right ICCFRaw
        other -> Left ("Unable to parse ItemContentContentFormat from: " <> other)

instance ToHttpApiData ItemContentContentFormat where
    toQueryParam ICCFUnspecified = "UNSPECIFIED"
    toQueryParam ICCFHTML = "HTML"
    toQueryParam ICCFText = "TEXT"
    toQueryParam ICCFRaw = "RAW"

instance FromJSON ItemContentContentFormat where
    parseJSON = parseJSONText "ItemContentContentFormat"

instance ToJSON ItemContentContentFormat where
    toJSON = toJSONText
-- | Predefined content source for Google Apps.
data SourcePredefinedSource
    = SPSNone
      -- ^ @NONE@
    | SPSQueryHistory
      -- ^ @QUERY_HISTORY@
      -- Suggests queries issued by the user in the past. Only valid when used
      -- with the suggest API. Ignored when used in the query API.
    | SPSPerson
      -- ^ @PERSON@
      -- Suggests people in the organization. Only valid when used with the
      -- suggest API. Results in an error when used in the query API.
    | SPSGoogleDrive
      -- ^ @GOOGLE_DRIVE@
    | SPSGoogleGmail
      -- ^ @GOOGLE_GMAIL@
    | SPSGoogleSites
      -- ^ @GOOGLE_SITES@
    | SPSGoogleGroups
      -- ^ @GOOGLE_GROUPS@
    | SPSGoogleCalendar
      -- ^ @GOOGLE_CALENDAR@
    | SPSGoogleKeep
      -- ^ @GOOGLE_KEEP@
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable SourcePredefinedSource

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData SourcePredefinedSource where
    parseQueryParam t = case t of
        "NONE" -> Right SPSNone
        "QUERY_HISTORY" -> Right SPSQueryHistory
        "PERSON" -> Right SPSPerson
        "GOOGLE_DRIVE" -> Right SPSGoogleDrive
        "GOOGLE_GMAIL" -> Right SPSGoogleGmail
        "GOOGLE_SITES" -> Right SPSGoogleSites
        "GOOGLE_GROUPS" -> Right SPSGoogleGroups
        "GOOGLE_CALENDAR" -> Right SPSGoogleCalendar
        "GOOGLE_KEEP" -> Right SPSGoogleKeep
        other -> Left ("Unable to parse SourcePredefinedSource from: " <> other)

instance ToHttpApiData SourcePredefinedSource where
    toQueryParam SPSNone = "NONE"
    toQueryParam SPSQueryHistory = "QUERY_HISTORY"
    toQueryParam SPSPerson = "PERSON"
    toQueryParam SPSGoogleDrive = "GOOGLE_DRIVE"
    toQueryParam SPSGoogleGmail = "GOOGLE_GMAIL"
    toQueryParam SPSGoogleSites = "GOOGLE_SITES"
    toQueryParam SPSGoogleGroups = "GOOGLE_GROUPS"
    toQueryParam SPSGoogleCalendar = "GOOGLE_CALENDAR"
    toQueryParam SPSGoogleKeep = "GOOGLE_KEEP"

instance FromJSON SourcePredefinedSource where
    parseJSON = parseJSONText "SourcePredefinedSource"

instance ToJSON SourcePredefinedSource where
    toJSON = toJSONText
data DriveTimeSpanRestrictType
    = DTSRTUnspecified
      -- ^ @UNSPECIFIED@
    | DTSRTToday
      -- ^ @TODAY@
    | DTSRTYesterday
      -- ^ @YESTERDAY@
    | DTSRTLast7Days
      -- ^ @LAST_7_DAYS@
    | DTSRTLast30Days
      -- ^ @LAST_30_DAYS@
      -- Not Enabled
    | DTSRTLast90Days
      -- ^ @LAST_90_DAYS@
      -- Not Enabled
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable DriveTimeSpanRestrictType

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData DriveTimeSpanRestrictType where
    parseQueryParam t = case t of
        "UNSPECIFIED" -> Right DTSRTUnspecified
        "TODAY" -> Right DTSRTToday
        "YESTERDAY" -> Right DTSRTYesterday
        "LAST_7_DAYS" -> Right DTSRTLast7Days
        "LAST_30_DAYS" -> Right DTSRTLast30Days
        "LAST_90_DAYS" -> Right DTSRTLast90Days
        other -> Left ("Unable to parse DriveTimeSpanRestrictType from: " <> other)

instance ToHttpApiData DriveTimeSpanRestrictType where
    toQueryParam DTSRTUnspecified = "UNSPECIFIED"
    toQueryParam DTSRTToday = "TODAY"
    toQueryParam DTSRTYesterday = "YESTERDAY"
    toQueryParam DTSRTLast7Days = "LAST_7_DAYS"
    toQueryParam DTSRTLast30Days = "LAST_30_DAYS"
    toQueryParam DTSRTLast90Days = "LAST_90_DAYS"

instance FromJSON DriveTimeSpanRestrictType where
    parseJSON = parseJSONText "DriveTimeSpanRestrictType"

instance ToJSON DriveTimeSpanRestrictType where
    toJSON = toJSONText
-- | Required. The RequestMode for this request.
data IndexItemRequestMode
    = IIRMUnspecified
      -- ^ @UNSPECIFIED@
      -- Priority is not specified in the update request. Leaving priority
      -- unspecified results in an update failure.
    | IIRMSynchronous
      -- ^ @SYNCHRONOUS@
      -- For real-time updates.
    | IIRMAsynchronous
      -- ^ @ASYNCHRONOUS@
      -- For changes that are executed after the response is sent back to the
      -- caller.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable IndexItemRequestMode

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData IndexItemRequestMode where
    parseQueryParam t = case t of
        "UNSPECIFIED" -> Right IIRMUnspecified
        "SYNCHRONOUS" -> Right IIRMSynchronous
        "ASYNCHRONOUS" -> Right IIRMAsynchronous
        other -> Left ("Unable to parse IndexItemRequestMode from: " <> other)

instance ToHttpApiData IndexItemRequestMode where
    toQueryParam IIRMUnspecified = "UNSPECIFIED"
    toQueryParam IIRMSynchronous = "SYNCHRONOUS"
    toQueryParam IIRMAsynchronous = "ASYNCHRONOUS"

instance FromJSON IndexItemRequestMode where
    parseJSON = parseJSONText "IndexItemRequestMode"

instance ToJSON IndexItemRequestMode where
    toJSON = toJSONText
-- | Sets the type of access rules to apply when an item inherits its ACL
-- from a parent. This should always be set in tandem with the
-- inheritAclFrom field. Also, when the inheritAclFrom field is set, this
-- field should be set to a valid AclInheritanceType.
data ItemACLACLInheritanceType
    = NotApplicable
      -- ^ @NOT_APPLICABLE@
      -- The default value when this item does not inherit an ACL. Use
      -- NOT_APPLICABLE when inheritAclFrom is empty. An item without ACL
      -- inheritance can still have ACLs supplied by its own readers and
      -- deniedReaders fields.
    | ChildOverride
      -- ^ @CHILD_OVERRIDE@
      -- During an authorization conflict, the ACL of the child item
      -- determines its read access.
    | ParentOverride
      -- ^ @PARENT_OVERRIDE@
      -- During an authorization conflict, the ACL of the parent item
      -- specified in the inheritAclFrom field determines read access.
    | BothPermit
      -- ^ @BOTH_PERMIT@
      -- Access is granted only if this item and the parent item specified in
      -- the inheritAclFrom field both permit read access.
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable ItemACLACLInheritanceType

-- Wire-name codec: the parser below is the exact inverse of the printer.
instance FromHttpApiData ItemACLACLInheritanceType where
    parseQueryParam t = case t of
        "NOT_APPLICABLE" -> Right NotApplicable
        "CHILD_OVERRIDE" -> Right ChildOverride
        "PARENT_OVERRIDE" -> Right ParentOverride
        "BOTH_PERMIT" -> Right BothPermit
        other -> Left ("Unable to parse ItemACLACLInheritanceType from: " <> other)

instance ToHttpApiData ItemACLACLInheritanceType where
    toQueryParam NotApplicable = "NOT_APPLICABLE"
    toQueryParam ChildOverride = "CHILD_OVERRIDE"
    toQueryParam ParentOverride = "PARENT_OVERRIDE"
    toQueryParam BothPermit = "BOTH_PERMIT"

instance FromJSON ItemACLACLInheritanceType where
    parseJSON = parseJSONText "ItemACLACLInheritanceType"

instance ToJSON ItemACLACLInheritanceType where
    toJSON = toJSONText
-- Provenance: brendanhay/gogol — gogol-cloudsearch/gen/Network/Google/CloudSearch/Types/Sum.hs (MPL-2.0).
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.ManagedConfigurationsforDevice.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Adds or updates a per-device managed configuration for an app for the
-- specified device.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.managedconfigurationsfordevice.update@.
module Network.Google.Resource.AndroidEnterprise.ManagedConfigurationsforDevice.Update
(
-- * REST Resource
ManagedConfigurationsforDeviceUpdateResource
-- * Creating a Request
, managedConfigurationsforDeviceUpdate
, ManagedConfigurationsforDeviceUpdate
-- * Request Lenses
, mcduEnterpriseId
, mcduPayload
, mcduUserId
, mcduDeviceId
, mcduManagedConfigurationForDeviceId
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.managedconfigurationsfordevice.update@ method which the
-- 'ManagedConfigurationsforDeviceUpdate' request conforms to.
--
-- Type-level route: PUT
-- @androidenterprise\/v1\/enterprises\/{enterpriseId}\/users\/{userId}\/devices\/{deviceId}\/managedConfigurationsForDevice\/{managedConfigurationForDeviceId}?alt=json@
-- with a 'ManagedConfiguration' JSON request body, responding with a
-- 'ManagedConfiguration'.
type ManagedConfigurationsforDeviceUpdateResource =
     "androidenterprise" :>
       "v1" :>
         "enterprises" :>
           Capture "enterpriseId" Text :>
             "users" :>
               Capture "userId" Text :>
                 "devices" :>
                   Capture "deviceId" Text :>
                     "managedConfigurationsForDevice" :>
                       Capture "managedConfigurationForDeviceId" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] ManagedConfiguration :>
                             Put '[JSON] ManagedConfiguration
-- | Adds or updates a per-device managed configuration for an app for the
-- specified device.
--
-- All five fields are required; construct values with the
-- 'managedConfigurationsforDeviceUpdate' smart constructor and adjust them
-- through the @mcdu*@ lenses.
--
-- /See:/ 'managedConfigurationsforDeviceUpdate' smart constructor.
data ManagedConfigurationsforDeviceUpdate = ManagedConfigurationsforDeviceUpdate'
    { _mcduEnterpriseId :: !Text
      -- ^ The ID of the enterprise (path component).
    , _mcduPayload :: !ManagedConfiguration
      -- ^ The 'ManagedConfiguration' sent as the request body.
    , _mcduUserId :: !Text
      -- ^ The ID of the user (path component).
    , _mcduDeviceId :: !Text
      -- ^ The Android ID of the device (path component).
    , _mcduManagedConfigurationForDeviceId :: !Text
      -- ^ The ID of the managed configuration (a product ID).
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ManagedConfigurationsforDeviceUpdate' with the minimum fields required to make a request.
--
-- Every field of the record is a required argument here — there are no
-- optional fields with defaults for this request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mcduEnterpriseId'
--
-- * 'mcduPayload'
--
-- * 'mcduUserId'
--
-- * 'mcduDeviceId'
--
-- * 'mcduManagedConfigurationForDeviceId'
managedConfigurationsforDeviceUpdate
    :: Text -- ^ 'mcduEnterpriseId'
    -> ManagedConfiguration -- ^ 'mcduPayload'
    -> Text -- ^ 'mcduUserId'
    -> Text -- ^ 'mcduDeviceId'
    -> Text -- ^ 'mcduManagedConfigurationForDeviceId'
    -> ManagedConfigurationsforDeviceUpdate
managedConfigurationsforDeviceUpdate pMcduEnterpriseId_ pMcduPayload_ pMcduUserId_ pMcduDeviceId_ pMcduManagedConfigurationForDeviceId_ =
    ManagedConfigurationsforDeviceUpdate'
    { _mcduEnterpriseId = pMcduEnterpriseId_
    , _mcduPayload = pMcduPayload_
    , _mcduUserId = pMcduUserId_
    , _mcduDeviceId = pMcduDeviceId_
    , _mcduManagedConfigurationForDeviceId = pMcduManagedConfigurationForDeviceId_
    }
-- | The ID of the enterprise.
--
-- Lens over '_mcduEnterpriseId'; supplies the @enterpriseId@ path capture.
mcduEnterpriseId :: Lens' ManagedConfigurationsforDeviceUpdate Text
mcduEnterpriseId
  = lens _mcduEnterpriseId
      (\ s a -> s{_mcduEnterpriseId = a})
-- | Multipart request metadata.
--
-- Lens over '_mcduPayload'; the 'ManagedConfiguration' submitted with the
-- request.
mcduPayload :: Lens' ManagedConfigurationsforDeviceUpdate ManagedConfiguration
mcduPayload
  = lens _mcduPayload (\ s a -> s{_mcduPayload = a})
-- | The ID of the user.
--
-- Lens over '_mcduUserId'; supplies the @userId@ path capture.
mcduUserId :: Lens' ManagedConfigurationsforDeviceUpdate Text
mcduUserId
  = lens _mcduUserId (\ s a -> s{_mcduUserId = a})
-- | The Android ID of the device.
--
-- Lens over '_mcduDeviceId'; supplies the @deviceId@ path capture.
mcduDeviceId :: Lens' ManagedConfigurationsforDeviceUpdate Text
mcduDeviceId
  = lens _mcduDeviceId (\ s a -> s{_mcduDeviceId = a})
-- | The ID of the managed configuration (a product ID), e.g.
-- \"app:com.google.android.gm\".
--
-- Lens over '_mcduManagedConfigurationForDeviceId'; supplies the
-- @managedConfigurationForDeviceId@ path capture.
mcduManagedConfigurationForDeviceId :: Lens' ManagedConfigurationsforDeviceUpdate Text
mcduManagedConfigurationForDeviceId
  = lens _mcduManagedConfigurationForDeviceId
      (\ s a ->
         s{_mcduManagedConfigurationForDeviceId = a})
instance GoogleRequest
           ManagedConfigurationsforDeviceUpdate where
        -- Response type: the stored 'ManagedConfiguration'.
        type Rs ManagedConfigurationsforDeviceUpdate =
             ManagedConfiguration
        -- OAuth scope required by this endpoint.
        type Scopes ManagedConfigurationsforDeviceUpdate =
             '["https://www.googleapis.com/auth/androidenterprise"]
        -- Splice the record's path components, the JSON @alt@ parameter, and
        -- the payload into a client built from
        -- 'ManagedConfigurationsforDeviceUpdateResource'. Argument order must
        -- match the captures in the resource type.
        requestClient
          ManagedConfigurationsforDeviceUpdate'{..}
          = go _mcduEnterpriseId _mcduUserId _mcduDeviceId
              _mcduManagedConfigurationForDeviceId
              (Just AltJSON)
              _mcduPayload
              androidEnterpriseService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ManagedConfigurationsforDeviceUpdateResource)
                      mempty
| rueshyna/gogol | gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/ManagedConfigurationsforDevice/Update.hs | mpl-2.0 | 5,753 | 0 | 19 | 1,312 | 622 | 368 | 254 | 105 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.Videos.GetRating
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the ratings that the authorized user gave to a list of
-- specified videos.
--
-- /See:/ <https://developers.google.com/youtube/ YouTube Data API v3 Reference> for @youtube.videos.getRating@.
module Network.Google.Resource.YouTube.Videos.GetRating
(
-- * REST Resource
VideosGetRatingResource
-- * Creating a Request
, videosGetRating
, VideosGetRating
-- * Request Lenses
, vgrXgafv
, vgrUploadProtocol
, vgrAccessToken
, vgrUploadType
, vgrOnBehalfOfContentOwner
, vgrId
, vgrCallback
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.videos.getRating@ method which the
-- 'VideosGetRating' request conforms to.
--
-- The request is a GET on @\/youtube\/v3\/videos\/getRating@; @id@ is a
-- repeatable query parameter ('QueryParams'), the rest are optional
-- single-valued parameters.
type VideosGetRatingResource =
     "youtube" :>
       "v3" :>
         "videos" :>
           "getRating" :>
             QueryParams "id" Text :>
               QueryParam "$.xgafv" Xgafv :>
                 QueryParam "upload_protocol" Text :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "onBehalfOfContentOwner" Text :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltJSON :>
                             Get '[JSON] VideoGetRatingResponse
-- | Retrieves the ratings that the authorized user gave to a list of
-- specified videos.
--
-- /See:/ 'videosGetRating' smart constructor.
data VideosGetRating =
  VideosGetRating'
    { _vgrXgafv :: !(Maybe Xgafv) -- ^ V1 error format (see 'vgrXgafv')
    , _vgrUploadProtocol :: !(Maybe Text) -- ^ see 'vgrUploadProtocol'
    , _vgrAccessToken :: !(Maybe Text) -- ^ OAuth access token (see 'vgrAccessToken')
    , _vgrUploadType :: !(Maybe Text) -- ^ see 'vgrUploadType'
    , _vgrOnBehalfOfContentOwner :: !(Maybe Text) -- ^ see 'vgrOnBehalfOfContentOwner'
    , _vgrId :: ![Text] -- ^ repeatable @id@ query parameter (see 'vgrId')
    , _vgrCallback :: !(Maybe Text) -- ^ JSONP callback (see 'vgrCallback')
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'VideosGetRating' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vgrXgafv'
--
-- * 'vgrUploadProtocol'
--
-- * 'vgrAccessToken'
--
-- * 'vgrUploadType'
--
-- * 'vgrOnBehalfOfContentOwner'
--
-- * 'vgrId'
--
-- * 'vgrCallback'
videosGetRating
    :: [Text] -- ^ 'vgrId'
    -> VideosGetRating
videosGetRating videoIds =
  VideosGetRating'
    { _vgrXgafv = Nothing
    , _vgrUploadProtocol = Nothing
    , _vgrAccessToken = Nothing
    , _vgrUploadType = Nothing
    , _vgrOnBehalfOfContentOwner = Nothing
    , _vgrId = _Coerce # videoIds
    , _vgrCallback = Nothing
    }
-- | V1 error format.
vgrXgafv :: Lens' VideosGetRating (Maybe Xgafv)
vgrXgafv = lens _vgrXgafv (\obj val -> obj {_vgrXgafv = val})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
vgrUploadProtocol :: Lens' VideosGetRating (Maybe Text)
vgrUploadProtocol =
    lens _vgrUploadProtocol (\obj val -> obj {_vgrUploadProtocol = val})

-- | OAuth access token.
vgrAccessToken :: Lens' VideosGetRating (Maybe Text)
vgrAccessToken =
    lens _vgrAccessToken (\obj val -> obj {_vgrAccessToken = val})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
vgrUploadType :: Lens' VideosGetRating (Maybe Text)
vgrUploadType =
    lens _vgrUploadType (\obj val -> obj {_vgrUploadType = val})

-- | *Note:* This parameter is intended exclusively for YouTube content
-- partners. The *onBehalfOfContentOwner* parameter indicates that the
-- request\'s authorization credentials identify a YouTube CMS user who is
-- acting on behalf of the content owner specified in the parameter value.
-- This parameter is intended for YouTube content partners that own and
-- manage many different YouTube channels. It allows content owners to
-- authenticate once and get access to all their video and channel data,
-- without having to provide authentication credentials for each individual
-- channel. The CMS account that the user authenticates with must be linked
-- to the specified YouTube content owner.
vgrOnBehalfOfContentOwner :: Lens' VideosGetRating (Maybe Text)
vgrOnBehalfOfContentOwner =
    lens _vgrOnBehalfOfContentOwner
         (\obj val -> obj {_vgrOnBehalfOfContentOwner = val})

-- | The IDs of the videos whose ratings are requested (sent as the
-- repeatable @id@ query parameter).
vgrId :: Lens' VideosGetRating [Text]
vgrId = lens _vgrId (\obj val -> obj {_vgrId = val}) . _Coerce

-- | JSONP
vgrCallback :: Lens' VideosGetRating (Maybe Text)
vgrCallback = lens _vgrCallback (\obj val -> obj {_vgrCallback = val})
-- | Wiring for actually issuing the request: the response body decodes to a
-- 'VideoGetRatingResponse', and any of the three YouTube OAuth scopes below
-- authorizes the call. The order of the arguments passed to @go@ follows the
-- structure of 'VideosGetRatingResource' (the repeated @id@ parameter first,
-- then the remaining query parameters and @alt@).
instance GoogleRequest VideosGetRating where
        type Rs VideosGetRating = VideoGetRatingResponse
        type Scopes VideosGetRating =
             '["https://www.googleapis.com/auth/youtube",
               "https://www.googleapis.com/auth/youtube.force-ssl",
               "https://www.googleapis.com/auth/youtubepartner"]
        requestClient VideosGetRating'{..}
          = go _vgrId _vgrXgafv _vgrUploadProtocol
              _vgrAccessToken
              _vgrUploadType
              _vgrOnBehalfOfContentOwner
              _vgrCallback
              (Just AltJSON)
              youTubeService
          where go
                  = buildClient
                      (Proxy :: Proxy VideosGetRatingResource)
                      mempty
| brendanhay/gogol | gogol-youtube/gen/Network/Google/Resource/YouTube/Videos/GetRating.hs | mpl-2.0 | 5,855 | 0 | 19 | 1,385 | 819 | 480 | 339 | 118 | 1 |
module Console
where
import Control.Monad.State
import Control.Concurrent.STM
import Char
import Control.Exception
import Prelude hiding (catch)
import Libaddutil.STMServer
import Freekick.Libmatch.MatchStatus
-- | Interpreter state: 'status' is toggled by the @status on|off@ console
-- command. Nothing in this module reads the flag back; its consumer (if any)
-- lives outside this module.
data ConsoleStatus = ConsoleStatus { status :: Bool }
-- | Initial interpreter state: status output switched off.
startConsoleStatus :: ConsoleStatus
startConsoleStatus = ConsoleStatus { status = False }
-- | Blocking console command loop. Reads one line per iteration, snapshots
-- the client list and match status from their 'TVar's, dispatches on the
-- first word (case-insensitively) and then loops forever.
--
-- Commands: @clients@ (count connected clients), @help@, @status on|off@
-- (updates the 'ConsoleStatus' carried through the recursion), @ball@
-- (show the current 'ballplay') and @ball \<value\>@ (parse and store a new
-- 'ballplay' value).
doInterpreter :: TVar [Client] -> TVar MatchStatus -> ConsoleStatus -> IO ()
doInterpreter c mb s = do
    n <- getLine
    cl <- atomically $ readTVar c
    m <- atomically $ readTVar mb
    -- Matching on 'words n' instead of testing 'length n > 0' also fixes a
    -- crash: a line containing only spaces used to pass the length test and
    -- then hit 'head (words n)' == head [].
    case words n of
      [] -> return ()  -- empty or whitespace-only line: just re-prompt
      (cmd : args) -> case map Char.toLower cmd of
        "clients" -> putStrLn ("Number of clients connected: " ++ show (length cl))
        "help"    -> putStrLn consoleHelp
        "status"  -> case args of
          -- "status" without an argument is silently ignored, as before.
          []      -> return ()
          (a : _) -> case map Char.toLower a of
            "on" -> doInterpreter c mb s{status = True}
            _    -> doInterpreter c mb s{status = False}
        "ball"
          | null args -> putStrLn (show (ballplay m))
          | otherwise -> do
              -- A failed 'read' raises 'ErrorCall', not 'IOException', so the
              -- old handler never fired and bad input killed the loop; catch
              -- all synchronous exceptions and keep the previous value instead.
              nbp <- evaluate (read (unwords args)) `catch`
                       (\e -> putStrLn ("Invalid parameters: " ++ show (e :: SomeException))
                              >> return (ballplay m))
              atomically $ writeTVar mb m{ballplay = nbp}
        _ -> putStrLn consoleHelp
    doInterpreter c mb s
-- | Usage text printed by the @help@ command (and on unrecognised input).
consoleHelp :: String
consoleHelp =
    "List of commands:\n" ++
    "clients\n" ++
    "help\n" ++
    "status on|off"
| anttisalonen/freekick | haskell/freekick_server/physics/src/Console.hs | agpl-3.0 | 1,646 | 0 | 26 | 535 | 537 | 272 | 265 | 34 | 7 |
{-# OPTIONS -XFlexibleInstances #-}
{-# OPTIONS -XMultiParamTypeClasses #-}
--------------------------------------------------------------------------------
-- $Id: LookupMapTest.hs,v 1.1 2004/01/13 12:31:24 graham Exp $
--
-- Copyright (c) 2003, G. KLYNE. All rights reserved.
-- See end of this file for licence information.
--------------------------------------------------------------------------------
-- |
-- Module : LookupMapTest
-- Copyright : (c) 2003, Graham Klyne
-- License : GPL V2
--
-- Maintainer : Graham Klyne
-- Stability : provisional
-- Portability : H98
--
-- This Module defines test cases for module Parse parsing functions.
--
--------------------------------------------------------------------------------
-- WNH RIP OUT module Swish.HaskellUtils.LookupMapTest where
import Swish.HaskellUtils.LookupMap
( LookupEntryClass(..), LookupMap(..)
, emptyLookupMap, makeLookupMap, listLookupMap
, reverseLookupMap
, keyOrder
, mapFind, mapFindMaybe, mapContains
, mapReplace, mapReplaceOrAdd, mapReplaceAll, mapReplaceMap
, mapAdd, mapAddIfNew
, mapDelete, mapDeleteAll
, mapApplyToAll, mapTranslate
, mapEq, mapKeys, mapVals
, mapSelect, mapMerge
, mapSortByKey, mapSortByVal
, mapTranslateKeys, mapTranslateVals
, mapTranslateEntries, mapTranslateEntriesM
)
import Swish.HaskellUtils.ListHelpers
( equiv )
import Data.List ( sort )
import System.IO
( Handle, IOMode(WriteMode)
, openFile, hClose, hPutStr, hPutStrLn )
import Test.HUnit
( Test(TestCase,TestList,TestLabel)
, assertEqual, runTestTT, runTestText, putTextToHandle )
------------------------------------------------------------
-- Declare lookup entry for testing
------------------------------------------------------------
-- | Generic key/value pair used as the 'LookupEntryClass' instance under test.
data GenMapEntry a b = E a b
-- Make 'GenMapEntry' usable with all LookupMap operations.
instance (Eq a, Show a, Eq b, Show b)
    => LookupEntryClass (GenMapEntry a b) a b
    where
        keyVal (E k v) = (k,v)
        newEntry (k,v) = (E k v)
-- Show/Eq delegate to the generic entry helpers from the LookupMap module.
instance (Eq a, Show a, Eq b, Show b) => Show (GenMapEntry a b) where
    show = entryShow
instance (Eq a, Show a, Eq b, Show b) => Eq (GenMapEntry a b) where
    (==) = entryEq
-- Concrete map types exercised by the test cases below.
type TestEntry = GenMapEntry Int String
type TestMap = LookupMap (GenMapEntry Int String)
type RevTestMap = LookupMap (GenMapEntry String Int)
type MayTestMap = Maybe RevTestMap
type StrTestMap = LookupMap (GenMapEntry String String)
------------------------------------------------------------
-- Test class helper
------------------------------------------------------------
-- | HUnit test asserting the actual value equals the expected one;
-- the label is prefixed with \"test\".
testeq :: (Show a, Eq a) => String -> a -> a -> Test
testeq lab req got =
    TestCase ( assertEqual ("test"++lab) req got )
-- | Like 'testeq' but compares the lists up to reordering via 'equiv'.
testeqv :: (Show a, Eq a) => String -> [a] -> [a] -> Test
testeqv lab req got =
    TestCase ( assertEqual ("test"++lab) True (req `equiv` got) )
------------------------------------------------------------
-- LookupMap functions
------------------------------------------------------------
-- | Build a 'TestMap' from an association list (first occurrence wins on lookup).
newMap :: [(Int,String)] -> TestMap
newMap es = makeLookupMap (map newEntry es)
-- | Assert that a map equals the map built from the given association list.
testLookupMap :: String -> TestMap -> [(Int,String)] -> Test
testLookupMap lab m1 m2 = testeq ("LookupMap"++lab ) (newMap m2) m1
-- | Assert the result of 'mapFind' (with \"\" as the not-found default).
testLookupMapFind :: String -> TestMap -> Int -> String -> Test
testLookupMapFind lab lm k res =
    testeq ("LookupMapFind"++lab ) res (mapFind "" k lm)
-- lm00..lm09: build a map step by step (add / replace / delete) and check
-- the full contents plus one sample lookup after each mutation.
lm00 = newMap []
testLookupMap00 = testLookupMap "00" lm00 []
testLookupMapFind00 = testLookupMapFind "00" lm00 2 ""
lm01 = mapAdd lm00 $ newEntry (1,"aaa")
testLookupMap01 = testLookupMap "01" lm01 [(1,"aaa")]
testLookupMapFind01 = testLookupMapFind "01" lm01 2 ""
lm02 = mapAdd lm01 $ newEntry (2,"bbb")
testLookupMap02 = testLookupMap "02" lm02 [(2,"bbb"),(1,"aaa")]
testLookupMapFind02 = testLookupMapFind "02" lm02 2 "bbb"
lm03 = mapAdd lm02 $ newEntry (3,"ccc")
testLookupMap03 = testLookupMap "03" lm03 [(3,"ccc"),(2,"bbb"),(1,"aaa")]
testLookupMapFind03 = testLookupMapFind "03" lm03 2 "bbb"
-- mapAdd of a duplicate key keeps both entries.
lm04 = mapAdd lm03 $ newEntry (2,"bbb")
testLookupMap04 = testLookupMap "04" lm04 [(2,"bbb"),(3,"ccc"),(2,"bbb"),(1,"aaa")]
testLookupMapFind04 = testLookupMapFind "04" lm04 2 "bbb"
-- mapReplaceAll rewrites every entry with a matching key; no-op for absent keys.
lm05 = mapReplaceAll lm04 $ newEntry (2,"bbb1")
testLookupMap05 = testLookupMap "05" lm05 [(2,"bbb1"),(3,"ccc"),(2,"bbb1"),(1,"aaa")]
testLookupMapFind05 = testLookupMapFind "05" lm05 2 "bbb1"
lm06 = mapReplaceAll lm05 $ newEntry (9,"zzzz")
testLookupMap06 = testLookupMap "06" lm06 [(2,"bbb1"),(3,"ccc"),(2,"bbb1"),(1,"aaa")]
testLookupMapFind06 = testLookupMapFind "06" lm06 2 "bbb1"
-- mapReplace replaces only the first matching entry.
lm07 = mapReplace lm06 $ newEntry (2,"bbb")
testLookupMap07 = testLookupMap "07" lm07 [(2,"bbb"),(3,"ccc"),(2,"bbb1"),(1,"aaa")]
testLookupMapFind07 = testLookupMapFind "07" lm07 2 "bbb"
testLookupMapFind0x = testLookupMapFind "0x" lm07 9 ""
-- mapDelete removes the first match, mapDeleteAll removes every match.
lm08 = mapDelete lm07 3
testLookupMap08 = testLookupMap "08" lm08 [(2,"bbb"),(2,"bbb1"),(1,"aaa")]
testLookupMapFind08 = testLookupMapFind "08" lm08 2 "bbb"
lm09 = mapDeleteAll lm08 2
testLookupMap09 = testLookupMap "09" lm09 [(1,"aaa")]
testLookupMapFind09 = testLookupMapFind "09" lm09 2 ""
-- mapApplyToAll / mapTranslate: map a function over the values, then look
-- results up by key with a fallback default.
la10 = mapApplyToAll lm03 (flip replicate '*')
testLookupMapApp10 = testeq "LookupMapApplyToAll10" ["***","**","*"] la10
lt11 = mapTranslate lm03 la10 1 "****"
testLookupMapTran11 = testeq "LookupMapTranslate11" "*" lt11
lt12 = mapTranslate lm03 la10 2 "****"
testLookupMapTran12 = testeq "LookupMapTranslate12" "**" lt12
lt13 = mapTranslate lm03 la10 3 "****"
testLookupMapTran13 = testeq "LookupMapTranslate13" "***" lt13
lt14 = mapTranslate lm03 la10 4 "****"
testLookupMapTran14 = testeq "LookupMapTranslate14" "****" lt14
-- mapReplaceMap: bulk replacement from another map; absent keys are ignored.
lm20 = mapReplaceMap lm05 $ newMap [(2,"bbb20"),(3,"ccc20")]
testLookupMap20 = testLookupMap "20" lm20 [(2,"bbb20"),(3,"ccc20"),(2,"bbb20"),(1,"aaa")]
testLookupMapFind20 = testLookupMapFind "20" lm20 2 "bbb20"
lm21 = mapReplaceMap lm05 $ newMap []
testLookupMap21 = testLookupMap "21" lm21 [(2,"bbb1"),(3,"ccc"),(2,"bbb1"),(1,"aaa")]
testLookupMapFind21 = testLookupMapFind "21" lm21 2 "bbb1"
lm22 = mapReplaceMap lm05 $ newMap [(9,"zzz22"),(1,"aaa22")]
testLookupMap22 = testLookupMap "22" lm22 [(2,"bbb1"),(3,"ccc"),(2,"bbb1"),(1,"aaa22")]
testLookupMapFind22 = testLookupMapFind "22" lm22 1 "aaa22"
-- mapContains / mapAddIfNew / mapReplaceOrAdd behaviour.
testLookupContains31 = testeq "LookupContains31" True (mapContains lm22 2)
testLookupContains32 = testeq "LookupContains32" False (mapContains lm22 9)
lm33 = mapAddIfNew lm22 $ newEntry (1,"aaa33")
testLookupMap33 = testLookupMap "33" lm33 [(2,"bbb1"),(3,"ccc"),(2,"bbb1"),(1,"aaa22")]
testLookupMapFind33a = testLookupMapFind "33a" lm33 1 "aaa22"
testLookupMapFind33b = testLookupMapFind "33b" lm33 4 ""
lm34 = mapAddIfNew lm22 $ newEntry (4,"ddd34")
testLookupMap34 = testLookupMap "34" lm34 [(4,"ddd34"),(2,"bbb1"),(3,"ccc"),(2,"bbb1"),(1,"aaa22")]
testLookupMapFind34a = testLookupMapFind "34a" lm34 1 "aaa22"
testLookupMapFind34b = testLookupMapFind "34b" lm34 4 "ddd34"
lm35 = mapReplaceOrAdd (newEntry (1,"aaa35")) lm22
testLookupMap35 = testLookupMap "35" lm35 [(2,"bbb1"),(3,"ccc"),(2,"bbb1"),(1,"aaa35")]
testLookupMapFind35a = testLookupMapFind "35a" lm35 1 "aaa35"
testLookupMapFind35b = testLookupMapFind "35b" lm35 4 ""
lm36 = mapReplaceOrAdd (newEntry (4,"ddd36")) lm22
testLookupMap36 = testLookupMap "36" lm36 [(2,"bbb1"),(3,"ccc"),(2,"bbb1"),(1,"aaa22"),(4,"ddd36")]
testLookupMapFind36a = testLookupMapFind "36a" lm36 1 "aaa22"
testLookupMapFind36b = testLookupMapFind "36b" lm36 4 "ddd36"
-- All basic LookupMap cases gathered into one suite.
testLookupMapSuite = TestList
    [
    testLookupMap01, testLookupMapFind01,
    testLookupMap02, testLookupMapFind02,
    testLookupMap03, testLookupMapFind03,
    testLookupMap04, testLookupMapFind04,
    testLookupMap05, testLookupMapFind05,
    testLookupMap06, testLookupMapFind06,
    testLookupMap07, testLookupMapFind07, testLookupMapFind0x,
    testLookupMap08, testLookupMapFind08,
    testLookupMap09, testLookupMapFind09,
    testLookupMapApp10,
    testLookupMapTran11, testLookupMapTran12,
    testLookupMapTran13, testLookupMapTran14,
    testLookupMap20, testLookupMapFind20,
    testLookupMap21, testLookupMapFind21,
    testLookupMap22, testLookupMapFind22,
    testLookupContains31,
    testLookupContains32,
    testLookupMap33, testLookupMapFind33a, testLookupMapFind33b,
    testLookupMap34, testLookupMapFind34a, testLookupMapFind34b,
    testLookupMap35, testLookupMapFind35a, testLookupMapFind35b,
    testLookupMap36, testLookupMapFind36a, testLookupMapFind36b
    ]
------------------------------------------------------------
-- Reverse lookup map test tests
------------------------------------------------------------
-- | Default returned by reverse-map lookups when the key is absent.
revdef = -1 :: Int
-- | Build a 'RevTestMap' (String keys, Int values) from an association list.
newRevMap :: [(String,Int)] -> RevTestMap
newRevMap es = makeLookupMap (map newEntry es)
-- | Assert that a reverse map equals the map built from the association list.
testRevLookupMap :: String -> RevTestMap -> [(String,Int)] -> Test
testRevLookupMap lab m1 m2 =
    testeq ("RevLookupMap"++lab) (newRevMap m2) m1
-- | Assert the result of 'mapFind' on a reverse map (default 'revdef').
testRevLookupMapFind :: String -> RevTestMap -> String -> Int -> Test
testRevLookupMapFind lab lm k res =
    testeq ("RevLookupMapFind"++lab) res (mapFind revdef k lm)
-- rlm00..rlm09: each reverse map is 'reverseLookupMap' applied to the
-- corresponding lm0x fixture; tests check contents and a sample lookup.
rlm00 :: RevTestMap
rlm00 = reverseLookupMap lm00
testRevLookupMap00 = testRevLookupMap "00" rlm00 []
testRevLookupMapFind00 = testRevLookupMapFind "00" rlm00 "" revdef
rlm01 :: RevTestMap
rlm01 = reverseLookupMap lm01
testRevLookupMap01 = testRevLookupMap "01" rlm01 [("aaa",1)]
testRevLookupMapFind01 = testRevLookupMapFind "01" rlm01 "bbb" revdef
rlm02 :: RevTestMap
rlm02 = reverseLookupMap lm02
testRevLookupMap02 = testRevLookupMap "02" rlm02 [("bbb",2),("aaa",1)]
testRevLookupMapFind02 = testRevLookupMapFind "02" rlm02 "bbb" 2
rlm03 :: RevTestMap
rlm03 = reverseLookupMap lm03
testRevLookupMap03 = testRevLookupMap "03" rlm03 [("ccc",3),("bbb",2),("aaa",1)]
testRevLookupMapFind03 = testRevLookupMapFind "03" rlm03 "bbb" 2
rlm04 :: RevTestMap
rlm04 = reverseLookupMap lm04
testRevLookupMap04 = testRevLookupMap "04" rlm04 [("bbb",2),("ccc",3),("bbb",2),("aaa",1)]
testRevLookupMapFind04 = testRevLookupMapFind "04" rlm04 "bbb" 2
rlm05 :: RevTestMap
rlm05 = reverseLookupMap lm05
testRevLookupMap05 = testRevLookupMap "05" rlm05 [("bbb1",2),("ccc",3),("bbb1",2),("aaa",1)]
testRevLookupMapFind05 = testRevLookupMapFind "05" rlm05 "bbb1" 2
rlm06 :: RevTestMap
rlm06 = reverseLookupMap lm06
testRevLookupMap06 = testRevLookupMap "06" rlm06 [("bbb1",2),("ccc",3),("bbb1",2),("aaa",1)]
testRevLookupMapFind06 = testRevLookupMapFind "06" rlm06 "bbb1" 2
rlm07 :: RevTestMap
rlm07 = reverseLookupMap lm07
testRevLookupMap07 = testRevLookupMap "07" rlm07 [("bbb",2),("ccc",3),("bbb1",2),("aaa",1)]
testRevLookupMapFind07 = testRevLookupMapFind "07" rlm07 "bbb" 2
-- NOTE(review): the label below is "07" although the binding is named 0w;
-- looks like a copy/paste label slip (harmless, affects failure message only).
testRevLookupMapFind0w = testRevLookupMapFind "07" rlm07 "bbb1" 2
testRevLookupMapFind0x = testRevLookupMapFind "0x" rlm07 "*" revdef
rlm08 :: RevTestMap
rlm08 = reverseLookupMap lm08
testRevLookupMap08 = testRevLookupMap "08" rlm08 [("bbb",2),("bbb1",2),("aaa",1)]
testRevLookupMapFind08 = testRevLookupMapFind "08" rlm08 "bbb" 2
rlm09 :: RevTestMap
rlm09 = reverseLookupMap lm09
testRevLookupMap09 = testRevLookupMap "09" rlm09 [("aaa",1)]
testRevLookupMapFind09 = testRevLookupMapFind "09" rlm09 "" revdef
-- Reverse-lookup cases gathered into one suite.
testRevLookupMapSuite = TestList
    [
    testRevLookupMap01, testRevLookupMapFind01,
    testRevLookupMap02, testRevLookupMapFind02,
    testRevLookupMap03, testRevLookupMapFind03,
    testRevLookupMap04, testRevLookupMapFind04,
    testRevLookupMap05, testRevLookupMapFind05,
    testRevLookupMap06, testRevLookupMapFind06,
    testRevLookupMap07, testRevLookupMapFind07,
    testRevLookupMapFind0w,
    testRevLookupMapFind0x,
    testRevLookupMap08, testRevLookupMapFind08,
    testRevLookupMap09, testRevLookupMapFind09
    ]
------------------------------------------------------------
-- mapKeys
------------------------------------------------------------
-- | Assert the (sorted) key list of a map; sorting makes the expected
-- value independent of internal entry order.
testMapKeys :: String -> TestMap -> [Int] -> Test
testMapKeys lab m1 mk =
    testeq ("testMapKeys:"++lab) mk (sort $ mapKeys m1)
testMapKeys00 = testMapKeys "00" lm00 []
testMapKeys01 = testMapKeys "01" lm01 [1]
testMapKeys02 = testMapKeys "02" lm02 [1,2]
testMapKeys03 = testMapKeys "03" lm03 [1,2,3]
testMapKeys04 = testMapKeys "04" lm04 [1,2,3]
testMapKeys05 = testMapKeys "05" lm05 [1,2,3]
testMapKeys06 = testMapKeys "06" lm06 [1,2,3]
testMapKeys07 = testMapKeys "07" lm07 [1,2,3]
testMapKeys08 = testMapKeys "08" lm08 [1,2]
testMapKeys09 = testMapKeys "09" lm09 [1]
testMapKeysSuite = TestList
    [ testMapKeys00
    , testMapKeys01
    , testMapKeys02
    , testMapKeys03
    , testMapKeys04
    , testMapKeys05
    , testMapKeys06
    , testMapKeys07
    , testMapKeys08
    , testMapKeys09
    ]
------------------------------------------------------------
-- mapVals
------------------------------------------------------------
-- | Assert the (sorted) value list of a map. NOTE(review): the label prefix
-- here is \"MapVals:\" while 'testMapKeys' uses \"testMapKeys:\" — harmless,
-- but inconsistent.
testMapVals :: String -> TestMap -> [String] -> Test
testMapVals lab m1 mv =
    testeq ("MapVals:"++lab) mv (sort $ mapVals m1)
testMapVals00 = testMapVals "00" lm00 []
testMapVals01 = testMapVals "01" lm01 ["aaa"]
testMapVals02 = testMapVals "02" lm02 ["aaa","bbb"]
testMapVals03 = testMapVals "03" lm03 ["aaa","bbb","ccc"]
testMapVals04 = testMapVals "04" lm04 ["aaa","bbb","ccc"]
testMapVals05 = testMapVals "05" lm05 ["aaa","bbb1","ccc"]
testMapVals06 = testMapVals "06" lm06 ["aaa","bbb1","ccc"]
testMapVals07 = testMapVals "07" lm07 ["aaa","bbb","bbb1","ccc"]
testMapVals08 = testMapVals "08" lm08 ["aaa","bbb","bbb1"]
testMapVals09 = testMapVals "09" lm09 ["aaa"]
testMapValsSuite = TestList
    [ testMapVals00
    , testMapVals01
    , testMapVals02
    , testMapVals03
    , testMapVals04
    , testMapVals05
    , testMapVals06
    , testMapVals07
    , testMapVals08
    , testMapVals09
    ]
------------------------------------------------------------
-- mapEq
------------------------------------------------------------
-- All fixtures paired with their names, for the pairwise equality matrix.
maplist =
    [ ("lm00",lm00)
    , ("lm01",lm01)
    , ("lm02",lm02)
    , ("lm03",lm03)
    , ("lm04",lm04)
    , ("lm05",lm05)
    , ("lm06",lm06)
    , ("lm07",lm07)
    , ("lm08",lm08)
    , ("lm09",lm09)
    ]
-- Unordered pairs of distinct fixtures that are expected to compare equal.
mapeqlist =
    [ ("lm01","lm09")
    , ("lm02","lm08")
    , ("lm03","lm04")
    , ("lm03","lm07")
    , ("lm04","lm07")
    , ("lm05","lm06")
    ]
-- | Assert the result of 'mapEq' for one pair of maps.
testMapEq :: String -> Bool -> TestMap -> TestMap -> Test
testMapEq lab eq m1 m2 =
    testeq ("testMapEq:"++lab) eq (mapEq m1 m2)
-- Full cross product of fixtures: equal iff identical, or the pair (in
-- either order) appears in 'mapeqlist'.
testMapEqSuite = TestList
    [ testMapEq (testLab l1 l2) (testEq l1 l2) m1 m2
    | (l1,m1) <- maplist , (l2,m2) <- maplist ]
    where
    testLab l1 l2 = l1 ++ "-" ++ l2
    testEq l1 l2 = (l1 == l2) ||
        (l1,l2) `elem` mapeqlist ||
        (l2,l1) `elem` mapeqlist
------------------------------------------------------------
-- mapSelect and mapMerge
------------------------------------------------------------
-- mapSelect: restrict a map to a subset of keys.
lm101 = mapAdd lm03 $ newEntry (4,"ddd")
testLookupMap101 = testLookupMap "101" lm101 [(4,"ddd"),(3,"ccc"),(2,"bbb"),(1,"aaa")]
lm102 = mapSelect lm101 [1,3]
testLookupMap102 = testLookupMap "102" lm102 [(3,"ccc"),(1,"aaa")]
lm103 = mapSelect lm101 [2,4]
testLookupMap103 = testLookupMap "103" lm103 [(4,"ddd"),(2,"bbb")]
lm104 = mapSelect lm101 [2,3]
testLookupMap104 = testLookupMap "104" lm104 [(3,"ccc"),(2,"bbb")]
mapSelectSuite = TestList
    [ testLookupMap101
    , testLookupMap102
    , testLookupMap103
    , testLookupMap104
    ]
-- mapMerge: union of two maps, result in key order (overlap allowed when
-- the shared entries agree).
lm105 = mapMerge lm102 lm103
testLookupMap105 = testLookupMap "105" lm105 [(1,"aaa"),(2,"bbb"),(3,"ccc"),(4,"ddd")]
lm106 = mapMerge lm102 lm104
testLookupMap106 = testLookupMap "106" lm106 [(1,"aaa"),(2,"bbb"),(3,"ccc")]
lm107 = mapMerge lm103 lm104
testLookupMap107 = testLookupMap "107" lm107 [(2,"bbb"),(3,"ccc"),(4,"ddd")]
lm108 = mapMerge lm101 lm102
testLookupMap108 = testLookupMap "108" lm108 [(1,"aaa"),(2,"bbb"),(3,"ccc"),(4,"ddd")]
mapMergeSuite = TestList
    [ testLookupMap105
    , testLookupMap106
    , testLookupMap107
    , testLookupMap108
    ]
------------------------------------------------------------
-- Translation tests
------------------------------------------------------------
-- Rather late in the day, generic versions of the testing functions used earlier
-- | Generic variant of 'TestMap' over arbitrary key/value types.
type TestMapG a b = LookupMap (GenMapEntry a b)
-- | Generic variant of 'newMap'.
newMapG :: (Eq a, Show a, Eq b, Show b) => [(a,b)] -> (TestMapG a b)
newMapG es = makeLookupMap (map newEntry es)
-- | Generic variant of 'testLookupMap'.
testLookupMapG :: (Eq a, Show a, Eq b, Show b) => String -> (TestMapG a b) -> [(a,b)] -> Test
testLookupMapG lab m1 m2 = testeq ("LookupMapG"++lab ) (newMapG m2) m1
-- | Compare two monad-wrapped maps for equality (used with Maybe below).
testLookupMapM ::
    (Eq a, Show a, Eq b, Show b, Monad m,
     Eq (m (TestMapG a b)), Show (m (TestMapG a b)))
    => String -> m (TestMapG a b) -> m (TestMapG a b) -> Test
testLookupMapM lab m1 m2 = testeq ("LookupMapM"++lab ) m2 m1
-- Base fixture for the translation tests.
tm101 = newMap [(1,"a"),(2,"bb"),(3,"ccc"),(4,"dddd")]
testTranslateMap101 = testLookupMapG "tm101" tm101 [(1,"a"),(2,"bb"),(3,"ccc"),(4,"dddd")]
-- mapTranslateKeys: Int keys become strings of '*'s of that length.
tf102 = (flip replicate '*') :: Int -> String
tm102 :: StrTestMap
tm102 = mapTranslateKeys tf102 tm101
testTranslateMap102 = testLookupMapG "tm102" tm102 [("*","a"),("**","bb"),("***","ccc"),("****","dddd")]
-- mapTranslateVals: String values become their lengths.
tf103 = length
tm103 :: RevTestMap
tm103 = mapTranslateVals tf103 tm102
testTranslateMap103 = testLookupMapG "tm103" tm103 [("*",1),("**",2),("***",3),("****",4)]
-- mapTranslateEntries: rewrite whole entries (key and value together).
tf104 e = newEntry ( (flip replicate '#') k, 5-(length v) ) where (k,v) = keyVal e
tm104 :: RevTestMap
tm104 = mapTranslateEntries tf104 tm101
testTranslateMap104 = testLookupMapG "tm104" tm104 [("#",4),("##",3),("###",2),("####",1)]
-- Test monadic translation, using Maybe monad
-- (Note that if Nothing is generated at any step,
-- it propagates to the result)
tf105 e = Just $ tf104 e
tm105 :: MayTestMap
tm105 = mapTranslateEntriesM tf105 tm101
testTranslateMap105 = testLookupMapM "tm105" tm105 (Just tm104)
tf106 e = if k == 2 then Nothing else tf105 e where (k,_) = keyVal e
tm106 :: MayTestMap
tm106 = mapTranslateEntriesM tf106 tm101
testTranslateMap106 = testLookupMapM "tm106" tm106 Nothing
mapTranslateSuite = TestList
    [ testTranslateMap101
    , testTranslateMap102
    , testTranslateMap103
    , testTranslateMap104
    , testTranslateMap105
    , testTranslateMap106
    ]
------------------------------------------------------------
-- All tests
------------------------------------------------------------
-- | Every suite in this module, in declaration order.
allTests = TestList
    [ testLookupMapSuite
    , testRevLookupMapSuite
    , testMapKeysSuite
    , testMapValsSuite
    , testMapEqSuite
    , mapSelectSuite
    , mapMergeSuite
    , mapTranslateSuite
    ]
-- | Run all tests on the terminal.
main = runTestTT allTests
-- | Run a test, writing the (non-verbose) report to the file @a.tmp@.
runTestFile t = do
    h <- openFile "a.tmp" WriteMode
    runTestText (putTextToHandle h False) t
    hClose h
-- Short interactive aliases: file report / terminal report.
tf = runTestFile
tt = runTestTT
--------------------------------------------------------------------------------
--
-- Copyright (c) 2003, G. KLYNE. All rights reserved.
--
-- This file is part of Swish.
--
-- Swish is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- Swish is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Swish; if not, write to:
-- The Free Software Foundation, Inc.,
-- 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
--------------------------------------------------------------------------------
-- $Source: /file/cvsdev/HaskellUtils/LookupMapTest.hs,v $
-- $Author: graham $
-- $Revision: 1.1 $
-- $Log: LookupMapTest.hs,v $
-- Revision 1.1 2004/01/13 12:31:24 graham
-- Move modules from HaskellRDF to HaskellUtils project
--
-- Revision 1.20 2004/01/06 13:53:10 graham
-- Created consolidated test harness (SwishTestAll.hs)
--
-- Revision 1.19 2003/12/08 23:55:36 graham
-- Various enhancements to variable bindings and proof structure.
-- New module BuiltInMap coded and tested.
-- Script processor is yet to be completed.
--
-- Revision 1.18 2003/12/04 02:53:27 graham
-- More changes to LookupMap functions.
-- SwishScript logic part complete, type-checks OK.
--
-- Revision 1.17 2003/12/03 22:04:00 graham
-- Re-ordered mapFind (again), to simplify currying of default value.
--
-- Revision 1.16 2003/12/03 22:02:09 graham
-- Re-ordered mapFind, to simplify currying of default value.
--
-- Revision 1.15 2003/10/24 21:02:42 graham
-- Changed kind-structure of LookupMap type classes.
--
-- Revision 1.14 2003/10/14 20:31:21 graham
-- Add separate module for generic variable binding functions.
--
-- Revision 1.13 2003/09/24 18:50:52 graham
-- Revised module format to be Haddock compatible.
--
-- Revision 1.12 2003/06/11 14:07:53 graham
-- Added mapTranslateEntriesM, which performs monadic translation of
-- LookupMap entries. (Tested using Maybe monad.)
--
-- Revision 1.11 2003/06/03 19:24:13 graham
-- Updated all source modules to cite GNU Public Licence
--
-- Revision 1.10 2003/05/29 13:04:42 graham
-- All tests now compile and pass as stand-alone programs compiled
-- using GHC. Added batch files to compile programs and run tests.
--
-- Revision 1.9 2003/05/26 22:30:36 graham
-- Working on graph merge.
-- Added methods to Graph class for manipulating variable node.
-- Need to get RDFGraph to compile. And test.
--
-- Revision 1.8 2003/05/23 19:33:36 graham
-- Added and tested RDF graph label translation functions
--
-- Revision 1.7 2003/05/09 00:28:48 graham
-- Added partitionBy to ListHelpers (may want to remove since
-- it's also in the standard List module).
-- Added mapSelect and mapMerge to LookupMap, and test cases.
--
-- Revision 1.6 2003/05/07 18:50:38 graham
-- Add LookupMap functions: mapFindMaybe, mapKeys, mapEq
--
-- Revision 1.5 2003/05/01 23:15:44 graham
-- GraphTest passes all tests using refactored LookupMap
-- Extensive changes to GraphMatch were required.
--
-- Revision 1.4 2003/05/01 19:14:26 graham
-- LookupMap refactored to use class for entry, so that it can be
-- applied to a variety of different types with identifiable key and value
-- components. All tests pass.
--
-- Revision 1.3 2003/05/01 00:21:41 graham
-- Started refactoring LookupMap.
-- Revised module compiles OK.
-- Working on test module.
--
-- Revision 1.2 2003/04/11 18:12:10 graham
-- Renamed GraphHelpers to ListHelpers
-- LookupMapTest, GraphTest, RDFGraphTest all run OK
--
-- Revision 1.1 2003/04/11 18:05:57 graham
-- Add separate LookupMap test harness
-- Added mapReplaceOrAdd function
-- LookupMapTest runs OK
--
-- Revision 1.7 2003/04/10 13:41:22 graham
-- More graph code tidying
-- Graph test cases still run OK
--
-- Revision 1.6 2003/04/10 13:35:34 graham
-- Separated GraphMatch logic from GraphMem
--
-- Revision 1.5 2003/04/10 08:36:06 graham
-- Graph matching passes battery of new tests
-- Started work on RDF graph
--
-- Revision 1.4 2003/03/31 22:18:08 graham
-- Simple graph equality tests all pass
--
-- Revision 1.3 2003/03/31 20:52:23 graham
-- Restructure graph matching to deal with same unbound node names in
-- different graphs. It shows signs that it might be working now.
-- More testing is needed.
--
-- Revision 1.2 2003/03/28 21:50:22 graham
-- Graph equality coded and nearly working
--
-- Revision 1.1 2003/03/12 23:00:43 graham
-- Graph model coded and working, except for graph isomorphism test.
--
| amccausl/Swish | Swish/HaskellUtils/LookupMapTest.hs | lgpl-2.1 | 24,343 | 0 | 12 | 4,502 | 5,804 | 3,377 | 2,427 | -1 | -1 |
{-# language ViewPatterns, ScopedTypeVariables, PackageImports, EmptyDataDecls,
TypeSynonymInstances, FlexibleInstances,
StandaloneDeriving, DeriveDataTypeable,
DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
module Utils (
(<>),
(<$>),
(<*>),
(<|>),
(*>),
(<*),
pure,
(>>>),
(>=>),
forM,
forM_,
when,
(^.),
(^=),
(^:),
(.>),
module Utils,
module Utils.Scripting,
-- * accessor re-exports
Accessor,
(%=),
(%:),
-- * other re-exports
on,
Pair(..),
POSIXTime,
) where
-- imports
import Prelude hiding (catch)
import Safe
import Data.List
import Data.Map (Map, fromList, member, (!), findWithDefault, toList)
import Data.Foldable (Foldable, mapM_, forM_, any, sum)
import qualified Data.Foldable as Foldable
import Data.Traversable (Traversable, mapM)
import Data.IORef
import qualified Data.Set as Set
import Data.Accessor (Accessor, accessor, (^.), (^=), (^:), (.>))
import Data.Accessor.Monad.MTL.State ((%=), (%:))
import Data.Monoid
import Data.Function
import qualified Data.Strict as Strict
import Data.Strict (Pair(..))
import Data.Time.Clock.POSIX
import qualified Data.Vector
import Data.Data
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import qualified Data.Text.Encoding.Error as Text
import qualified Data.ByteString as SBS
import Text.Printf
import Text.Logging
import Control.Applicative ((<$>), (<|>), (<*>), (*>), (<*), pure, Alternative(..), Applicative)
import "mtl" Control.Monad.State hiding (forM_)
import "transformers" Control.Monad.Trans.Error (ErrorT(..))
-- and Monad (Either e)
import "MonadCatchIO-transformers" Control.Monad.CatchIO
import Control.Arrow ((>>>))
import Control.Concurrent
import Control.Exception
import Control.DeepSeq
import System.IO.Unsafe
import System.FilePath
import Utils.Scripting
-- * debugging stuff

-- | uninhabited placeholder type for unfinished code.
data Todo

-- | placeholder value: evaluating it aborts with a fixed message.
todo :: Todo
todo = error "just working on this (Utils.todo)"

-- | 'error' under a name that is easy to grep for.
todoError :: String -> a
todoError = error
-- | can be used to try out different values for constants without recompiling
-- Reads and 'readMay'-parses the file via 'unsafePerformIO'; aborts when
-- the file content does not parse. NOINLINE keeps GHC from duplicating
-- or floating the unsafe read -- note that sharing (and therefore how
-- often the file is actually re-read) still depends on how call sites
-- are evaluated.
tweakValue :: Read a => FilePath -> a
tweakValue file = System.IO.Unsafe.unsafePerformIO $ do
    value <- readFile file
    logg Debug (file ++ " = " ++ value)
    return $ case readMay value of
        Nothing -> error ("cannot read: " ++ value)
        Just x -> x
{-# noinline tweakValue #-}
-- | prints debug messages (as unsafe side effects)
-- can be used at the end of expressions like this:
-- (x * y + 3) << id
-- something << getter
-- Logs @show (f a)@ and returns @a@ unchanged.
(<<) :: Show s => a -> (a -> s) -> a
a << f = trace (show $ f a) a

-- | prints out an expression as a debugging message (unsafe side effect)
-- with a given message
(<<?) :: Show a => a -> String -> a
a <<? msg = trace (msg ++ ": " ++ show a) a

-- | useful for temporarily deactivating $<<?$
-- (same type as '<<?', but ignores the message and logs nothing)
(<<|) :: Show a => a -> String -> a
a <<| _ = a
-- | re-implementation of trace that uses Text.Logging.logg
trace :: String -> a -> a
trace msg x = unsafePerformIO $ do
    logg Debug msg
    return x
-- NOINLINE keeps the unsafePerformIO application from being inlined and
-- duplicated by the optimiser, which could log the message several
-- times (or not at all, if floated and never demanded)
{-# NOINLINE trace #-}

-- | like 'trace', but renders the value with the given function; an
-- empty message prints the rendered value alone.
traceThis :: String -> (x -> String) -> x -> x
traceThis "" showFun x = trace (showFun x) x
traceThis msg showFun x = trace (msg ++ ": " ++ showFun x) x
-- | 'error' with a one-letter name.
e :: String -> a
e = error

-- | 'error' carrying a showable witness value.
es :: Show s => String -> s -> a
es msg x = error $ msg ++ ": " ++ show x

-- | marks a non-exhaustive pattern match, showing the offending value.
nm :: Show s => String -> s -> a
nm msg = es ("Non-exhaustive patterns: " ++ msg)

-- | runtime assertion in IO: a no-op when the condition holds,
-- otherwise aborts with the given message.
assertIO :: Bool -> String -> IO ()
assertIO cond msg
    | cond = return ()
    | otherwise = error ("ASSERTION ERROR: " ++ msg)
-- | returns True every n-th time 'every' is called.
-- (of course this involves unsafeIO-magick.
-- NOTE(review): the haddock above is stale -- the function returns
-- IO () and instead RUNS the given action on every n-th call. The
-- counter is one process-global IORef, so ALL call sites of 'every'
-- share the same counter.
every :: Int -> IO () -> IO ()
every n cmd = do
    c <- readIORef everyRef
    if c >= n then do
        writeIORef everyRef 0
        cmd
      else do
        writeIORef everyRef (c + 1)
        return ()

{-# NOINLINE everyRef #-}
-- global counter backing 'every' (NOINLINE so the CAF is shared)
everyRef :: IORef Int
everyRef = unsafePerformIO $ newIORef 0
-- | @wait n@ waits for n seconds
wait :: MonadIO m => Double -> m ()
wait seconds = liftIO (threadDelay (round (seconds * 1000000)))
-- * re-named re-exports

{-# inline fmapM #-}
-- | 'Data.Traversable.mapM' under a name that does not clash with the Prelude.
fmapM :: (Data.Traversable.Traversable t, Monad m) => (a -> m b) -> t a -> m (t b)
fmapM = Data.Traversable.mapM

-- | effectful traversal that discards the results.
fmapM_ :: (Monad m, Data.Foldable.Foldable t) => (a -> m b) -> t a -> m ()
fmapM_ = Data.Foldable.mapM_

-- | 'Data.Foldable.any' for arbitrary containers.
fany :: Foldable t => (a -> Bool) -> t a -> Bool
fany = Data.Foldable.any

-- | 'Data.Foldable.sum' for arbitrary containers.
fsum :: (Foldable t, Num a) => t a -> a
fsum = Data.Foldable.sum

-- | Efficiency talk: Both flength and fnull assume that it is
-- faster to access the first elements than the last.
flength :: (Functor t, Foldable t) => t a -> Int
flength = Foldable.foldl (\ count _ -> count + 1) 0

-- | short-circuits on the first element.
fnull :: (Functor t, Foldable t) => t a -> Bool
fnull = Foldable.foldr (\ _ _ -> False) True

-- | 'Foldable.toList' under a non-clashing name.
ftoList :: Foldable f => f a -> [a]
ftoList = Foldable.toList
-- * function composition stuff

-- | reverse application: @x |> f == f x@.
(|>) :: a -> (a -> b) -> b
value |> f = f value

-- fake kleisli stuff

-- | runs an effectful check on a value and hands the value back.
passThrough :: Monad m => (a -> m ()) -> (a -> m a)
passThrough action x = do
    action x
    return x

-- | runs an effectful function over the second component of a pair.
secondKleisli :: Functor f => (a -> f b) -> ((x, a) -> f (x, b))
secondKleisli action (x, a) = fmap (\ b -> (x, b)) (action a)

-- | flipped 'fmap' for pipeline-style chaining.
(<>>) :: Functor m => m a -> (a -> b) -> m b
action <>> f = fmap f action
-- * State monad stuff

-- | stores a value into the state through a setter.
puts :: MonadState s m => (a -> s -> s) -> a -> m ()
puts setter value = modify (setter value)

-- | reads a component through a getter, applies a pure function, and
-- writes it back through the setter.
modifies :: MonadState s m => (s -> a) -> (a -> s -> s) -> (a -> a) -> m ()
modifies getter setter f = gets getter >>= puts setter . f

-- | like 'modifies', but the update runs in the underlying monad.
modifiesT :: (Monad m, MonadTrans t, MonadState s (t m)) =>
    (s -> a) -> (a1 -> s -> s) -> (a -> m a1) -> t m ()
modifiesT getter setter action = do
    old <- gets getter
    new <- lift (action old)
    puts setter new

-- | 'modify' under a non-clashing name.
modifyState :: MonadState s m => (s -> s) -> m ()
modifyState f = get >>= put . f
-- | runs a state monad on the content of an IORef
-- (useful for embedding state monad in e.g callback functions)
-- Reads the ref, runs the computation on that state, writes the final
-- state back, and returns the computation's result. Not atomic:
-- concurrent callers can lose updates between the read and the write.
runStateTFromIORef :: IORef s -> StateT s IO a -> IO a
runStateTFromIORef ref cmd = do
    s <- readIORef ref
    (o, s') <- runStateT cmd s
    writeIORef ref s'
    return o
-- | is not atomic
modifyIORefM :: IORef a -> (a -> IO a) -> IO ()
modifyIORefM ref f = do
    old <- readIORef ref
    new <- f old
    writeIORef ref new
-- * mvar stuff

-- | non-blocking read: returns the current value (putting it straight
-- back) or Nothing when the MVar is empty.
-- NOTE(review): take-then-put is not atomic -- other threads block on
-- the empty MVar between the two steps. Newer base versions export a
-- primitive 'tryReadMVar' from Control.Concurrent that this definition
-- would clash with; confirm the base version before keeping it.
tryReadMVar :: MVar a -> IO (Maybe a)
tryReadMVar mvar = do
    r <- tryTakeMVar mvar
    forM_ r $ putMVar mvar
    return r
-- * Monad stuff

-- | threads a value through a chain of monadic updates, left to right.
chainAppM :: Monad m => (b -> a -> m a) -> [b] -> a -> m a
chainAppM step items start = foldM (flip step) start items

-- | discards the result of a monadic action.
ignore :: Monad m => m a -> m ()
ignore action = action >> return ()

{-# inline io #-}
-- | shorthand for 'liftIO'.
io :: MonadIO m => IO a -> m a
io = liftIO
-- applies a given monadic operation n times
applyTimesM :: Monad m => Int -> (a -> m a) -> a -> m a
applyTimesM 0 _ = return
applyTimesM n op = op >=> applyTimesM (n - 1) op
infixl 8 ^^:
-- | effectful update through an accessor: reads the focused part, runs
-- the functorial update on it, and writes the result back into the record.
(^^:) :: Functor m => Accessor r a -> (a -> m a) -> r -> m r
acc ^^: f = \ r ->
    fmap (\ a' -> acc ^= a' $ r) (f (r ^. acc))
-- | flipped 'fmap' as an operator.
(>$>) :: Functor m => m a -> (a -> b) -> m b
action >$> f = fmap f action
-- | converts any exception thrown by the wrapped action into an
-- 'ErrorT' error via the given conversion function.
catchSomeExceptionsErrorT :: MonadCatchIO m =>
    (SomeException -> e) -> ErrorT e m a -> ErrorT e m a
catchSomeExceptionsErrorT convert (ErrorT cmd) =
    ErrorT $ Control.Monad.CatchIO.catch cmd (return . Left . convert)

-- | maps over the error type of an 'ErrorT' computation.
convertErrorT :: Functor m => (a -> b) -> ErrorT a m o -> ErrorT b m o
convertErrorT f (ErrorT action) = ErrorT $
    (either (Left . f) Right <$> action)
-- api stolen from the package 'errors'
-- | drops the error of an 'Either', keeping only successes.
hush :: Either e a -> Maybe a
hush (Left _) = Nothing
hush (Right a) = Just a
-- NOTE(review): orphan instance; newer base versions ship
-- Foldable (Either a) themselves -- confirm the GHC version before
-- keeping this standalone deriving.
deriving instance Foldable (Either a)
-- * either stuff

-- | maps over the 'Left' payload; 'Right' values pass through unchanged.
mapLeft :: (a -> b) -> Either a c -> Either b c
mapLeft f = either (Left . f) Right

-- * list stuff

infixl 4 +:
-- | appends a single element.
(+:) :: [a] -> a -> [a]
xs +: x = xs ++ [x]

-- | wraps a value in a one-element list.
singleton :: a -> [a]
singleton x = [x]
-- | dropPrefix a b drops the longest prefix from b that is equal to a prefix of a.
dropPrefix :: Eq a => [a] -> [a] -> [a]
dropPrefix (a : as) (b : bs)
    | a == b = dropPrefix as bs
    | otherwise = b : bs
dropPrefix _ bs = bs
-- | drops the given prefix of a list, if prefix `isPrefixOf` list.
-- Delegates to the standard 'stripPrefix', which walks the input once
-- (the original isPrefixOf/length/drop combination traversed the
-- prefix twice).
dropPrefixMay :: Eq a => [a] -> [a] -> Maybe [a]
dropPrefixMay = stripPrefix
-- | splits a list into chunks of (at most) n elements; the last chunk
-- may be shorter.
chunks :: Int -> [a] -> [[a]]
chunks _ [] = []
chunks n l
    -- guard against the silent infinite loop of the naive version:
    -- splitAt with n <= 0 never consumes any input
    | n <= 0 = error "Utils.chunks: chunk size must be positive"
    | otherwise = a : chunks n b
  where
    (a, b) = splitAt n l
-- returns the list of items that are in the given list more than once
duplicates :: (Eq a, Ord a) => [a] -> [a]
duplicates = nub . collect Set.empty
  where
    -- emits an element every time it re-appears after its first occurrence
    collect _ [] = []
    collect seen (x : xs)
        | x `Set.member` seen = x : rest
        | otherwise = rest
      where
        rest = collect (Set.insert x seen) xs
-- | threads a value through a chain of pure updates, left to right.
chainApp :: (b -> a -> a) -> [b] -> a -> a
chainApp step items start = foldl (flip step) start items

-- | tags the first list 'Left' and the second 'Right', keeping order.
toEitherList :: [a] -> [b] -> [Either a b]
toEitherList as bs = [Left a | a <- as] ++ [Right b | b <- bs]

-- | extracts the only element of a list; aborts (with the given
-- message) on any other length.
single :: String -> [a] -> a
single msg list = case list of
    [x] -> x
    [] -> error ("empty list in single: " ++ msg)
    _ -> error ("more than one element in list in single: " ++ msg)

-- | removes the last n elements.
dropLast :: Int -> [a] -> [a]
dropLast n xs = zipWith const xs (drop n xs)
-- | splits a list at every element that occurs in the separator list;
-- separators themselves are dropped. Always yields at least one
-- (possibly empty) word, and adjacent separators yield empty words.
wordsBy :: Eq a => [a] -> [a] -> [[a]]
wordsBy seps ll = inner [] ll
  where
    -- akk collects the current word in reverse
    inner akk [] = [reverse akk]
    inner akk (a : r) =
        if a `elem` seps then
            reverse akk : wordsBy seps r
        else
            inner (a : akk) r
-- | cartesian product; the second list drives the outer loop, so the
-- first component varies fastest.
cartesian :: [a] -> [b] -> [(a, b)]
cartesian al bl = [(a, b) | b <- bl, a <- al]

-- | returns every combination of given elements once.
completeEdges :: [a] -> [(a, a)]
completeEdges [] = []
completeEdges (x : rest) = [(x, y) | y <- rest] ++ completeEdges rest
-- | pairs of neighbouring elements, wrapping around from the last back
-- to the first; lists with fewer than two elements have no edges.
adjacentCyclic :: [a] -> [(a, a)]
adjacentCyclic [] = []
adjacentCyclic [_] = []
adjacentCyclic list@(firstElem : _) = go list
  where
    -- local names chosen to avoid shadowing Prelude's head/last
    -- (the original bound both)
    go (a : rest@(b : _)) = (a, b) : go rest
    go [lastElem] = [(lastElem, firstElem)]
    go [] = error "adjacentCyclic"
-- | merges pairs of elements for which the given function returns (Just a).
-- removes the pair and inserts (the merged) as.
-- Is idempotent.
-- Repeats whole passes until no pair can be merged any more ('fixpoint').
mergePairs :: Eq a => (a -> a -> Maybe [a]) -> [a] -> [a]
mergePairs f =
    fixpoint merge
  where
    -- one pass: try to merge the head element with any later element
    merge (a : r) =
        case inner a r of
            Nothing -> a : merge r
            Just r' -> r'
    merge [] = []
    -- tries f on a with each later element, in both argument orders;
    -- on success returns the remaining list with the merge result
    -- spliced in at the partner's position (a itself is dropped)
    inner a (b : r) =
        case f a b <|> f b a of
            Nothing ->
                case inner a r of
                    Nothing -> Nothing
                    Just r' -> Just (b : r')
            Just newAs -> Just (newAs ++ r)
    inner _ [] = Nothing

-- | like mergePairs, but only tries to merge adjacent elements
-- (or the first and the last element)
-- Is idempotent.
mergeAdjacentCyclicPairs :: Eq a => (a -> a -> Maybe a) -> [a] -> [a]
mergeAdjacentCyclicPairs f =
    fixpoint merge
  where
    merge = headAndLast . adjacent
    -- merges adjacent elements left to right; a successful merge is
    -- immediately retried against the next element
    adjacent (a : b : r) = case f a b of
        Nothing -> a : adjacent (b : r)
        Just x -> adjacent (x : r)
    adjacent [x] = [x]
    adjacent [] = []
    -- finally tries to merge the last element into the first (cyclic case)
    headAndLast [] = []
    headAndLast [a] = [a]
    headAndLast l = case f (last l) (head l) of
        Nothing -> l
        Just x -> x : tail (init l)
-- | returns the local minima of a list.
localMinima :: Ord n => [n] -> [n]
localMinima (a : b : c : rest)
    | b < a && b < c = b : localMinima (c : rest)
    | otherwise = localMinima (b : c : rest)
localMinima _ = []
-- * String stuff

-- | adds an extension, if the path does not already have the same extension
(<..>) :: FilePath -> String -> FilePath
path <..> ext
    | dotExt `isSuffixOf` path = path
    | otherwise = path <.> ext
  where
    -- normalise the extension to start with a dot
    dotExt = case ext of
        ('.' : _) -> ext
        _ -> '.' : ext
-- | reads a Text from a file in forced unicode encoding
-- Reads the raw bytes strictly and decodes them as UTF-8, replacing
-- undecodable byte sequences instead of throwing ('lenientDecode').
readUnicodeText :: FilePath -> IO Text.Text
readUnicodeText file =
    Text.decodeUtf8With Text.lenientDecode <$> SBS.readFile file
-- * Map stuff

-- | tabulates a function over a list of keys.
fromKeys :: Ord k => (k -> a) -> [k] -> Map k a
fromKeys f = fromList . map (\ k -> (k, f k))

-- | returns the value of the first listed key that is present, if any.
lookups :: Ord k => [k] -> Map k a -> Maybe a
lookups ks m = case filter (`member` m) ks of
    (k : _) -> Just (m ! k)
    [] -> Nothing

-- | creates a mapping function with an error message
toFunction :: (Show k, Ord k) => String -> Map k e -> k -> e
toFunction msg m k = findWithDefault missing k m
  where
    missing = error ("key not found: " ++ show k ++ " from " ++ msg)

-- | maps over both key and value of every entry.
mapPairs :: Ord k => (k -> a -> (k, a)) -> Map k a -> Map k a
mapPairs f = fromList . map (uncurry f) . toList
-- * Maybe stuff

-- | @justWhen cond a@ is @Just a@ iff the condition holds.
justWhen :: Bool -> a -> Maybe a
justWhen cond a = if cond then Just a else Nothing
-- * math stuff

infixr 2 ==>
-- | boolean implication.
(==>) :: Bool -> Bool -> Bool
p ==> q = not p || q

-- | approximate equality, within 'epsilon'.
(~=) :: (Ord n, Fractional n) => n -> n -> Bool
a ~= b = abs (a - b) < epsilon

-- | tolerance used by '~='.
epsilon :: Fractional n => n
epsilon = 0.001

-- | integer quotient plus remainder of a real division; the remainder
-- keeps the scale of the divisor: @divide a b == (n, r)@ with
-- @a == fromIntegral n * b + r@ (up to floating point rounding).
divide :: (RealFrac f, Integral i) => f -> f -> (i, f)
divide a b = (whole, fractional * b)
  where
    (whole, fractional) = properFraction (a / b)
-- | folds the given number to the given range
-- range is including lower bound and excluding upper bound
-- OPT: is O(a), could be constant (using properFraction)
-- Aborts on a degenerate range (upper <= lower); otherwise repeatedly
-- shifts the value by the range width until it lands inside.
foldToRange :: (Ord n, Num n) => (n, n) -> n -> n
foldToRange (lower, upper) _ | upper <= lower = e "foldToRange"
foldToRange (lower, upper) a | a >= upper = foldToRange (lower, upper) (a - distance lower upper)
foldToRange (lower, upper) a | a < lower = foldToRange (lower, upper) (a + distance lower upper)
foldToRange _ a = a

-- | returns, if two values are very near in a (floating) modulo body.
-- Both values are first folded into the range; they are compared
-- directly and also shifted by one range width in each direction, to
-- catch near-misses across the wrap-around boundary.
rangeEpsilonEquals :: (Ord n, Fractional n) => (n, n) -> n -> n -> Bool
rangeEpsilonEquals range a b =
    or (map (aR ~=) [bR, bR + diff, bR - diff])
  where
    diff = uncurry distance range
    aR = foldToRange range a
    bR = foldToRange range b
-- | absolute difference.
distance :: Num n => n -> n -> n
distance x y = abs (x - y)

-- | clips a number to a given range
-- range is including both bounds
clip :: (Ord n, Num n) => (n, n) -> n -> n
clip (lower, upper) x
    | x < lower = lower
    | x > upper = upper
    | otherwise = x
-- * tuple stuff

-- | pairs two values ('(,)' as a named function).
tuple :: a -> b -> (a, b)
tuple = (,)

-- | exchanges the components of a pair.
swapTuple :: (a, b) -> (b, a)
swapTuple (x, y) = (y, x)

fst3 :: (a, b, c) -> a
fst3 (x, _, _) = x

snd3 :: (a, b, c) -> b
snd3 (_, y, _) = y

third :: (a, b, c) -> c
third (_, _, z) = z

-- * misc

-- | Returns the current time in seconds.
getTime :: IO POSIXTime
getTime = getPOSIXTime

uncurry3 :: (a -> b -> c -> d) -> ((a, b, c) -> d)
uncurry3 f (x, y, z) = f x y z

uncurry4 :: (a -> b -> c -> d -> e) -> ((a, b, c, d) -> e)
uncurry4 f (w, x, y, z) = f w x y z

-- | inverts an 'Ordering'.
swapOrdering :: Ordering -> Ordering
swapOrdering ord = case ord of
    LT -> GT
    GT -> LT
    EQ -> EQ
-- | exclusive or (the truth table matches inequality on Bool).
xor :: Bool -> Bool -> Bool
xor = (/=)

-- | boolean implication
infix 4 ~>
(~>) :: Bool -> Bool -> Bool
p ~> q = not p || q

infix 4 ~.>
-- | pointwise implication on predicates.
(~.>) :: (a -> Bool) -> (a -> Bool) -> (a -> Bool)
(p ~.> q) x = p x ~> q x
-- | iterates a function until it reaches a value it maps to itself.
-- Diverges when no fixpoint is reached.
fixpoint :: Eq e => (e -> e) -> e -> e
fixpoint f x
    | x' == x = x
    | otherwise = fixpoint f x'
  where
    x' = f x

-- | applies a function n times
superApply :: Int -> (a -> a) -> a -> a
superApply n f = foldr (.) id (replicate n f)

-- | returns all possible values, sorted.
allValues :: (Enum a, Bounded a) => [a]
allValues = enumFromTo minBound maxBound
-- * Pretty Printing

-- | ad-hoc compact one-line pretty-printing (for log/debug output,
-- not for serialisation).
class PP a where
    pp :: a -> String

instance (PP a, PP b) => PP (a, b) where
    pp (a, b) = "(" ++ pp a ++ ", " ++ pp b ++ ")"

instance (PP a, PP b) => PP (Pair a b) where
    pp (a :!: b) = "(" ++ pp a ++ " :!: " ++ pp b ++ ")"

instance (PP a, PP b, PP c) => PP (a, b, c) where
    pp (a, b, c) = "(" ++ pp a ++ ", " ++ pp b ++ ", " ++ pp c ++ ")"

instance (PP a, PP b, PP c, PP d) => PP (a, b, c, d) where
    pp (a, b, c, d) = "(" ++ pp a ++ ", " ++ pp b ++ ", " ++ pp c ++ ", " ++ pp d ++ ")"

-- booleans render as "|" (True) and "O" (False)
instance PP Bool where
    pp True = "|"
    pp False = "O"

-- lists clip every rendered element to at most 20 characters,
-- replacing the tail with "..."
instance PP a => PP [a] where
    pp list = "[" ++ intercalate ", " (map (clipString . pp) list) ++ "]"
      where
        clipString s = if length s < limit then s else take (limit - length dots) s ++ dots
        limit = 20
        dots = "..."

-- sets reuse the list instance, swapping brackets for braces;
-- tail/init strip the "[" and "]" (safe: pp on a list is never empty)
instance PP a => PP (Set.Set a) where
    pp set = "{" ++ (tail (init (pp (Set.toList set)))) ++ "}"

instance PP a => PP (Maybe a) where
    pp Nothing = "Nothing"
    pp (Just x) = "Just (" ++ pp x ++ ")"

instance PP a => PP (Strict.Maybe a) where
    pp Strict.Nothing = "Strict.Nothing"
    pp (Strict.Just x) = "Strict.Just (" ++ pp x ++ ")"

-- floating point numbers are padded to 8 columns with 3 decimals
instance PP Double where
    pp = printf "%8.3f"

instance PP Float where
    pp = printf "%8.3f"

instance PP Int where
    pp = show

-- | pretty-prints a value to the log at Info level.
ppp :: PP p => p -> IO ()
ppp = pp >>> logg Info
-- * strict utils

-- 'Applicative'/'Alternative' for strict 'Maybe', mirroring the lazy
-- Prelude instances.
instance Applicative Strict.Maybe where
    pure = Strict.Just
    (Strict.Just f) <*> (Strict.Just x) = Strict.Just $ f x
    _ <*> _ = Strict.Nothing

instance Alternative Strict.Maybe where
    empty = Strict.Nothing
    Strict.Nothing <|> x = x
    (Strict.Just x) <|> _ = Strict.Just x

-- standalone-derived instances for strict pairs
-- NOTE(review): Typeable2 is deprecated in newer base versions (the
-- poly-kinded Typeable subsumes it) -- confirm the GHC version in use.
deriving instance Typeable2 Pair
deriving instance (Data a, Data b) => Data (Pair a b)
deriving instance Functor (Pair a)
deriving instance Foldable (Pair a)
deriving instance Traversable (Pair a)

-- full evaluation of a strict pair evaluates both components
instance (NFData a, NFData b) => NFData (Pair a b) where
    rnf (a :!: b) = rnf a `seq` rnf b

-- | maps over the first component of a strict pair.
firstStrict :: (a -> b) -> (Pair a c) -> (Pair b c)
firstStrict f (a :!: c) = f a :!: c

-- | accessor for the first component of a strict pair.
firstAStrict :: Accessor (Pair a b) a
firstAStrict = accessor (\ (a :!: _) -> a) (\ a (_ :!: b) -> (a :!: b))

-- | like 'zip', but produces strict pairs; truncates to the shorter list.
zipStrict :: [a] -> [b] -> [Pair a b]
zipStrict (a : ra) (b : rb) = (a :!: b) : zipStrict ra rb
zipStrict _ _ = []
| cstrahan/nikki | src/Utils.hs | lgpl-3.0 | 17,720 | 0 | 16 | 4,604 | 7,591 | 4,026 | 3,565 | -1 | -1 |
module Kivuli.Evaluator where
import Language.Haskell.Interpreter
import Kivuli.Types
import Control.Monad.Catch (MonadMask)
-- | Runs the first expression of the given test through the GHC
-- interpreter (with only the Prelude in scope) and compares the printed
-- result against the test's expected output.
--
-- Returns 'Fail' (never crashes) when the test has no expressions --
-- the original used partial 'head' here -- or when interpretation
-- fails for any reason.
evaluate :: (Functor m, MonadIO m, MonadMask m) => Test -> m TestResult
evaluate test =
    case expressions test of
        [] -> return (Fail "Test contains no expressions to evaluate")
        (expressionToEvaluate : _) -> do
            let expected = expectedResult test
            result <- runInterpreter $ do
                setImportsQ [("Prelude", Nothing)]
                eval expressionToEvaluate
            return $ case result of
                Right actual
                    | actual == expected -> Pass
                    | otherwise ->
                        Fail ("Expected " ++ expected ++ "\nActual " ++ actual)
                Left (WontCompile ghcErrors) -> Fail (concatMap errMsg ghcErrors)
                Left (UnknownError errorMessage) -> Fail errorMessage
                Left (NotAllowed errorMessage) -> Fail errorMessage
                Left (GhcException errorMessage) -> Fail errorMessage
| karun012/kivuli | src/Kivuli/Evaluator.hs | unlicense | 1,075 | 0 | 16 | 367 | 286 | 142 | 144 | 19 | 6 |
{-# LANGUAGE QuasiQuotes, TypeFamilies, GeneralizedNewtypeDeriving, TemplateHaskell, GADTs #-}
module Model where
import Yesod
import Data.Time
import Data.Text
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
-- Template Haskell splice: generates the entity datatypes and the
-- "migrateAll" migration from the declarations in config/models.
share [mkPersist sqlSettings, mkMigrate "migrateAll"] $(persistFile "config/models")
| masaedw/Yiki | Model.hs | bsd-2-clause | 444 | 0 | 8 | 56 | 49 | 28 | 21 | 6 | 0 |
{-# LANGUAGE QuasiQuotes, TypeFamilies, GeneralizedNewtypeDeriving, TemplateHaskell, GADTs, TypeSynonymInstances #-}
module Model where
import Yesod
import Data.Time
import Data.Text (Text)
import Database.Persist.MongoDB
import Language.Haskell.TH.Syntax
import Database.Persist
import Database.Persist.Base
import Database.Persist.TH
import Data.Aeson
import Data.Aeson.TH
import qualified Data.Aeson.Types as P
import Data.Text (pack, unpack)
import qualified Data.Text.Encoding as E
import qualified Data.Attoparsec.Number as N
import qualified Data.Map as M
import qualified Data.Vector as V
import Control.Monad (liftM)
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
share [mkPersist MkPersistSettings { mpsBackend = ConT ''Action }, mkMigrate "migrateAll"] $(persistFile "config/models")
-- | a champion row in a tier list, with free-form notes.
data ChampionEntry = ChampionEntry { ceChampId :: ChampionId
                                   , ceNotes :: Text
                                   } deriving (Show, Eq, Read)

-- | a textual divider between tiers.
data SeparatorEntry = SeparatorEntry { sepText :: Text } deriving (Show, Eq, Read)

-- | one row of a tier list: either a champion or a separator.
data TierListEntry = TierListEntryChamp ChampionEntry
                   | TierListEntrySep SeparatorEntry
    deriving (Show, Eq, Read)

-- | the ordered rows of a tier list.
data TierListData = TierListData { tldEntries :: [TierListEntry] } deriving (Show, Eq, Read)

-- ChampionId round-trips through JSON as a string: serialised with
-- 'show' and parsed back with 'reads'.
instance ToJSON ChampionId where
    toJSON = String . pack . show

instance FromJSON ChampionId where
    parseJSON (String text) =
        case reads (unpack text) of
            ((champId, _):_) -> return champId
            _ -> fail "Could not parse ChampionId"
    parseJSON _ = fail "Could not parse ChampionId"

-- derive the remaining JSON instances, stripping the field-name
-- prefixes ("ce" / "sep" / "tld"); TierListEntry keeps its
-- constructor names verbatim (drop 0)
$(deriveJSON (drop 2) ''ChampionEntry)
$(deriveJSON (drop 3) ''SeparatorEntry)
$(deriveJSON (drop 0) ''TierListEntry)
$(deriveJSON (drop 3) ''TierListData)
-- The four instances below persist their values via their JSON
-- encoding, relying on the structural PersistField Value instance
-- further down for the actual mapping.
instance PersistField TierListData where
    toPersistValue = toPersistValue . toJSON
    fromPersistValue val = fromPersistValue val >>= P.parseEither parseJSON
    sqlType _ = SqlString
    isNullable _ = False

instance PersistField ChampionEntry where
    toPersistValue = toPersistValue . toJSON
    fromPersistValue val = fromPersistValue val >>= P.parseEither parseJSON
    sqlType _ = SqlString
    isNullable _ = False

instance PersistField TierListEntry where
    toPersistValue = toPersistValue . toJSON
    fromPersistValue val = fromPersistValue val >>= P.parseEither parseJSON
    sqlType _ = SqlString
    isNullable _ = False

instance PersistField SeparatorEntry where
    toPersistValue = toPersistValue . toJSON
    fromPersistValue val = fromPersistValue val >>= P.parseEither parseJSON
    sqlType _ = SqlString
    isNullable _ = False

-- Structural bridge between aeson 'Value's and 'PersistValue's.
instance PersistField Value where
    toPersistValue (Object obj) = PersistMap . map (\(k,v) -> (k, toPersistValue v)) . M.toList $ obj
    toPersistValue (Array arr) = PersistList . map toPersistValue . V.toList $ arr
    toPersistValue (String text)= PersistText text
    toPersistValue (Number num) = case num of
        N.I int -> PersistInt64 . fromIntegral $ int
        N.D double -> PersistDouble double
    toPersistValue (Bool bool) = PersistBool bool
    toPersistValue (Null) = PersistNull
    -- decoding: day/time values come back as their 'show'n strings,
    -- and raw byte strings are decoded as UTF-8 -- NOTE(review):
    -- decodeUtf8 throws on invalid UTF-8; confirm inputs are valid
    fromPersistValue (PersistText text) = Right $ String text
    fromPersistValue (PersistByteString bs) = Right . String . E.decodeUtf8 $ bs
    fromPersistValue (PersistInt64 int) = Right . Number . N.I . fromIntegral $ int
    fromPersistValue (PersistDouble doub) = Right . Number . N.D $ doub
    fromPersistValue (PersistBool bool) = Right $ Bool bool
    fromPersistValue (PersistDay day) = Right . String . pack . show $ day
    fromPersistValue (PersistTimeOfDay time)= Right . String . pack . show $ time
    fromPersistValue (PersistUTCTime utc) = Right . String . pack . show $ utc
    fromPersistValue (PersistNull) = Right $ Null
    fromPersistValue (PersistList vals) = (Array . V.fromList) `liftM` (mapM (fromPersistValue) vals)
    -- maps decode element-wise; the first failing value aborts the
    -- whole decode with its error message
    fromPersistValue (PersistMap listPairs) = let parsePair (k,v) = case fromPersistValue v of
                                                                        Right s -> Right (k,s)
                                                                        Left m -> Left m
                                              in (Object . M.fromList) `liftM` (mapM parsePair listPairs)
    fromPersistValue (PersistObjectId bs) = Right . String . E.decodeUtf8 $ bs
    sqlType _ = SqlString
    isNullable _ = False
| periodic/Simple-Yesod-ToDo | Model.hs | bsd-2-clause | 4,688 | 0 | 14 | 1,192 | 1,294 | 676 | 618 | 86 | 0 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE LambdaCase #-}
module PreludeExtra
( module Exports
, module PreludeExtra
) where
import Control.Monad.State as Exports hiding (withState, withStateT)
import Control.Monad.Reader as Exports hiding (local)
import Control.Monad.Trans.Control as Exports hiding (control)
import Data.Maybe as Exports
import Data.Monoid as Exports
import Data.Foldable as Exports
import Control.Arrow as Exports
import Control.Concurrent as Exports
import Control.Concurrent.STM as Exports hiding (atomically)
import Data.Char as Exports
import Text.Read as Exports (readMaybe)
import Text.Printf as Exports
import Data.List as Exports (sort)
import System.Random as Exports
import System.Directory as Exports
import System.FilePath as Exports
import System.Environment as Exports
import Data.Dynamic as Exports
import Debug.Trace as Exports
import Data.Fixed as Exports
import Data.Time as Exports
import Data.IORef as Exports
import System.Mem as Exports
import Control.Monad.Catch as Exports
import Control.DeepSeq as Exports
import Data.Yaml as Exports (FromJSON, ToJSON)
import GHC.Generics as Exports (Generic)
import Control.Lens.Extra as Exports hiding (List, (<.>), children)
import Linear.Extra as Exports hiding (trace)
import Graphics.GL.Pal as Exports hiding (trace, getNow, ColorSpace(..)) -- using a faster getNow in Types
import Graphics.VR.Pal as Exports hiding (getNow, getDeltaTime)
import Animation.Pal as Exports hiding (getNow, exhaustTChan)
import Data.ECS as Exports
import Data.Sequence as Exports (Seq)
-- import qualified Data.Map as Map
import qualified Control.Concurrent.STM as STM
-- useMapM_ :: (MonadState s m) => Lens' s (Map k v) -> ((k,v) -> m b) -> m ()
-- useMapM_ aLens f = traverseM_ (Map.toList <$> use aLens) f
-- | runs the producer, then traverses the resulting container with the
-- given action.
traverseM :: (Monad m, Traversable t) => m (t a) -> (a -> m b) -> m (t b)
traverseM source act = do
    container <- source
    traverse act container

-- | like 'traverseM', but discards the results.
traverseM_ :: (Monad m, Foldable t) => m (t a) -> (a -> m b) -> m ()
traverseM_ source act = do
    container <- source
    traverse_ act container
-- | like 'traverseM_', but reads the container through a state lens.
useTraverseM_ :: (MonadState s m, Foldable t) => Lens' s (t a) -> (a -> m b) -> m ()
useTraverseM_ aLens f = traverseM_ (use aLens) f
-- | 'map' with its arguments flipped, for trailing-lambda style.
for :: [a] -> (a -> b) -> [b]
for xs f = map f xs
-- | drains a 'TChan', returning all immediately available elements in
-- channel order.
exhaustTChan :: TChan a -> STM [a]
exhaustTChan chan = do
    next <- tryReadTChan chan
    case next of
        Nothing -> return []
        Just a -> (a :) <$> exhaustTChan chan

-- | generalization of 'STM.atomically' to any 'MonadIO'.
atomically :: MonadIO m => STM a -> m a
atomically = liftIO . STM.atomically
-- | the inverse of the 'lookAt' view matrix for the given eye, target
-- and up vectors -- presumably a camera-to-world transform that
-- orients @eye@ toward @target@; confirm against callers.
orientToward :: (Floating a, Epsilon a) => V3 a -> V3 a -> V3 a -> M44 a
orientToward eye target up = inv44 (lookAt eye target up)
| lukexi/rumpus | src/PreludeExtra.hs | bsd-3-clause | 2,554 | 0 | 10 | 438 | 806 | 482 | 324 | 56 | 2 |
-- |Declares data to represent the input command stream
module Commands
where
import Geometry
import Environment
-- |Represents the overall input stream, which looks like this:
--
-- @
-- 5 5
-- 1 2 N
-- LMLMLMLMM
-- 3 3 E
-- MMRMMRMRRM
-- @
data OverallInput = OverallInput {
    inputPlateau :: PlateauOrError, -- ^plateau description from the first input line (or a parse error)
    inputRovers :: [RoverInput] -- ^one block per rover, in input order
  } deriving (Show,Eq,Ord)

-- |Represents the input stream for a particular 'Rover', which looks like this:
--
-- @
-- 1 2 N
-- LMLMLMLMM
-- @
data RoverInput = RoverInput {
    inputRover :: RoverPos, -- ^the rover's starting position and heading
    inputCommands :: [Command] -- ^the command sequence to execute, in order
  } deriving (Show,Eq,Ord)

-- |Represents a single command: either a turn left/right or move forwards.
-- Looks like one of these:
--
-- @
-- L
-- R
-- M
-- @
data Command = Turn Rotation -- ^rotate the rover (\"L\" or \"R\" in the input)
             | Forwards -- ^move the rover forwards (\"M\" in the input)
    deriving (Show,Eq,Ord)
| garethrowlands/marsrover | src/Commands.hs | bsd-3-clause | 899 | 0 | 9 | 215 | 141 | 94 | 47 | 14 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
-- © 2002 Peter Thiemann
module WASH.CGI.CGIConfig where
import Control.Exception
import System.Environment
import System.IO
import System.IO.Unsafe
-- transactionDir was missing from this signature in the original;
-- all of these are literal path constants derived from globalRoot
tmpDir, varDir, imageDir, emailTmpDir, frameDir, persistentDir, persistent2Dir,
  transactionDir, registryDir, keyFile, pbmPath, catProgram, sendmailProgram :: String

-- |global root for WASH data: the user's home directory, or "" when
-- $HOME is not set (e.g. under a restricted CGI account).
globalRoot :: String
globalRoot =
  unsafePerformIO (catch (getEnv "HOME") (\ (_ :: SomeException) -> return ""))
-- NOINLINE makes the environment lookup happen (at most) once and
-- keeps the unsafePerformIO CAF from being duplicated by the optimiser
{-# NOINLINE globalRoot #-}

-- temporary storage
tmpDir = globalRoot ++ "/tmp/"
-- persistent, mutable storage
varDir = globalRoot ++ "/tmp/"
imageDir = tmpDir ++ "Images/"
emailTmpDir = tmpDir
frameDir = tmpDir ++ "Frames/"
persistentDir = varDir ++ "Persistent/"
persistent2Dir = varDir ++ "Persistent2/"
transactionDir = varDir ++ "Transactions/"
registryDir = tmpDir ++ "REGISTRY/"
keyFile = varDir ++ "KEYFILE"
-- path to PBMplus programs
pbmPath = "/usr/X11R6/bin/"
-- path of cat program
catProgram = "/bin/cat"
-- path of sendmail program
sendmailProgram = "/usr/sbin/sendmail"
| nh2/WashNGo | WASH/CGI/CGIConfig.hs | bsd-3-clause | 1,052 | 20 | 11 | 150 | 229 | 142 | 87 | 23 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeFamilyDependencies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE FlexibleContexts #-}
module VectorTest where
import Data.Proxy
import Data.Kind
import Data.Type.Equality
import Unsafe.Coerce
-- type family Sing (p :: (k -> *)) = r | r -> k
-- type Π = Sing
-- type family Sing (p :: (k -> *)) = r | r -> k
-- type Π = Sing
-- data Π (k :: *) :: (k -> *) -> * where
-- V :: k
-- * Nat

-- | Peano naturals (also used promoted, at the type level, below).
data Nat :: * where
    Zero :: Nat
    Succ :: Nat -> Nat
-- | 'Num' for Peano naturals. Addition, multiplication, 'abs' and
-- 'signum' are implemented properly (the original left them all as
-- @error "crap"@ stubs); 'negate' stays partial because non-zero
-- naturals have no negatives.
instance Num Nat where
    Zero + m = m
    Succ n + m = Succ (n + m)
    Zero * _ = Zero
    Succ n * m = m + (n * m)
    abs = id
    signum Zero = Zero
    signum (Succ _) = Succ Zero
    negate Zero = Zero
    negate _ = error "Num Nat: negate is undefined for non-zero naturals"
    fromInteger 0 = Zero
    fromInteger x = Succ (fromInteger (x - 1))
-- plus_succ :: Proxy n -> Proxy m -> (n + Succ m) :~: (Succ n + m)
-- plus_succ _ _ = _

-- | singleton for 'Nat': one runtime constructor per type-level number.
data SNat :: Nat -> * where
    SZero :: SNat Zero
    SSucc :: SNat n -> SNat (Succ n)

deriving instance Show (SNat n)

-- | existential package: hides the index @a@ of an indexed type @p@.
data Exists (k :: *) :: (k -> *) -> * where
    Pack :: forall (p :: k -> *) (a :: k). p a -> Exists k p

deriving instance Show (Exists Nat SNat)
deriving instance (Show a) => Show (Exists Nat (Vec a))
-- deriving instance (Show (p a)) => Show (Exists n p)

type family Foo :: (* -> Constraint) -> Constraint

-- | maps an index-transforming function over the packed value.
modify :: forall k (p :: k -> *) (f :: k -> k).
    (forall a. p a -> p (f a)) ->
    Exists k p -> Exists k p
modify f (Pack x) = (Pack (f x))

-- | index-preserving modification
modify' :: forall k (p :: k -> *).
    (forall a. p a -> p a) ->
    Exists k p -> Exists k p
modify' f (Pack x) = (Pack (f x))

-- | reflects a runtime 'Nat' into a packed singleton.
refineNat :: Nat -> Exists Nat SNat
refineNat Zero = Pack SZero
refineNat (Succ n) = modify SSucc (refineNat n)

-- | length-indexed vector.
data Vec :: * -> Nat -> * where
    Nil :: Vec a Zero
    (:.) :: a -> Vec a n -> Vec a (Succ n)
infixr 6 :.

deriving instance (Show a) => Show (Vec a n)

-- | converts a list into a vector whose length index is hidden.
refineList :: [a] -> Exists Nat (Vec a)
refineList [] = Pack Nil
refineList (x : xs) = modify (x :.) (refineList xs)

-- | type-level addition of 'Nat's.
type family (n :: Nat) + (m :: Nat) where
    Zero + y = y
    (Succ x) + y = Succ (x + y)

concatVec :: Vec a n -> Vec a m -> Vec a (n + m)
concatVec Nil ys = ys
concatVec (x :. xs) ys = x :. (concatVec xs ys)

-- fixme: make safe
-- NOTE(review): unsafeCoerce papers over the index equation
-- @n + Zero ~ n@, which GHC cannot derive from the type family alone;
-- a proof term (induction over a singleton) would make this safe.
reverseVec :: Vec a n -> Vec a n
reverseVec x = unsafeCoerce $ reverseVecAux x Nil

-- fixme: make safe
-- NOTE(review): coerces past the missing @n + Succ m ~ Succ n + m@ lemma
-- (the commented-out plus_succ above).
reverseVecAux :: forall a n m. Vec a n -> Vec a m -> Vec a (n + m)
reverseVecAux Nil ys = ys
reverseVecAux (x :. xs) ys = unsafeCoerce $ reverseVecAux xs (x :. ys)
-- * test

-- | prints a labelled value, framed by blank lines.
test :: (Show a) => String -> a -> IO ()
test label value = do
    putStrLn (" ∘ " ++ label ++ "\n")
    print value
    putStrLn "\n"

-- | smoke-tests the refinement and vector operations by printing them.
main :: IO ()
main = do
    test "refineNat:" (refineNat 42)
    test "refineList:" (refineList [1, 2, 3, 4, 5])
    test "concatVec: (with Nil)" (modify' (concatVec Nil) (refineList [1, 2, 3, 4, 5]))
    test "concatVec: (with (1 :. Nil))" (modify (concatVec (1 :. Nil)) (refineList [1, 2, 3, 4, 5]))
    test "reverseVec:" (modify' reverseVec (refineList [1, 2, 3, 4, 5]))
| sleexyz/haskell-fun | VectorTest.hs | bsd-3-clause | 3,391 | 0 | 12 | 767 | 1,235 | 666 | 569 | 83 | 1 |
{-# LANGUAGE TupleSections, GeneralizedNewtypeDeriving #-}
-- |A graph implementation mapping hashed S to a mapping of
-- hashed P to hashed O, backed by 'Data.HashMap'.
module Data.RDF.Graph.HashMapS (HashMapS) where
import Prelude hiding (pred)
import Control.DeepSeq (NFData)
import Data.RDF.Types
import Data.RDF.Query
import Data.RDF.Namespace
import qualified Data.Map as Map
import Data.Hashable()
import Data.HashMap.Strict(HashMap)
import qualified Data.HashMap.Strict as HashMap
import Data.HashSet(HashSet)
import qualified Data.HashSet as Set
import Data.List
-- |A map-based graph implementation.
--
-- This instance of 'RDF' is an adjacency map with each subject
-- mapping to a mapping from a predicate node to to the adjacent nodes
-- via that predicate.
--
-- Given the following triples graph::
--
-- @
-- (http:\/\/example.com\/s1,http:\/\/example.com\/p1,http:\/\/example.com\/o1)
-- (http:\/\/example.com\/s1,http:\/\/example.com\/p1,http:\/\/example.com\/o2)
-- (http:\/\/example.com\/s1,http:\/\/example.com\/p2,http:\/\/example.com\/o1)
-- (http:\/\/example.com\/s2,http:\/\/example.com\/p3,http:\/\/example.com\/o3)
-- @
--
-- where
--
-- > hash "http://example.com/s1" = 1600134414
-- > hash "http://example.com/s2" = 1600134413
-- > hash "http://example.com/p1" = 1616912099
-- > hash "http://example.com/p2" = 1616912096
-- > hash "http://example.com/p3" = 1616912097
-- > hash "http://example.com/o1" = 1935686794
-- > hash "http://example.com/o2" = 1935686793
-- > hash "http://example.com/o3" = 1935686792
--
-- the in-memory hashmap representation of the triples graph is:
--
-- @
-- key:1600134414, value:(key:1616912099, value:[1935686794 -- (..\/s1,..\/p1,..\/o1)
-- ,1935686793]; -- (..\/s1,..\/p1,..\/o2)
-- key:1616912096, value:[1935686794]); -- (..\/s1,..\/p2,..\/o1)
-- key:1600134413, value:(key:1616912097, value:[1935686792]) -- (..\/s1,..\/p3,..\/o3)
-- @
--
-- Worst-case time complexity of the graph functions, with respect
-- to the number of triples, are:
--
-- * 'empty' : O(1)
--
-- * 'mkRdf' : O(n)
--
-- * 'triplesOf': O(n)
--
-- * 'select' : O(n)
--
-- * 'query' : O(log n)
newtype HashMapS = HashMapS (TMaps, Maybe BaseUrl, PrefixMappings)
deriving (NFData)
instance RDF HashMapS where
baseUrl = baseUrl'
prefixMappings = prefixMappings'
addPrefixMappings = addPrefixMappings'
empty = empty'
mkRdf = mkRdf'
triplesOf = triplesOf'
uniqTriplesOf = uniqTriplesOf'
select = select'
query = query'
instance Show HashMapS where
show gr = concatMap (\t -> show t ++ "\n") (triplesOf gr)
-- some convenience type alias for readability
-- An adjacency map for a subject, mapping from a predicate node to
-- to the adjacent nodes via that predicate.
type AdjacencyMap = HashMap Predicate (HashSet Node)
type Adjacencies = HashSet Node
type TMap = HashMap Node AdjacencyMap
type TMaps = (TMap, TMap)
-- | Extract the graph's optional base URL.
baseUrl' :: HashMapS -> Maybe BaseUrl
baseUrl' (HashMapS (_, baseURL, _)) = baseURL

-- | Extract the graph's prefix mappings.
prefixMappings' :: HashMapS -> PrefixMappings
prefixMappings' (HashMapS (_, _, pms)) = pms

-- | Merge in additional prefix mappings.  The 'Bool' selects which side
-- wins on duplicate prefixes: 'True' prefers the new mappings (the merge
-- is flipped), 'False' keeps the existing ones.
addPrefixMappings' :: HashMapS -> PrefixMappings -> Bool -> HashMapS
addPrefixMappings' (HashMapS (ts, baseURL, pms)) pms' replace =
  let merge = if replace then flip mergePrefixMappings else mergePrefixMappings
  in  HashMapS (ts, baseURL, merge pms pms')

-- | A graph with no triples, no base URL and no prefix mappings.
empty' :: HashMapS
empty' = HashMapS ((HashMap.empty, HashMap.empty), Nothing, PrefixMappings Map.empty)

-- | Build a graph from a triple list by folding it into both indexes.
mkRdf' :: Triples -> Maybe BaseUrl -> PrefixMappings -> HashMapS
mkRdf' ts baseURL pms = HashMapS (mergeTs (HashMap.empty, HashMap.empty) ts, baseURL, pms)
-- | Fold a list of triples into the pair of (SPO, OPS) indexes.
mergeTs :: TMaps -> [Triple] -> TMaps
mergeTs = foldl' mergeT
  where
    -- Insert a single triple into both indexes.
    mergeT :: TMaps -> Triple -> TMaps
    mergeT m t = mergeT' m (subjectOf t) (predicateOf t) (objectOf t)
    -- The OPS index stores the same association with subject and object
    -- swapped, so object lookups mirror subject lookups.
    mergeT' :: TMaps -> Subject -> Predicate -> Object -> TMaps
    mergeT' (spo, ops) s p o = (mergeT'' spo s p o, mergeT'' ops o p s)
    -- NOTE(review): despite the parameter names, 's' here is simply the
    -- outer map key and 'o' the leaf value; for the OPS index the roles
    -- are reversed by the caller above.
    mergeT'' :: TMap -> Subject -> Predicate -> Object -> TMap
    mergeT'' m s p o =
      if s `HashMap.member` m then
        -- Key already present: extend either an existing predicate's
        -- leaf set or the key's predicate map.
        (if p `HashMap.member` adjs then HashMap.insert s addPredObj m
            else HashMap.insert s addNewPredObjMap m)
        else HashMap.insert s newPredMap m
      where
        -- Predicate map currently stored under 's' (empty if absent).
        adjs = HashMap.lookupDefault HashMap.empty s m
        newPredMap :: HashMap Predicate (HashSet Object)
        newPredMap = HashMap.singleton p (Set.singleton o)
        addNewPredObjMap :: HashMap Predicate (HashSet Object)
        addNewPredObjMap = HashMap.insert p (Set.singleton o) adjs
        addPredObj :: HashMap Predicate (HashSet Object)
        addPredObj = HashMap.insert p (Set.insert o (get p adjs)) adjs
        --get :: (Ord k, Hashable k) => k -> HashMap k v -> v
        get = HashMap.lookupDefault Set.empty
-- 3 following functions support triplesOf

-- | All triples of the graph, read off the SPO index only.
triplesOf' :: HashMapS -> Triples
triplesOf' (HashMapS ((spoMap, _), _, _)) = concatMap (uncurry tripsSubj) subjPredMaps
  where subjPredMaps = HashMap.toList spoMap

-- naive implementation for now
-- NOTE(review): 'nub' is O(n^2) in the number of triples.
uniqTriplesOf' :: HashMapS -> Triples
uniqTriplesOf' = nub . expandTriples

-- | All triples with the given subject, from its adjacency map.
tripsSubj :: Subject -> AdjacencyMap -> Triples
tripsSubj s adjMap = concatMap (uncurry (tfsp s)) (HashMap.toList adjMap)
  where tfsp = tripsForSubjPred

-- | Expand one (subject, predicate) entry into concrete triples.
tripsForSubjPred :: Subject -> Predicate -> Adjacencies -> Triples
tripsForSubjPred s p adjs = map (Triple s p) (Set.toList adjs)
-- supports select

-- | Select triples matching three optional node predicates.  Only the
-- SPO index is consulted; a 'Nothing' selector matches every node at
-- that position.
select' :: HashMapS -> NodeSelector -> NodeSelector -> NodeSelector -> Triples
select' (HashMapS ((spoMap,_),_,_)) subjFn predFn objFn =
  map (\(s,p,o) -> Triple s p o) $ Set.toList $ sel1 subjFn predFn objFn spoMap

-- | Filter the subjects, then fan out over each surviving subject's
-- predicate map.
sel1 :: NodeSelector -> NodeSelector -> NodeSelector -> TMap -> HashSet (Node, Node, Node)
sel1 (Just subjFn) p o spoMap =
  Set.unions $ map (sel2 p o) $ filter (\(x,_) -> subjFn x) $ HashMap.toList spoMap
sel1 Nothing p o spoMap = Set.unions $ map (sel2 p o) $ HashMap.toList spoMap

-- | Filter the predicates of one subject, then collect its objects.
sel2 :: NodeSelector -> NodeSelector -> (Node, HashMap Node (HashSet Node)) -> HashSet (Node, Node, Node)
sel2 (Just predFn) mobjFn (s, ps) =
  Set.map (\(p,o) -> (s,p,o)) $
    foldl' Set.union Set.empty $
      map (sel3 mobjFn) poMapS :: HashSet (Node, Node, Node)
  where
    poMapS :: [(Node, HashSet Node)]
    poMapS = filter (\(k,_) -> predFn k) $ HashMap.toList ps
sel2 Nothing mobjFn (s, ps) =
  Set.map (\(p,o) -> (s,p,o)) $
    foldl' Set.union Set.empty $
      map (sel3 mobjFn) poMaps
  where
    poMaps = HashMap.toList ps

-- | Filter one predicate's object set (or take it whole).
sel3 :: NodeSelector -> (Node, HashSet Node) -> HashSet (Node, Node)
sel3 (Just objFn) (p, os) = Set.map (\o -> (p, o)) $ Set.filter objFn os
sel3 Nothing (p, os) = Set.map (\o -> (p, o)) os
-- support query

-- | Exact-node query.  Uses whichever index fits: SPO when the subject
-- is known, OPS when only the object is known, and a full scan of the
-- SPO index otherwise.  (The name 'pred' shadows 'Prelude.pred'.)
query' :: HashMapS -> Maybe Subject -> Maybe Predicate -> Maybe Object -> Triples
query' (HashMapS (m,_ , _)) subj pred obj = map f $ Set.toList $ q1 subj pred obj m
  where f (s, p, o) = Triple s p o

-- | Dispatch on which of subject/object is fixed.  Clause order
-- matters: a known subject takes priority over a known object.  Results
-- from the OPS index come out as (object, predicate, subject) and are
-- flipped back here.
q1 :: Maybe Node -> Maybe Node -> Maybe Node -> TMaps -> HashSet (Node, Node, Node)
q1 (Just s) p o (spoMap, _ ) = q2 p o (s, HashMap.lookupDefault HashMap.empty s spoMap)
q1 s p (Just o) (_ , opsMap) = Set.map (\(o',p',s') -> (s',p',o')) $ q2 p s (o, HashMap.lookupDefault HashMap.empty o opsMap)
q1 Nothing p o (spoMap, _ ) = Set.unions $ map (q2 p o) $ HashMap.toList spoMap

-- | Within one key's predicate map, restrict to a known predicate or
-- take all predicates.
q2 :: Maybe Node -> Maybe Node -> (Node, HashMap Node (HashSet Node)) -> HashSet (Node, Node, Node)
q2 (Just p) o (s, pmap) =
  maybe Set.empty (Set.map (\ (p', o') -> (s, p', o')) . q3 o . (p,)) $ HashMap.lookup p pmap
q2 Nothing o (s, pmap) = Set.map (\(x,y) -> (s,x,y)) $ Set.unions $ map (q3 o) opmaps
  where opmaps ::[(Node, HashSet Node)]
        opmaps = HashMap.toList pmap

-- | Restrict one predicate's leaf set to a known node, or take it all.
q3 :: Maybe Node -> (Node, HashSet Node) -> HashSet (Node, Node)
q3 (Just o) (p, os) = if o `Set.member` os then Set.singleton (p, o) else Set.empty
q3 Nothing (p, os) = Set.map (\o -> (p, o)) os
| jutaro/rdf4h | src/Data/RDF/Graph/HashMapS.hs | bsd-3-clause | 7,912 | 0 | 13 | 1,540 | 2,491 | 1,369 | 1,122 | 112 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Pkgs.Pretty where
import Data.List
import Text.PrettyPrint
import Pkgs.Syntax
-- 'Show' here means "pretty-print", not a derived, 'Read'-compatible
-- rendering: every instance goes through the pretty-printer below.
instance Show Toplevel where
  show = pretty
instance Show PkgTerm where
  show = render . prettyPkg
instance Show IfaceTerm where
  show = render . prettyIface
instance Show Expr where
  show = render . prettyExpr
instance Show Type where
  show = render . prettyType

-- | Render a top-level term (package or interface) to a string.
pretty :: Toplevel -> String
pretty (P pkg) = show pkg
pretty (I iface) = show iface
-- | Render a package term: a header line ("pkg <name> <args> impl
-- <iface>") followed by a braced body indented four spaces.
prettyPkg :: PkgTerm -> Doc
prettyPkg (Pkg pName args iName (PBody imports defs))
  = let pkg = "pkg" <+> text pName <+> prettyArgs args <+>
              "impl" <+> text iName
        body = vcat $ bodyList $ map prettyImport imports ++ map prettyDef defs
    in vcat [ pkg <+> "{", nest 4 body, "}" ]

-- | Render an interface term: a header line ("iface <name>(<args>)"
-- plus optional supertype) followed by a braced body of declarations.
prettyIface :: IfaceTerm -> Doc
prettyIface (Iface iName args subtype (IBody decls))
  = let iface = "iface" <+> text iName <> parens (prettyArgs args) <+>
                prettySubtype subtype
        body = vcat $ bodyList $ map prettyDecl decls
    in vcat [ iface <+> "{", nest 4 body, "}" ]
-- | Render a parameter list, comma-separated.
prettyArgs :: [PArg] -> Doc
prettyArgs = hjoin "," . map prettyArg

-- | Render one parameter as "name : interface-expression".
prettyArg :: PArg -> Doc
prettyArg (PArg argName iexpr) = text argName <+> ":" <+> prettyIExpr iexpr

-- | Render an import clause: "import <pkg> : <iface> as <pkg-expr>".
prettyImport :: PImport -> Doc
prettyImport (PImport pName iName pExpr)
  = "import" <+> text pName <+> ":" <+> text iName <+>
    "as" <+> prettyPExpr pExpr

-- | Render a definition: a type synonym or a function with its type.
prettyDef :: Def -> Doc
prettyDef (TypeDef tName t)
  = "type" <+> text tName <+> "=" <+> prettyType t
prettyDef (FunDef f t e)
  = "fun" <+> text f <+> ":" <+> prettyType t <+> "=" <+> prettyExpr e

-- | Render a declaration (no right-hand side, unlike 'prettyDef').
prettyDecl :: Decl -> Doc
prettyDecl (TypeDecl tName) = "type" <+> text tName
prettyDecl (FunDecl f t) = "fun" <+> text f <+> ":" <+> prettyType t
-- Render a package / interface expression as "name(arg, arg, ...)".
-- NOTE(review): both bindings lack top-level type signatures
-- (presumably @PExpr -> Doc@ and @IExpr -> Doc@ -- confirm against
-- Pkgs.Syntax before adding them).
prettyPExpr (PExpr pName args)
  = text pName <> parens (argList (map prettyExpr args))
prettyIExpr (IExpr ivName args)
  = text ivName <> parens (argList (map text args))
-- | Render an optional supertype constraint ("<: Name"), or nothing.
--
-- Fixed: the previous version appended @\<+\> ""@ to the 'Just' case and
-- returned @""@ for 'Nothing'.  With OverloadedStrings, @""@ is
-- @text ""@ -- a zero-width but non-empty document -- so '<+>' still
-- inserted a space around it, leaving stray trailing spaces in the
-- rendered header.  'empty' is the true identity of '<+>'.
prettySubtype :: Maybe IVName -> Doc
prettySubtype (Just iName) = "<:" <+> text iName
prettySubtype Nothing      = empty
------------------------------
-- Core-language pretty printers
-- | Render a core-language expression.  Applications and additions are
-- always fully parenthesised; lambdas use a lambda glyph.
prettyExpr :: Expr -> Doc
prettyExpr (Lam x t e) = "λ" <> text x <> ":" <> prettyType t <> "." <> prettyExpr e
prettyExpr (App e1 e2) = parens (prettyExpr e1 <+> prettyExpr e2)
prettyExpr (Plus e1 e2) = parens (prettyExpr e1 <+> "+" <+> prettyExpr e2)
prettyExpr (Proj p f) = prettyExpr p <> "." <> text f
prettyExpr (Var x) = text x
prettyExpr (IntVal x) = int x
prettyExpr Unit = "()"
-- Package values render as a braced, semicolon-terminated body on one
-- line ('hcat', unlike the vertical layout used by 'prettyPkg').
prettyExpr (PkgVal imps defs)
  = braces (hcat $ bodyList $ map prettyImport imps ++ map prettyDef defs)

-- | Render a core-language type; arrows are fully parenthesised.
prettyType :: Type -> Doc
prettyType (Arrow t1 t2) = parens (prettyType t1 <+> "->" <+> prettyType t2)
prettyType (TypeName t) = text t
prettyType (ProjType p t) = text p <> "." <> text t
prettyType IntType = "Int"
prettyType UnitType = "Unit"
prettyType (PkgType tName decls)
  = text tName <> braces (hcat $ bodyList $ map prettyDecl decls)
------------------------------
-- Formatting helpers
-- | Comma-separated argument list on a single line.
argList :: [Doc] -> Doc
argList = hjoin ", "

-- | Terminate each body item with a semicolon.
bodyList :: [Doc] -> [Doc]
bodyList = map (<>";")

-- | Join documents horizontally with @sep@ between consecutive items.
--
-- Fixed: this was implemented with 'vcat', which put every document --
-- and every separator -- on its own line, even though the name says
-- /horizontal/ join and every call site ("," and ", ") expects a
-- one-line, comma-separated list.  'hcat' concatenates on one line.
-- (Also fixes the "seperator" spelling.)
hjoin :: Doc -> [Doc] -> Doc
hjoin sep = hcat . intersperse sep
| markflorisson/packages | Pkgs/Pretty.hs | bsd-3-clause | 3,283 | 0 | 13 | 743 | 1,252 | 619 | 633 | 77 | 1 |
module Main where
import Test.Framework (defaultMain, testGroup)
------------------------------------------------------------------------------
-- | Entry point: run the (currently empty) test-framework suite.
main :: IO ()
main = defaultMain tests
  where
    -- No test groups registered yet; add 'testGroup' entries here.
    tests = [
            ]
| snapframework/logging-streams | test/TestSuite.hs | bsd-3-clause | 231 | 0 | 7 | 51 | 45 | 26 | 19 | 5 | 1 |
{-# LANGUAGE EmptyDataDecls, GADTs, FlexibleContexts, FlexibleInstances, UndecidableInstances, TypeSynonymInstances, DeriveDataTypeable, RecordWildCards, ScopedTypeVariables, MultiParamTypeClasses #-}
module YaLedger.Types
(module YaLedger.Tree,
module YaLedger.Types.Common,
module YaLedger.Types.Monad,
module YaLedger.Types.Ledger,
module YaLedger.Types.Map,
module YaLedger.Types.Transactions,
module YaLedger.Types.Attributes,
module YaLedger.Types.Config,
Rule (..)
) where
import YaLedger.Tree
import YaLedger.Types.Common
import YaLedger.Types.Monad
import YaLedger.Types.Ledger
import YaLedger.Types.Map
import YaLedger.Types.Transactions
import YaLedger.Types.Attributes
import YaLedger.Types.Config
| portnov/yaledger | YaLedger/Types.hs | bsd-3-clause | 741 | 0 | 5 | 82 | 118 | 81 | 37 | 19 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
#ifndef MIN_VERSION_profunctors
#define MIN_VERSION_profunctors(x,y,z) 0
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Machine.Mealy
-- Copyright : (C) 2012 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : portable
--
-- <http://en.wikipedia.org/wiki/Mealy_machine>
----------------------------------------------------------------------------
module Data.Machine.Mealy
( Mealy(..)
, unfoldMealy
, logMealy
) where
import Control.Applicative
import Control.Arrow
import Control.Category
import Control.Monad.Fix
import Control.Monad.Reader.Class
import Control.Monad.Zip
import Data.Distributive
import Data.Functor.Extend
import Data.Functor.Rep as Functor
import Data.List.NonEmpty as NonEmpty
import Data.Machine.Plan
import Data.Machine.Type
import Data.Machine.Process
import Data.Profunctor.Closed
import Data.Profunctor
import Data.Profunctor.Sieve
import Data.Profunctor.Rep as Profunctor
import Data.Pointed
import Data.Semigroup
import Data.Sequence as Seq
import Prelude hiding ((.),id)
-- $setup
-- >>> import Data.Machine.Source
-- | 'Mealy' machines
--
-- ==== Examples
--
-- We can enumerate inputs:
--
-- >>> let countingMealy = unfoldMealy (\i x -> ((i, x), i + 1)) 0
-- >>> run (auto countingMealy <~ source "word")
-- [(0,'w'),(1,'o'),(2,'r'),(3,'d')]
--
-- A Mealy machine consumes one input and yields an output together
-- with the machine to use for the next input.
newtype Mealy a b = Mealy { runMealy :: a -> (b, Mealy a b) }

instance Functor (Mealy a) where
  -- Map over every output, including those of all successor machines.
  fmap f (Mealy m) = Mealy $ \a -> case m a of
    (b, n) -> (f b, fmap f n)
  {-# INLINE fmap #-}
  -- (<$) ignores the machine entirely: a constant machine suffices.
  b <$ _ = pure b
  {-# INLINE (<$) #-}
instance Applicative (Mealy a) where
  -- The constant machine, tied into a knot so it is a single shared node.
  pure b = r where r = Mealy (const (b, r))
  {-# INLINE pure #-}
  -- Feed the same input to both machines and apply the outputs.
  Mealy m <*> Mealy n = Mealy $ \a -> case m a of
    (f, m') -> case n a of
      (b, n') -> (f b, m' <*> n')
  -- Outputs carry no effects, so the discarded side can be dropped
  -- entirely rather than stepped.
  m <* _ = m
  {-# INLINE (<*) #-}
  _ *> n = n
  {-# INLINE (*>) #-}

instance Pointed (Mealy a) where
  -- Same knot-tied constant machine as 'pure'.
  point b = r where r = Mealy (const (b, r))
  {-# INLINE point #-}

instance Extend (Mealy a) where
  -- Each step's "context" is the successor machine itself.
  duplicated (Mealy m) = Mealy $ \a -> case m a of
    (_, b) -> (b, duplicated b)
-- | A 'Mealy' machine modeled with explicit state.
unfoldMealy :: (s -> a -> (b, s)) -> s -> Mealy a b
unfoldMealy f = go where
  go s = Mealy $ \a -> case f s a of
    (b, t) -> (b, go t)
{-# INLINE unfoldMealy #-}

-- | slow diagonalization
instance Monad (Mealy a) where
  return b = r where r = Mealy (const (b, r))
  {-# INLINE return #-}
  -- Diagonal step: run @m@ on the input, then run the machine chosen by
  -- @f b@ on the *same* input, keeping only its first output.
  m >>= f = Mealy $ \a -> case runMealy m a of
    (b, m') -> (fst (runMealy (f b) a), m' >>= f)
  {-# INLINE (>>=) #-}
  _ >> n = n
  {-# INLINE (>>) #-}
instance Profunctor Mealy where
  rmap = fmap
  {-# INLINE rmap #-}
  -- Pre-compose every input with @f@, recursively.
  lmap f = go where
    go (Mealy m) = Mealy $ \a -> case m (f a) of
      (b, n) -> (b, go n)
  {-# INLINE lmap #-}
#if MIN_VERSION_profunctors(3,1,1)
  dimap f g = go where
    go (Mealy m) = Mealy $ \a -> case m (f a) of
      (b, n) -> (g b, go n)
  {-# INLINE dimap #-}
#endif

instance Automaton Mealy where
  -- Lift a Mealy machine into a machines 'Process': await an input,
  -- yield the corresponding output, continue with the successor.
  auto = construct . go where
    go (Mealy f) = await >>= \a -> case f a of
      (b, m) -> do
        yield b
        go m
  {-# INLINE auto #-}
instance Category Mealy where
  id = Mealy (\a -> (a, id))
  -- Pipe: feed the first machine's output into the second; both
  -- successors are composed for the next step.
  Mealy bc . Mealy ab = Mealy $ \a -> case ab a of
    (b, nab) -> case bc b of
      (c, nbc) -> (c, nbc . nab)

instance Arrow Mealy where
  -- A stateless machine: apply @f@ forever.
  arr f = r where r = Mealy (\a -> (f a, r))
  {-# INLINE arr #-}
  -- Thread the second tuple component through untouched.
  first (Mealy m) = Mealy $ \(a,c) -> case m a of
    (b, n) -> ((b, c), first n)

instance ArrowChoice Mealy where
  -- 'Right' inputs pass through; the machine only steps on 'Left'.
  left m = Mealy $ \a -> case a of
    Left l -> case runMealy m l of
      (b, m') -> (Left b, left m')
    Right r -> (Right r, left m)
  right m = Mealy $ \a -> case a of
    Left l -> (Left l, right m)
    Right r -> case runMealy m r of
      (b, m') -> (Right b, right m')
  -- Only the machine matching the input's side is stepped; the other
  -- keeps its current state.
  m +++ n = Mealy $ \a -> case a of
    Left b -> case runMealy m b of
      (c, m') -> (Left c, m' +++ n)
    Right b -> case runMealy n b of
      (c, n') -> (Right c, m +++ n')
  m ||| n = Mealy $ \a -> case a of
    Left b -> case runMealy m b of
      (d, m') -> (d, m' ||| n)
    Right b -> case runMealy n b of
      (d, n') -> (d, m ||| n')
#if MIN_VERSION_profunctors(3,2,0)
instance Strong Mealy where
  first' = first

instance Choice Mealy where
  left' = left
  right' = right
#endif

-- | Fast forward a mealy machine forward: replay the queued inputs
-- (discarding their outputs), then run the final input for real.
driveMealy :: Mealy a b -> Seq a -> a -> (b, Mealy a b)
driveMealy m xs z = case viewl xs of
  y :< ys -> case runMealy m y of
    (_, n) -> driveMealy n ys z
  EmptyL  -> runMealy m z

-- | Accumulate history.
logMealy :: Semigroup a => Mealy a a
logMealy = Mealy $ \a -> (a, h a) where
  -- @h acc@ emits @acc <> input@ and carries the grown accumulator on.
  h a = Mealy $ \b -> let c = a <> b in (c, h c)
{-# INLINE logMealy #-}

instance ArrowApply Mealy where
  -- 'app' must give each incoming machine the whole input history, so
  -- every input seen so far is queued and replayed via 'driveMealy'.
  app = go Seq.empty where
    go xs = Mealy $ \(m,x) -> case driveMealy m xs x of
      (c, _) -> (c, go (xs |> x))
  {-# INLINE app #-}

instance Distributive (Mealy a) where
  distribute fm = Mealy $ \a -> let fp = fmap (`runMealy` a) fm in
    (fmap fst fp, collect snd fp)
  collect k fa = Mealy $ \a -> let fp = fmap (\x -> runMealy (k x) a) fa in
    (fmap fst fp, collect snd fp)
-- A Mealy machine is representable: it is exactly a function from the
-- non-empty history of inputs (most recent last) to an output.
instance Functor.Representable (Mealy a) where
  type Rep (Mealy a) = NonEmpty a
  index = cosieve
  tabulate = cotabulate

instance Cosieve Mealy NonEmpty where
  -- Run the machine over the whole input list; return the last output.
  cosieve m0 (a0 :| as0) = go m0 a0 as0 where
    go (Mealy m) a as = case m a of
      (b, m') -> case as of
        []     -> b
        a':as' -> go m' a' as'

instance Costrong Mealy where
  unfirst = unfirstCorep
  unsecond = unsecondCorep

instance Profunctor.Corepresentable Mealy where
  type Corep Mealy = NonEmpty
  -- Rebuild a machine from a history function: inputs are accumulated
  -- in reverse and handed to @f@ oldest-first at every step.
  cotabulate f0 = Mealy $ \a -> go [a] f0 where
    go as f = (f (NonEmpty.fromList (Prelude.reverse as)), Mealy $ \b -> go (b:as) f)

-- The remaining instances are derived from representability.
instance MonadFix (Mealy a) where
  mfix = mfixRep

instance MonadZip (Mealy a) where
  mzipWith = mzipWithRep
  munzip m = (fmap fst m, fmap snd m)

instance MonadReader (NonEmpty a) (Mealy a) where
  ask = askRep
  local = localRep

instance Closed Mealy where
  closed m = cotabulate $ \fs x -> cosieve m (fmap ($x) fs)
| HuwCampbell/machines | src/Data/Machine/Mealy.hs | bsd-3-clause | 6,263 | 0 | 17 | 1,539 | 2,463 | 1,324 | 1,139 | 153 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import System.Environment
import System.IO
import Imager3000.Parse
import Imager3000.Download
import Imager3000.Concurrent.MVar
import Imager3000.Fetch
-- | Entry point.  Expects exactly one command-line argument: the URL of
-- the page whose images should be fetched.
--
-- Fixed: the previous @[url] <- getArgs@ was a partial pattern match
-- that died with a cryptic pattern-match failure when the argument
-- count was wrong; a wrong count now produces a usage message.
main :: IO ()
main = do
  --hSetBuffering stdout LineBuffering
  args <- getArgs
  case args of
    [url] -> run url
    _     -> error "usage: imager3000 URL"
-- | Download the page at @base_url@, extract its image URLs, and fetch
-- them concurrently (relative URLs are resolved against @base_url@).
run :: String -> IO ()
run base_url = do
  contents <- download base_url
  let imgs = getImages contents
  --print imgs
  let actions = map (\url -> fetch base_url url) imgs
  concurrently defaultConfig actions
| BsAsHaskell/imager3000 | src/Main.hs | bsd-3-clause | 533 | 0 | 13 | 109 | 148 | 76 | 72 | 18 | 1 |
module Module1.Task1 where
-- | Entry point: print a fixed greeting to stdout.
-- Fixed: added the missing top-level type signature (flagged by
-- -Wmissing-signatures).
main :: IO ()
main = putStrLn "Hello, world!"
| dstarcev/stepic-haskell | src/Module1/Task1.hs | bsd-3-clause | 63 | 0 | 5 | 12 | 14 | 8 | 6 | 2 | 1 |
module AERN2.AD.Type where
import MixedTypesNumPrelude
import AERN2.MP.Precision
-- | A value carrying zero, one or two orders of derivative information.
-- Every constructor has the 'diff_x' field, so it is safe to project on
-- any value.  NOTE(review): 'diff_dxt' appears to be a second
-- first-derivative slot used by order-two values -- confirm its exact
-- meaning against the arithmetic instances before relying on it.
data Differential a =
  OrderZero {diff_x :: a}
  | OrderOne {diff_x :: a, diff_dx :: a}
  | OrderTwo {diff_x :: a, diff_dx :: a, diff_dxt :: a, diff_d2x :: a}
  deriving (Show)
-- | The differentiation order (0, 1 or 2) stored in a value.
order :: Differential a -> Integer
order d =
  case d of
    OrderZero {}      -> 0
    OrderOne {}       -> 1
    OrderTwo {}       -> 2
-- | Types that can be lifted to a 'Differential' of a requested order.
class CanBeDifferential a where
  differential :: Integer -> a -> Differential a

instance
  (HasIntegers a) =>
  CanBeDifferential a
  where
  differential 0 a = OrderZero a
  differential 1 a = OrderOne a (convertExactly 0)
  -- Any other requested order (including negative ones) yields an
  -- order-two value; all derivative slots are seeded with exact 0.
  differential _ a = OrderTwo a (convertExactly 0) (convertExactly 0) (convertExactly 0)
-- | Apply a function uniformly to the value and every derivative slot.
instance Functor Differential where
  fmap f (OrderZero x) = OrderZero (f x)
  fmap f (OrderOne x dx) = OrderOne (f x) (f dx)
  fmap f (OrderTwo x dx dxt d2x) = OrderTwo (f x) (f dx) (f dxt) (f d2x)

instance
  (HasPrecision a) => (HasPrecision (Differential a))
  where
  -- Precision is read from the value slot only.
  getPrecision a = getPrecision (diff_x a) -- TODO: safe?

instance
  (CanSetPrecision a) => (CanSetPrecision (Differential a))
  where
  -- Unlike 'getPrecision', setting applies to every slot via 'fmap'.
  setPrecision p = fmap (setPrecision p)
-- | Replace the value slot, leaving all derivative slots untouched.
-- Record-update works for every constructor because each one carries
-- the 'diff_x' field.
setValue :: Differential a -> a -> Differential a
setValue d v = d { diff_x = v }
| michalkonecny/aern2 | aern2-mfun/src/AERN2/AD/Type.hs | bsd-3-clause | 1,425 | 2 | 10 | 357 | 566 | 290 | 276 | 34 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Web.Yo (
fromParameters
, Yo (..)
) where
import Control.Lens ((^?), (^.), (^..))
import Data.Aeson ((.:), decode, Value (Object, String))
import Data.Aeson.Lens (_String, key, nth, values)
import Data.ByteString (ByteString, isPrefixOf)
import qualified Data.ByteString.Lazy as LBS
import Data.List (find)
import Data.Maybe (listToMaybe)
import qualified Data.Text as T
import Text.Read (readMaybe)
import Network.Wreq
type Locality = String
type URL = String
type Username = String

-- | A received Yo, classified by its attachment: plain, a link (further
-- refined into photo/video by MIME type), or a location with an
-- optionally reverse-geocoded locality.
data Yo = JustYo { username :: Username }
        | YoLink { username :: Username, link :: URL }
        | YoPhoto { username :: Username, link :: URL }
        | YoVideo { username :: Username, link :: URL }
        | YoLocation { username :: Username, lat :: Double, lng :: Double, locality :: Maybe Locality }
        deriving (Eq, Show)

-- | Coarse classification of a linked resource by Content-Type.
data Media = NoMedia
           | Photo
           | Video
           deriving (Show)
-- | Build a 'Yo' from the webhook's query parameters; 'Nothing' when
-- the mandatory "username" parameter is absent.
fromParameters :: [(String, String)] -> IO (Maybe Yo)
fromParameters params =
  traverse (flip fromParametersWithUsername params) $ lookup "username" params

-- | Classify the Yo for a known sender.  Precedence: a "link" parameter
-- wins over a "location" parameter; with neither, it is a plain Yo.
fromParametersWithUsername :: Username -> [(String, String)] -> IO Yo
fromParametersWithUsername username params = do
  let link = lookup "link" params
      coord = lookup "location" params >>= parseCoordinate
  case (link, coord) of
    -- Note: this inner 'link' shadows the Maybe-typed one above.
    (Just link, _) -> do
      contentType <- contentTypeForURL link
      let mediaType = mediaForContentType <$> contentType
      -- No Content-Type (or an unrecognised one) degrades to YoLink.
      let constructor = case mediaType of
            Just Photo -> YoPhoto
            Just Video -> YoVideo
            _ -> YoLink
      return $ constructor username link
    (_, Just (lat, lng)) -> do
      locality <- reverseGeocode (lat, lng)
      return $ YoLocation username lat lng locality
    _ -> return $ JustYo username
  where
    -- "lat;lng" -> "(lat,lng)" -> readMaybe as a Double pair; any
    -- malformed input yields Nothing.
    parseCoordinate :: String -> Maybe (Double, Double)
    parseCoordinate = readMaybe . ('(' :) . (++ ")") . map (\c -> if c == ';' then ',' else c)
type ContentType = ByteString

-- | Issue a HEAD request and return the Content-Type header, but only
-- when the server answered 200; any other status yields 'Nothing'.
contentTypeForURL :: URL -> IO (Maybe ContentType)
contentTypeForURL link = do
  res <- head_ link
  let statusCode' = res ^. responseStatus . statusCode
      contentType = res ^? responseHeader "Content-Type"
  return $ if statusCode' == 200
    then contentType
    else Nothing
-- | Classify a MIME type: @image/*@ is 'Photo', @video/*@ is 'Video',
-- anything else is 'NoMedia'.  The first matching prefix wins.
mediaForContentType :: ContentType -> Media
mediaForContentType contentType
  | "image/" `isPrefixOf` contentType = Photo
  | "video/" `isPrefixOf` contentType = Video
  | otherwise                         = NoMedia
-- | Reverse-geocode a coordinate via the Google Maps geocoding API,
-- returning the "locality" (city) component of the first result, if
-- any step of the JSON traversal fails the result is 'Nothing'.
reverseGeocode :: (Double, Double) -> IO (Maybe Locality)
reverseGeocode (lat, lng) = localityForResponse <$> get url
  where
    -- Drill into results[0].address_components, find the component
    -- whose "types" contains "locality", and take its "long_name".
    localityForResponse res =
      (decode (res ^. responseBody) :: Maybe Value)
      >>= return . (^.. key "results" . nth 0 . key "address_components" . values)
      >>= find (\c -> String "locality" `elem` c ^.. key "types" . values)
      >>= (^? key "long_name" . _String)
      >>= return . T.unpack
    url = concat [
        "https://maps.googleapis.com/maps/api/geocode/json?sensor=false&latlng="
      , show lat
      , ","
      , show lng
      ]
| ryota-ka/yo-slack-adapter | src/Web/Yo.hs | bsd-3-clause | 3,744 | 0 | 18 | 1,250 | 1,052 | 583 | 469 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}
module Paths_FileServer (
version,
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
-- NOTE: this module is generated by Cabal/Stack; manual edits will be
-- overwritten on the next configure.
#if defined(VERSION_base)
#if MIN_VERSION_base(4,0,0)
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#else
catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a
#endif
#else
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#endif
-- Run an IO action, handling IO exceptions with the supplied handler.
catchIO = Exception.catch

-- | Package version as declared in the cabal file.
version :: Version
version = Version [0,1,0,0] []

-- Absolute install paths baked in at configure time (machine-specific).
bindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath
bindir     = "/home/ggunn/DFS/FileServer/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/bin"
libdir     = "/home/ggunn/DFS/FileServer/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/lib/x86_64-linux-ghc-8.0.1/FileServer-0.1.0.0-2xQQWPr9p7nWBP1v47ULz"
dynlibdir  = "/home/ggunn/DFS/FileServer/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/lib/x86_64-linux-ghc-8.0.1"
datadir    = "/home/ggunn/DFS/FileServer/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/share/x86_64-linux-ghc-8.0.1/FileServer-0.1.0.0"
libexecdir = "/home/ggunn/DFS/FileServer/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/libexec"
sysconfdir = "/home/ggunn/DFS/FileServer/.stack-work/install/x86_64-linux/lts-7.13/8.0.1/etc"
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
-- Each getter consults a "FileServer_*" environment-variable override
-- first and falls back to the configure-time constant.
getBinDir = catchIO (getEnv "FileServer_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "FileServer_libdir") (\_ -> return libdir)
getDynLibDir = catchIO (getEnv "FileServer_dynlibdir") (\_ -> return dynlibdir)
getDataDir = catchIO (getEnv "FileServer_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "FileServer_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "FileServer_sysconfdir") (\_ -> return sysconfdir)
-- | Resolve a data file relative to the package's data directory.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = fmap (\dir -> dir ++ "/" ++ name) getDataDir
| Garygunn94/DFS | FileServer/.stack-work/dist/x86_64-linux/Cabal-1.24.2.0/build/autogen/Paths_FileServer.hs | bsd-3-clause | 2,184 | 0 | 10 | 239 | 410 | 238 | 172 | 33 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Websave.Web.Dispatch
(
) where
import Websave.Web.AppData (App (..))
import Websave.Web.Handlers (getApiR)
import Websave.Web.Routes (Route (..), resourcesApp)
import Yesod.Core (mkYesodDispatch)
-- Template Haskell splice: generates the 'YesodDispatch' instance for
-- 'App' from the route definitions in 'resourcesApp'.
mkYesodDispatch "App" resourcesApp
| dimsmol/websave | src/Websave/Web/Dispatch.hs | bsd-3-clause | 351 | 0 | 6 | 42 | 75 | 48 | 27 | 10 | 0 |
module Game.Innovation.TypesSpec
where
import SpecHelper
import Game.Innovation.TestHelper
import Game.MetaGame
import Game.Innovation.Types
spec :: Spec
spec = do
  describe "User" $ do
    -- 'mkPlayer' wraps an id into a player; 'idOf' must recover it as a
    -- 'U' value unchanged.  (The lambda's 'id' shadows 'Prelude.id'.)
    it "idOf . mkPlayer = id" $
      property $ \id -> (idOf . mkPlayer) id == (U (id :: String))

-- | Run this module's spec standalone.
main :: IO ()
main = hspec spec
| maximilianhuber/innovation | test/Game/Innovation/TypesSpec.hs | bsd-3-clause | 329 | 0 | 15 | 72 | 110 | 60 | 50 | 12 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UndecidableInstances #-}
module PlaceHolder where
import Type ( Type )
import Outputable
import Name
import NameSet
import RdrName
import Var
import Coercion
import {-# SOURCE #-} ConLike (ConLike)
import TcEvidence (HsWrapper)
import Data.Data hiding ( Fixity )
import BasicTypes (Fixity)
{-
%************************************************************************
%* *
\subsection{Annotating the syntax}
%* *
%************************************************************************
-}
-- | used as place holder in PostTc and PostRn values
data PlaceHolder = PlaceHolder
  deriving (Data,Typeable)

-- | Types that are not defined until after type checking
type family PostTc it ty :: * -- Note [Pass sensitive types]
type instance PostTc Id      ty = ty
-- Before type checking (Name/RdrName passes) the slot is a placeholder.
type instance PostTc Name    ty = PlaceHolder
type instance PostTc RdrName ty = PlaceHolder

-- | Types that are not defined until after renaming
type family PostRn id ty :: * -- Note [Pass sensitive types]
type instance PostRn Id      ty = ty
-- Renamed and typechecked trees both carry the real value here.
type instance PostRn Name    ty = ty
type instance PostRn RdrName ty = PlaceHolder
-- Convenience fillers for the various placeholder slots.
placeHolderKind :: PlaceHolder
placeHolderKind = PlaceHolder

placeHolderFixity :: PlaceHolder
placeHolderFixity = PlaceHolder

placeHolderType :: PlaceHolder
placeHolderType = PlaceHolder

-- Unlike the others this slot is a real 'Type'; forcing it is a bug,
-- hence the panic.
placeHolderTypeTc :: Type
placeHolderTypeTc = panic "Evaluated the place holder for a PostTcType"

placeHolderNames :: PlaceHolder
placeHolderNames = PlaceHolder

placeHolderNamesTc :: NameSet
placeHolderNamesTc = emptyNameSet
{-
Note [Pass sensitive types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Since the same AST types are re-used through parsing,renaming and type
checking there are naturally some places in the AST that do not have
any meaningful value prior to the pass they are assigned a value.
Historically these have been filled in with place holder values of the form
panic "error message"
This has meant the AST is difficult to traverse using standard generic
programming techniques. The problem is addressed by introducing
pass-specific data types, implemented as a pair of open type families,
one for PostTc and one for PostRn. These are then explicitly populated
with a PlaceHolder value when they do not yet have meaning.
Since the required bootstrap compiler at this stage does not have
closed type families, an open type family had to be used, which
unfortunately forces the requirement for UndecidableInstances.
In terms of actual usage, we have the following
PostTc id Kind
PostTc id Type
PostRn id Fixity
PostRn id NameSet
TcId and Var are synonyms for Id
-}
-- | Constraint bundling every 'Data' instance needed to traverse an
-- AST generically at any pass (see Note [Pass sensitive types]).
type DataId id =
  ( Data id
  , Data (PostRn id NameSet)
  , Data (PostRn id Fixity)
  , Data (PostRn id Bool)
  , Data (PostRn id Name)
  , Data (PostRn id [Name])
--  , Data (PostRn id [id])
  , Data (PostRn id id)
  , Data (PostTc id Type)
  , Data (PostTc id Coercion)
  , Data (PostTc id id)
  , Data (PostTc id [Type])
  , Data (PostTc id [ConLike])
  , Data (PostTc id HsWrapper)
  )
| AlexanderPankiv/ghc | compiler/hsSyn/PlaceHolder.hs | bsd-3-clause | 3,297 | 0 | 9 | 696 | 443 | 258 | 185 | 54 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Lens
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.Text as T
import Network.Wreq
import qualified Network.Wreq.Session as S
import System.Environment
-- | Entry point.  Expects exactly three command-line arguments: the
-- board-design file, the board file, and the tile hand.
--
-- Fixed: the previous version indexed 'args' with '!!' (a partial
-- operation), which crashed with an unhelpful index-out-of-bounds
-- error when arguments were missing; a wrong argument count now
-- produces a usage message instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [boardDesignFilePath, boardFilePath, tileHand] -> do
      boardDesign <- readFile boardDesignFilePath
      board <- readFile boardFilePath
      solveBoard (T.pack boardDesign) (T.pack board) (T.pack tileHand)
    _ -> error "usage: client BOARD_DESIGN_FILE BOARD_FILE TILE_HAND"
-- | POST-free solve: send design, board and hand as query parameters to
-- the local solver service and stream its response body to stdout.
solveBoard :: T.Text -> T.Text -> T.Text -> IO ()
solveBoard boardDesign board tileHand =
  S.withSession $ \sess -> do
    let opts = defaults & param "boardDesign" .~ [boardDesign]
                        & param "board" .~ [board]
                        & param "tileHand" .~ [tileHand]
    r <- S.getWith opts sess "http://localhost:8080/solve/"
    B.putStr (r ^. responseBody)
| jasdennison/scrabble-solver | client/Client.hs | bsd-3-clause | 901 | 0 | 18 | 199 | 292 | 149 | 143 | 24 | 1 |
{- Current Design -}
{-
Fixed design + Polymorphicj
Here we add a polymorphic constructor
to DocBase.
-}
module D6 where
-- | A document: empty, a single leaf, or the concatenation ('Union')
-- of two sub-documents.
data Doc a
  = Empty
  | DocStr (DocBase a)
  | Union (Doc a) (Doc a)

-- | Leaf content; 'Free' is the polymorphic extension point added in
-- this design iteration.
data DocBase a
  = Str String
  | Chr Char
  | Free a
-- | A single-comma leaf document.
comma :: Doc a
comma = DocStr (Chr ',')

-- | Wrap a document in parentheses.
bracket :: Doc a -> Doc a
bracket inner = Union openParen (Union inner closeParen)
  where
    openParen  = DocStr (Chr '(')
    closeParen = DocStr (Chr ')')
-- | Right fold over a document: feed every leaf, left to right, into
-- the combining function, starting from the @end@ accumulator.
printDoc :: (Doc a) -> (DocBase a -> b -> b) -> b -> b
printDoc doc emit end =
  case doc of
    Empty       -> end
    DocStr leaf -> emit leaf end
    Union l r   -> printDoc l emit (printDoc r emit end)
| dterei/Scraps | haskell/prettyGenerics/D6.hs | bsd-3-clause | 664 | 0 | 11 | 205 | 255 | 132 | 123 | 18 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Types where
import Control.Monad.Reader
import Data.ByteString
import System.IO
-- | Connection state threaded through the bot: the IRC socket handle.
data Bot = Bot {
  socket :: Handle
}

-- | Bot computations: 'ReaderT' access to the 'Bot' over 'IO'.
type Net = ReaderT Bot IO

-- | A parsed IRC message: optional prefix, command, and parameters.
data Message = Message (Maybe Prefix) Command Params
  deriving (Eq, Read, Show)

-- The (optional) prefix can be either a servername or a nickname
-- with optional username and host
data Prefix = User NickName (Maybe UserName) (Maybe ServerName)
            | Server ServerName
  deriving (Eq, Read, Show)

-- Raw protocol fragments are kept as ByteStrings.
type Command = ByteString
type NickName = ByteString
type Param = ByteString
type Params = [Param]
type ServerName = ByteString
type UserName = ByteString
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Tri-state ("tri-colour") garbage collector.
--
-- * Not compacting: alive memory is not altered
-- * Tracing
module Stg.Machine.GarbageCollection.TriStateTracing (
triStateTracing,
) where
import Data.Map (Map)
import qualified Data.Map as M
import Data.Sequence (Seq)
import Data.Set (Set)
import qualified Data.Set as S
import Stg.Machine.GarbageCollection.Common
import Stg.Machine.Types
-- | Remove all unused addresses, without moving the others.
triStateTracing :: GarbageCollectionAlgorithm
triStateTracing = GarbageCollectionAlgorithm
    "Tri-state tracing"
    -- 'garbageCollect' yields (dead addresses, new state); 'insert2nd
    -- mempty' widens that to the triple the algorithm type expects.
    -- NOTE(review): the middle slot's meaning is defined in
    -- Stg.Machine.GarbageCollection.Common -- confirm 'mempty' is the
    -- intended "nothing to report" value there.
    (insert2nd mempty . garbageCollect)
-- | Widen a pair to a triple by injecting a value into the middle slot.
insert2nd :: a -> (x, y) -> (x, a, y)
insert2nd middle (lhs, rhs) = (lhs, middle, rhs)
-- | Run the tracing loop to fixpoint.  Roots are all addresses
-- reachable from the code, the globals and the stack; the result is
-- the set of dead addresses plus the state with only the alive heap.
garbageCollect :: StgState -> (Set MemAddr, StgState)
garbageCollect stgState@StgState
    { stgCode = code
    , stgHeap = heap
    , stgGlobals = globals
    , stgStack = stack }
  = let -- Iterate 'gcStep' until nothing is staged; whatever is left in
        -- the old heap then is garbage.
        GcState {aliveHeap = alive, oldHeap = Heap dead}
          = until everythingCollected gcStep start
        start = GcState
            { aliveHeap = mempty
            , oldHeap = heap
            , staged = (seqToSet . mconcat)
                [addrs code, addrs globals, addrs stack] }
        stgState' = stgState { stgHeap = alive }
    in (M.keysSet dead, stgState')
-- | Convert a 'Seq' to a 'Set', dropping duplicates.
seqToSet :: Ord a => Seq a -> Set a
seqToSet = foldr S.insert S.empty
-- | The tracing loop is finished once no addresses remain staged:
-- nothing alive is left in the old heap.
everythingCollected :: GcState -> Bool
everythingCollected = noAlives
  where
    noAlives GcState {staged = alive} = S.null alive
-- | Each closure is in one of three states ("colours"): in the alive
-- heap (black), staged for later rescue (grey), or not yet staged
-- (white, in the old heap).
data GcState = GcState
    { aliveHeap :: Heap
      -- ^ Heap of closures known to be alive.
      --   Has no overlap with the old heap.
    , staged :: Set MemAddr
      -- ^ Memory addresses known to be alive,
      --   but not yet rescued from the old heap.
    , oldHeap :: Heap
      -- ^ The old heap, containing both dead
      --   and not-yet-found alive closures.
    } deriving (Eq, Ord, Show)
-- | One tracing step: move every staged closure from the old heap into
-- the alive heap, and stage every address those rescued closures
-- reference in turn.
gcStep :: GcState -> GcState
gcStep GcState
    { aliveHeap = oldAlive@(Heap alive)
    , staged = stagedAddrs
    , oldHeap = Heap oldRest }
  = GcState
    { aliveHeap = oldAlive <> Heap rescued
    , staged = seqToSet (addrs rescued)
    , oldHeap = Heap newRest }
  where
    rescued, newRest :: Map MemAddr HeapObject
    -- Split the old heap: a closure is rescued when its address is
    -- already alive or currently staged; the rest stays behind.
    (rescued, newRest) = M.partitionWithKey isAlive oldRest
      where
        isAlive addr _closure = M.member addr alive
                             || S.member addr stagedAddrs
| quchen/stg | src/Stg/Machine/GarbageCollection/TriStateTracing.hs | bsd-3-clause | 2,639 | 0 | 13 | 715 | 603 | 346 | 257 | 55 | 1 |
{-# LANGUAGE CPP, StandaloneDeriving, GeneralizedNewtypeDeriving #-}
-- |
-- Types for referring to remote objects in Remote GHCi. For more
-- details, see Note [External GHCi pointers] in compiler/ghci/GHCi.hs
--
-- For details on Remote GHCi, see Note [Remote GHCi] in
-- compiler/ghci/GHCi.hs.
--
module GHCi.RemoteTypes
( RemotePtr(..), toRemotePtr, fromRemotePtr, castRemotePtr
, HValue(..)
, RemoteRef, mkRemoteRef, localRef, freeRemoteRef
, HValueRef, toHValueRef
, ForeignRef, mkForeignRef, withForeignRef
, ForeignHValue
, unsafeForeignRefToRemoteRef, finalizeForeignRef
) where
import Prelude -- See note [Why do we import Prelude here?]
import Control.DeepSeq
import Data.Word
import Foreign hiding (newForeignPtr)
import Foreign.Concurrent
import Data.Binary
import Unsafe.Coerce
import GHC.Exts
import GHC.ForeignPtr
-- -----------------------------------------------------------------------------
-- RemotePtr
-- Static pointers only; don't use this for heap-resident pointers.
-- Instead use HValueRef. We will fix the remote pointer to be 64 bits. This
-- should cover 64 and 32bit systems, and permits the exchange of remote ptrs
-- between machines of different word size. For example, when connecting to
-- an iserv instance on a different architecture with different word size via
-- -fexternal-interpreter.
-- | A pointer on the remote side, fixed at 64 bits (see the comment above
-- regarding exchanging pointers between machines of different word size).
newtype RemotePtr a = RemotePtr Word64

-- | Widen a local 'Ptr' into a 'RemotePtr'.
toRemotePtr :: Ptr a -> RemotePtr a
toRemotePtr p = RemotePtr (fromIntegral (ptrToWordPtr p))

-- | Narrow a 'RemotePtr' back into a local 'Ptr'.
fromRemotePtr :: RemotePtr a -> Ptr a
fromRemotePtr (RemotePtr p) = wordPtrToPtr (fromIntegral p)

-- | Change the phantom type; the underlying 'Word64' is unchanged.
castRemotePtr :: RemotePtr a -> RemotePtr b
castRemotePtr (RemotePtr a) = RemotePtr a

deriving instance Show (RemotePtr a)
deriving instance Binary (RemotePtr a)
deriving instance NFData (RemotePtr a)
-- -----------------------------------------------------------------------------
-- HValueRef
-- | An arbitrary heap value, wrapped so it can be given instances.
newtype HValue = HValue Any

instance Show HValue where
  -- The payload is opaque ('Any'), so there is nothing useful to show.
  show _ = "<HValue>"

-- | A reference to a remote value. These are allocated and freed explicitly.
newtype RemoteRef a = RemoteRef (RemotePtr ())
  deriving (Show, Binary)

-- We can discard type information if we want
toHValueRef :: RemoteRef a -> RemoteRef HValue
toHValueRef = unsafeCoerce

-- For convenience
type HValueRef = RemoteRef HValue
-- | Make a reference to a local value that we can send remotely.
-- This reference will keep the value that it refers to alive until
-- 'freeRemoteRef' is called.
mkRemoteRef :: a -> IO (RemoteRef a)
mkRemoteRef a = do
  -- The StablePtr keeps the value alive until 'freeRemoteRef' is called.
  sp <- newStablePtr a
  return $! RemoteRef (toRemotePtr (castStablePtrToPtr sp))

-- | Convert an HValueRef to an HValue.  Should only be used if the HValue
-- originated in this process.
localRef :: RemoteRef a -> IO a
localRef (RemoteRef w) =
  deRefStablePtr (castPtrToStablePtr (fromRemotePtr w))

-- | Release an HValueRef that originated in this process
freeRemoteRef :: RemoteRef a -> IO ()
freeRemoteRef (RemoteRef w) =
  -- Frees the StablePtr created in 'mkRemoteRef', so the referenced value
  -- can be garbage collected again.
  freeStablePtr (castPtrToStablePtr (fromRemotePtr w))
-- | An HValueRef with a finalizer
newtype ForeignRef a = ForeignRef (ForeignPtr ())

instance NFData (ForeignRef a) where
  -- Forcing the wrapper to WHNF is all we can do; the payload is opaque.
  rnf x = x `seq` ()

type ForeignHValue = ForeignRef HValue
-- | Create a 'ForeignRef' from a 'RemoteRef'.  The finalizer
-- should arrange to call 'freeHValueRef' on the 'HValueRef'.  (since
-- this function needs to be called in the process that created the
-- 'HValueRef', it cannot be called directly from the finalizer).
mkForeignRef :: RemoteRef a -> IO () -> IO (ForeignRef a)
mkForeignRef (RemoteRef hvref) finalizer =
  ForeignRef <$> newForeignPtr (fromRemotePtr hvref) finalizer

-- | Use a 'ForeignHValue'
withForeignRef :: ForeignRef a -> (RemoteRef a -> IO b) -> IO b
withForeignRef (ForeignRef fp) f =
  -- withForeignPtr keeps the ForeignPtr (and thus its finalizer) alive
  -- for the duration of the callback.
  withForeignPtr fp (f . RemoteRef . toRemotePtr)

-- | Extract the 'RemoteRef' without keeping the 'ForeignRef' alive; per
-- 'unsafeForeignPtrToPtr', the caller must ensure the 'ForeignRef'
-- outlives every use of the result.
unsafeForeignRefToRemoteRef :: ForeignRef a -> RemoteRef a
unsafeForeignRefToRemoteRef (ForeignRef fp) =
  RemoteRef (toRemotePtr (unsafeForeignPtrToPtr fp))

-- | Run the finalizer of a 'ForeignRef' immediately.
finalizeForeignRef :: ForeignRef a -> IO ()
finalizeForeignRef (ForeignRef fp) = finalizeForeignPtr fp
| sdiehl/ghc | libraries/ghci/GHCi/RemoteTypes.hs | bsd-3-clause | 4,050 | 0 | 12 | 632 | 795 | 428 | 367 | 61 | 1 |
module Stars where
import Rumpus
-- Golden Section Spiral (via http://www.softimageblog.com/archives/115)
-- | Golden-section spiral: distribute @count + 1@ points roughly evenly
-- over the unit sphere (via http://www.softimageblog.com/archives/115).
pointsOnSphere :: Int -> [V3 GLfloat]
pointsOnSphere count = map point [0 .. n]
  where
    n   = fromIntegral count
    off = 2 / n
    inc = pi * (3 - sqrt 5)
    -- The k-th point of the spiral.
    point k =
        let y   = k * off - 1 + (off / 2)
            r   = sqrt (1 - y * y)
            phi = k * inc
        in V3 (cos phi * r) y (sin phi * r)
start :: Start
start = do
    -- We only take half the requested points to get the upper hemisphere
    let numPoints = 200 :: Int
        points = reverse $ drop (numPoints `div` 2) $ pointsOnSphere numPoints
        -- Hues spread evenly over [0, 1], one per point.
        hues = map ((/ fromIntegral numPoints) . fromIntegral) [0..numPoints]
    -- One tiny sphere per point, pushed out to radius 500 so it reads as
    -- a distant starfield.
    forM_ (zip3 [0..] points hues) $ \(i, pos, hue) -> spawnChild $ do
        myTransformType ==> AbsolutePose
        myShape         ==> Sphere
        mySize          ==> 0.001
        myColor         ==> colorHSL hue 0.8 0.8
        myPose          ==> position (pos * 500)
        myStart         ==> do
            -- Grow each star after a staggered delay (0.05 s per index).
            setDelayedAction (fromIntegral i * 0.05) $
                setSize 5
| lukexi/rumpus | util/ObjectArchive/Home/Stars.hs | bsd-3-clause | 1,153 | 0 | 19 | 443 | 374 | 193 | 181 | -1 | -1 |
module Good where
-- | A trivial example value.
good :: Int
good = 42
| juhp/stack | test/integration/tests/init-omit-packages/files/good/Good.hs | bsd-3-clause | 41 | 0 | 4 | 10 | 14 | 9 | 5 | 3 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Надстройка «Пользовательские полезные нагрузки» </title>
<maps>
<homeID>custompayloads</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Содержание</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Индекс</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Поиск</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Избранное</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/custompayloads/src/main/javahelp/org/zaproxy/zap/extension/custompayloads/resources/help_ru_RU/helpset_ru_RU.hs | apache-2.0 | 1,080 | 81 | 65 | 159 | 545 | 273 | 272 | -1 | -1 |
{-# LANGUAGE CPP #-}
-- | /Towards Haskell in the Cloud/ (Epstein et al., Haskell Symposium 2011)
-- proposes a new type construct called 'static' that characterizes values that
-- are known statically. Cloud Haskell uses the
-- 'Control.Distributed.Static.Static' implementation from
-- "Control.Distributed.Static". That module comes with its own extensive
-- documentation, which you should read if you want to know the details. Here
-- we explain the Template Haskell support only.
--
-- [Static values]
--
-- Given a top-level (possibly polymorphic, but unqualified) definition
--
-- > f :: forall a1 .. an. T
-- > f = ...
--
-- you can use a Template Haskell splice to create a static version of 'f':
--
-- > $(mkStatic 'f) :: forall a1 .. an. (Typeable a1, .., Typeable an) => Static T
--
-- Every module that you write that contains calls to 'mkStatic' needs to
-- have a call to 'remotable':
--
-- > remotable [ 'f, 'g, ... ]
--
-- where you must pass every function (or other value) that you pass as an
-- argument to 'mkStatic'. The call to 'remotable' will create a definition
--
-- > __remoteTable :: RemoteTable -> RemoteTable
--
-- which can be used to construct the 'RemoteTable' used to initialize
-- Cloud Haskell. You should have (at most) one call to 'remotable' per module,
-- and compose all created functions when initializing Cloud Haskell:
--
-- > let rtable :: RemoteTable
-- > rtable = M1.__remoteTable
-- > . M2.__remoteTable
-- > . ...
-- > . Mn.__remoteTable
-- > $ initRemoteTable
--
-- NOTE: If you get a type error from ghc along these lines
--
-- > The exact Name `a_a30k' is not in scope
-- > Probable cause: you used a unique name (NameU) in Template Haskell but did not bind it
--
-- then you need to enable the @ScopedTypeVariables@ language extension.
--
-- [Static serialization dictionaries]
--
-- Some Cloud Haskell primitives require static serialization dictionaries (**):
--
-- > call :: Serializable a => Static (SerializableDict a) -> NodeId -> Closure (Process a) -> Process a
--
-- Given some serializable type 'T' you can define
--
-- > sdictT :: SerializableDict T
-- > sdictT = SerializableDict
--
-- and then have
--
-- > $(mkStatic 'sdictT) :: Static (SerializableDict T)
--
-- However, since these dictionaries are so frequently required Cloud Haskell
-- provides special support for them. When you call 'remotable' on a
-- /monomorphic/ function @f :: T1 -> T2@
--
-- > remotable ['f]
--
-- then a serialization dictionary is automatically created for you, which you
-- can access with
--
-- > $(functionSDict 'f) :: Static (SerializableDict T1)
--
-- In addition, if @f :: T1 -> Process T2@, then a second dictionary is created
--
-- > $(functionTDict 'f) :: Static (SerializableDict T2)
--
-- [Closures]
--
-- Suppose you have a process
--
-- > isPrime :: Integer -> Process Bool
--
-- Then
--
-- > $(mkClosure 'isPrime) :: Integer -> Closure (Process Bool)
--
-- which you can then 'call', for example, to have a remote node check if
-- a number is prime.
--
-- In general, if you have a /monomorphic/ function
--
-- > f :: T1 -> T2
--
-- then
--
-- > $(mkClosure 'f) :: T1 -> Closure T2
--
-- provided that 'T1' is serializable (*) (remember to pass 'f' to 'remotable').
--
-- (You can also create closures manually--see the documentation of
-- "Control.Distributed.Static" for examples.)
--
-- [Example]
--
-- Here is a small self-contained example that uses closures and serialization
-- dictionaries. It makes use of the Control.Distributed.Process.SimpleLocalnet
-- Cloud Haskell backend.
--
-- > {-# LANGUAGE TemplateHaskell #-}
-- > import System.Environment (getArgs)
-- > import Control.Distributed.Process
-- > import Control.Distributed.Process.Closure
-- > import Control.Distributed.Process.Backend.SimpleLocalnet
-- > import Control.Distributed.Process.Node (initRemoteTable)
-- >
-- > isPrime :: Integer -> Process Bool
-- > isPrime n = return . (n `elem`) . takeWhile (<= n) . sieve $ [2..]
-- > where
-- > sieve :: [Integer] -> [Integer]
-- > sieve (p : xs) = p : sieve [x | x <- xs, x `mod` p > 0]
-- >
-- > remotable ['isPrime]
-- >
-- > master :: [NodeId] -> Process ()
-- > master [] = liftIO $ putStrLn "no slaves"
-- > master (slave:_) = do
-- > isPrime79 <- call $(functionTDict 'isPrime) slave ($(mkClosure 'isPrime) (79 :: Integer))
-- > liftIO $ print isPrime79
-- >
-- > main :: IO ()
-- > main = do
-- > args <- getArgs
-- > case args of
-- > ["master", host, port] -> do
-- > backend <- initializeBackend host port rtable
-- > startMaster backend master
-- > ["slave", host, port] -> do
-- > backend <- initializeBackend host port rtable
-- > startSlave backend
-- > where
-- > rtable :: RemoteTable
-- > rtable = __remoteTable initRemoteTable
--
-- [Notes]
--
-- (*) If 'T1' is not serializable you will get a type error in the generated
-- code. Unfortunately, the Template Haskell infrastructure cannot check
-- a priori if 'T1' is serializable or not due to a bug in the Template
-- Haskell libraries (<http://hackage.haskell.org/trac/ghc/ticket/7066>)
--
-- (**) Even though 'call' is passed an explicit serialization
-- dictionary, we still need the 'Serializable' constraint because
-- 'Static' is not the /true/ static. If it was, we could 'unstatic'
-- the dictionary and pattern match on it to bring the 'Typeable'
-- instance into scope, but unless proper 'static' support is added to
-- ghc we need both the type class argument and the explicit dictionary.
module Control.Distributed.Process.Closure
( -- * Serialization dictionaries (and their static versions)
SerializableDict(..)
, staticDecode
, sdictUnit
, sdictProcessId
, sdictSendPort
, sdictStatic
, sdictClosure
-- * The CP type and associated combinators
, CP
, idCP
, splitCP
, returnCP
, bindCP
, seqCP
-- * CP versions of Cloud Haskell primitives
, cpLink
, cpUnlink
, cpRelay
, cpSend
, cpExpect
, cpNewChan
-- * Working with static values and closures (without Template Haskell)
, RemoteRegister
, MkTDict(..)
, mkStaticVal
, mkClosureValSingle
, mkClosureVal
, call'
#ifdef TemplateHaskellSupport
-- * Template Haskell support for creating static values and closures
, remotable
, remotableDecl
, mkStatic
, mkClosure
, mkStaticClosure
, functionSDict
, functionTDict
#endif
) where
import Control.Distributed.Process.Serializable (SerializableDict(..))
import Control.Distributed.Process.Internal.Closure.BuiltIn
( -- Static dictionaries and associated operations
staticDecode
, sdictUnit
, sdictProcessId
, sdictSendPort
, sdictStatic
, sdictClosure
-- The CP type and associated combinators
, CP
, idCP
, splitCP
, returnCP
, bindCP
, seqCP
-- CP versions of Cloud Haskell primitives
, cpLink
, cpUnlink
, cpRelay
, cpSend
, cpExpect
, cpNewChan
)
import Control.Distributed.Process.Internal.Closure.Explicit
(
RemoteRegister
, MkTDict(..)
, mkStaticVal
, mkClosureValSingle
, mkClosureVal
, call'
)
#ifdef TemplateHaskellSupport
import Control.Distributed.Process.Internal.Closure.TH
( remotable
, remotableDecl
, mkStatic
, functionSDict
, functionTDict
, mkClosure
, mkStaticClosure
)
#endif
| qnikst/distributed-process | src/Control/Distributed/Process/Closure.hs | bsd-3-clause | 7,391 | 0 | 6 | 1,493 | 430 | 347 | 83 | 57 | 0 |
{-# LANGUAGE TypeInType, KindSignatures #-}
module T16344 where
import Data.Kind
-- | A kind-indexed type: @ka@ is a kind parameter and @a@ has kind @ka@.
-- The constructor uses @T@ at kind 'Type' and at kind @Type -> Type@.
-- NOTE: this lives in a compiler should_fail test; the declaration is
-- intentionally written this way — do not "fix" it.
data T ka (a::ka) b = MkT (T Type Int Bool)
                          (T (Type -> Type) Maybe Bool)
| sdiehl/ghc | testsuite/tests/dependent/should_fail/T16344.hs | bsd-3-clause | 198 | 0 | 10 | 70 | 57 | 34 | 23 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.ForeignPtr.Safe
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : ffi@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- The 'ForeignPtr' type and operations. This module is part of the
-- Foreign Function Interface (FFI) and will usually be imported via
-- the "Foreign" module.
--
-- Safe API Only.
--
-----------------------------------------------------------------------------
module Foreign.ForeignPtr.Safe {-# DEPRECATED "Safe is now the default, please use Foreign.ForeignPtr instead" #-} (
-- * Finalised data pointers
ForeignPtr
, FinalizerPtr
, FinalizerEnvPtr
-- ** Basic operations
, newForeignPtr
, newForeignPtr_
, addForeignPtrFinalizer
, newForeignPtrEnv
, addForeignPtrFinalizerEnv
, withForeignPtr
, finalizeForeignPtr
-- ** Low-level operations
, touchForeignPtr
, castForeignPtr
-- ** Allocating managed memory
, mallocForeignPtr
, mallocForeignPtrBytes
, mallocForeignPtrArray
, mallocForeignPtrArray0
) where
import Foreign.ForeignPtr.Imp
| tolysz/prepare-ghcjs | spec-lts8/base/Foreign/ForeignPtr/Safe.hs | bsd-3-clause | 1,424 | 0 | 4 | 343 | 87 | 66 | 21 | 20 | 0 |
module Main () where
import SimpleJSON
import PrettyJSON
import Prettify
-- | Render a small sample JSON object and print it at line width 10.
main :: IO ()
main = putStrLn (pretty 10 (renderJValue sampleValue))
  where sampleValue = JObject [("f", JNumber 1), ("q", JBool True)]
| pauldoo/scratch | RealWorldHaskell/ch05/Main.hs | isc | 194 | 0 | 12 | 39 | 72 | 41 | 31 | 6 | 1 |
import XMonad
import XMonad.Config.Desktop
import XMonad.Util.EZConfig (additionalKeysP)
import XMonad.Layout.NoBorders (noBorders)
import XMonad.Layout.ResizableTile -- ResizableTall
import XMonad.Actions.WindowBringer (gotoMenu, bringMenu)
import XMonad.Layout.Magnifier (maximizeVertical, MagnifyMsg(Toggle))
import XMonad.Actions.GroupNavigation -- historyHook, nextMatch
main :: IO ()
main = do
    xmonad $ desktopConfig
        --{ terminal = "st -f 'Iosevka-11'"
        { terminal = "urxvt -fn 'xft:Iosevka:size=11' -bg '#181E26' -fg '#98BE65'"
        , focusedBorderColor = "#00FF00"
        , modMask = mod4Mask -- use windows key
        , layoutHook = desktopLayoutModifiers $
            maximizeVertical
                (ResizableTall 1 (1.5/100) (3/5) []) ||| (noBorders Full)
        , logHook = historyHook -- track focus history for M1-<Tab>
        , focusFollowsMouse = False
        } `additionalKeysP` [ ("M-i", sendMessage Toggle) -- toggle vertical maximize
                            , ("M-g", gotoMenu) -- open dmenu and goto selected window
                            , ("M-b", bringMenu) -- open dmenu and bring selected window into current workspace
                            , ("M1-<Tab>", nextMatch History (return True)) -- jump to the most recently focused window
                            ]
| grafoo/dot | files/xmonad.hs | mit | 1,258 | 0 | 17 | 311 | 244 | 149 | 95 | 23 | 1 |
module WykopLinks (
promotedLinks
, promotedLinks'
, upcomingLinks
, upcomingLinks'
, PSort(..)
, USort(..)
, module WykopTypes
) where
import WykopTypes
import WykopUtils hiding (toGet)
-- | Time window for the promoted-links listing.
data PSort = Day | Week | Month

-- These strings are used verbatim as the @sort@ query value (see 'toGet').
instance Show PSort where
  show Day = "day"
  show Week = "week"
  show Month = "month"
-- | Fetch promoted links for the given page number and time window.
promotedLinks :: Keys -> Int -> PSort -> IO (Maybe [Link])
promotedLinks k p s = get k [] (toGet p s) "links/promoted"

-- | First page of promoted links, sorted by day.
promotedLinks' :: Keys -> IO (Maybe [Link])
promotedLinks' k = promotedLinks k 1 Day
-- | Sort order for the upcoming-links listing.
data USort = UDate | UVotes | UComments

-- These strings are used verbatim as the @sort@ query value (see 'toGet').
instance Show USort where
  show UDate = "date"
  show UVotes = "votes"
  show UComments = "comments"
-- | Fetch upcoming links for the given page number and sort order.
upcomingLinks :: Keys -> Int -> USort -> IO (Maybe [Link])
upcomingLinks k p s = get k [] (toGet p s) "links/upcoming"

-- | First page of upcoming links, sorted by votes.
upcomingLinks' :: Keys -> IO (Maybe [Link])
upcomingLinks' k = upcomingLinks k 1 UVotes
-- | Build the GET query parameters for a paged, sorted listing request.
toGet :: (Show a) => Int -> a -> GetData
toGet page sortOrder = [("page", show page), ("sort", show sortOrder)]
| mikusp/hwykop | WykopLinks.hs | mit | 1,003 | 0 | 11 | 231 | 386 | 208 | 178 | 30 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
-- | This module loads the MaxMind's GeoIp City database.
module Network.GeoIp.GeoCityIp (
GeoCityDB,
load,
findRange,
findLocation) where
import qualified Data.ByteString as B
import Data.Bits
import Network.GeoIp.GeoDB
-- | Handle to a loaded GeoIP city database.
data GeoCityDB = GeoCityDB GeoDB
-- | Load the city database. If an error is triggered then
-- Left is returned with an error string.
load :: FilePath -> IO (Either String GeoCityDB)
load geoFile = do
  eGeo <- makeGeoDB geoFile
  case eGeo of
    Left e -> return $ Left e
    Right geo ->
      -- NOTE(review): this accepts only 'geoIPCountryEdition' even though
      -- this module loads the *city* database — looks copy-pasted from the
      -- country loader; confirm the expected edition constant.
      if dataBaseType geo == geoIPCountryEdition then
        return $ Right (GeoCityDB geo)
      else
        return $ Left ("load: Incorrect database type. Database type is: " ++ (show $ dataBaseType geo))
-- | Decode the @(latitude, longitude)@ pair of the city record at the
-- given cursor position.
extractRecordCity :: GeoDB -> Int -> (Double, Double)
extractRecordCity geo cursor =
  (latitude, longitude)
  where
    -- Offset of this record past the search-tree section.
    recordCursor = cursor + (2 * (recordLength geo) - 1) * (dataBaseSegments geo)
    memo = B.drop recordCursor (mem geo)
    -- Walk the record: 1 byte, then three delimited strings (presumably
    -- country/region/city/postal fields — TODO confirm the layout), then
    -- two 3-byte packed coordinates.
    (_, countryMem) = getBytes 1 memo
    (_, regionMem) = extractNullString countryMem
    (_, cityMem) = extractNullString regionMem
    (_, postalMem) = extractNullString cityMem
    -- Each coordinate is a 3-byte integer decoded as value/10000 - 180.
    (latitude, latMem) = ((fromIntegral ((getNumber $ B.take 3 postalMem)::Integer)) / 10000 - 180, B.drop 3 postalMem)
    (longitude, _) = ((fromIntegral ((getNumber $ B.take 3 latMem)::Integer)) / 10000 - 180, B.drop 3 latMem)
-- | Build a mask with bits @lo@ through @hi@ set — plus @bit (hi+1)@,
-- because the terminating call contributes one bit above the range.
-- NOTE(review): confirm that the extra high bit is intentional; callers
-- in 'findRange' appear to rely on the current behaviour.
generateMask :: forall a. (Num a, Bits a) => Int -> Int -> a
generateMask lo hi
  | lo <= hi  = bit lo .|. generateMask (lo + 1) hi
  | otherwise = bit lo
-- | Find the IP range that the IP address is in. The result is monadic.
-- In most cases you will want to use the Maybe monad.
findRange :: (Monad m) => GeoCityDB -> Integer -> m (Integer, Integer)
findRange (GeoCityDB geo) address = do
  (cursor, netMask) <- seekRecord geo address
  let
    -- Network part (top bits) and host part (low bits) of the address.
    bitMask = generateMask (31 - netMask) 31
    hostMask = generateMask 0 netMask
  -- A cursor equal to the segment count marks "no record found".
  if cursor == dataBaseSegments geo then
    fail "Could not find IP"
  else
    return (address .&. bitMask, address .|. hostMask)
-- | Find the location of an IP address. The tuple returned is @(latitude, longitude)@.
-- The result is monadic, in most cases you will want to use the Maybe monad.
findLocation :: (Monad m) => GeoCityDB -> Integer -> m (Double, Double)
findLocation (GeoCityDB geo) address = do
  (cursor, _) <- seekRecord geo address
  -- A cursor equal to the segment count marks "no record found".
  if cursor == dataBaseSegments geo then
    fail "Could not find IP"
  else
    return (extractRecordCity geo cursor)
| astro/GeoIp | Network/GeoIp/GeoCityIp.hs | mit | 2,502 | 40 | 17 | 484 | 804 | 425 | 379 | 50 | 3 |
import Data.List
import qualified Data.Map as Map
import qualified Data.List.Ordered as Ordered
import qualified Data.Array as Array
import Primes
import Utils
import Debug.Trace
-- | Upper bound (exclusive) on the primes considered for the tuple search.
maxTest = 1000

-- All primes below 'maxTest'.
testPrimeTable = takeWhile (<maxTest) primes

-- Index of the last entry in 'testPrimeTable'.
nT = (length testPrimeTable) - 1

-- | A primality test backed by a lookup table for small numbers and a
-- direct 'isPrime' call beyond the table.
data PrimalityTest = PrimalityTest { table :: Array.Array Integer Bool, test :: Integer -> Bool }

primalityTest = PrimalityTest arr (\n -> if (fromIntegral n)<=arrayMax then arr Array.! (fromIntegral n) else isPrime n)
  where arr = makePrimeArray arrayMax
        -- Large enough to cover any concatenation of two primes < maxTest.
        arrayMax = concatNumbers maxTest maxTest

-- | Is @n@ prime? Table lookup when possible, 'isPrime' otherwise.
primeTest n = test primalityTest $ n
-- | A partial tuple: the primes chosen so far ('list') and the primes
-- still compatible with all of them ('toBeConsidered').
data PrimeTuple = PrimeTuple { list :: [Integer], toBeConsidered :: [Integer] } deriving (Show, Eq, Ord)
--makePrimeTuple n = PrimeTuple [n] $ PrimeTuple (allowed [n])
-- | The empty starting tuple; candidates are all test primes except 2.
emptyTuple = PrimeTuple [] $ tail testPrimeTable
--findNplet :: Int -> PrimeTuple -> Maybe [PrimeTuple]
--findNplet nmax tuple
-- | n >= nmax = Just [tuple]
-- | nleft <= 0 = Nothing
-- | otherwise = findQuintuplet nmax tuples
-- where n = length $ list tuple
-- nleft = length $ toBeConsidered tuple
-- tuples = concatMap
-- | Depth-first expansion: for each candidate, either add it to the tuple
-- (narrowing the remaining candidates) or skip it.
makeChildren :: PrimeTuple -> [PrimeTuple]
makeChildren (PrimeTuple _ []) = []
makeChildren (PrimeTuple l (x:xs)) = (newTuple : (makeChildren newTuple)) ++ (makeChildren (PrimeTuple l xs))
  where newL = x:l
        newTuple = PrimeTuple newL (restOf x xs)

-- | Candidates from @xs@ that also pair with @x@ (per 'primeMap').
restOf :: Integer -> [Integer] -> [Integer]
restOf x xs = Ordered.isect xs $ primeMap Map.! x
-- | All 5-element tuples reachable from the empty tuple.
answer = filter (\t -> (length $ list t) == 5) $ makeChildren emptyTuple

main = do
  let ans = answer
  print answer
  -- The Project Euler answer: the sum of the first quintuple found.
  print $ head $ map (sum . list) ans
--appendToPrimeTuple tuple
-- | trace ("\n*** appendToPrimeTuple " ++ show tuple) False = undefined
-- | null $ toBeConsidered tuple = []
-- | otherwise = [ PrimeTuple newList remaining |
-- p <- toBeConsidered tuple,
-- let oldList = list tuple,
-- let newList = p:oldList,
-- let remaining = allowed newList
-- ]
--
-- | Intersection of several ordered lists.
-- Partial: pattern-match failure on the empty outer list.
isectMany :: (Ord a) => [[a]] -> [a]
isectMany [x] = x
isectMany (l:ls) = Ordered.isect l $ isectMany ls

-- | Primes compatible (pairwise, per 'primeMap') with every prime given.
allowed aList = isectMany $ map (primeMap Map.!) aList
--
--primeTuples = tail $ appendToPrimeTuple emptyTuple
--
-- | For each test prime p, the ordered list of primes q such that both
-- concatenations p++q and q++p are prime.
primeMap = Map.fromList [ (p, qs) | p <- testPrimeTable,
                          let qs = [ q | q <- testPrimeTable,
                                     primeTest $ concatNumbers p q,
                                     primeTest $ concatNumbers q p
                                   ]
                        ]
--
--pairs = concatMap appendToPrimeTuple primeTuples
--
--triplets = concatMap appendToPrimeTuple pairs
--
--quadruplets = sort $ concatMap appendToPrimeTuple triplets
--
--quintuplets = sort $ concatMap appendToPrimeTuple quadruplets
--
--main = do
-- putStrLn $ "number of pairs: " ++ (show $ length pairs)
-- putStrLn $ "number of triplets: " ++ (show $ length triplets)
-- putStrLn $ "number of quadruplets: " ++ (show $ length quadruplets)
-- putStrLn $ "number of quintuplets: " ++ (show $ length quintuplets)
-- print $ take 10 quintuplets
-- print $ map sum $ map list quintuplets
| arekfu/project_euler | p0060/p0060.hs | mit | 3,299 | 0 | 14 | 903 | 692 | 388 | 304 | 37 | 2 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module TypeStructureTest where
import Test.Framework
import TypeStructure.Prelude.Basic
import TypeStructure.Prelude.Data
import TypeStructure
-- | Structurally different types must yield different graphs.
test_differentStructuresDontEqual = do
  assertNotEqual (graph (undefined :: Int16)) (graph (undefined :: Int8))

-- | The same type always yields the same graph.
test_sameStructuresDoEqual = do
  assertEqual (graph (undefined :: Int)) (graph (undefined :: Int))

-- | Different graphs must not collide under 'hash' (for these two types).
test_differentStructuresProduceDifferentHashes = do
  assertNotEqual (hash $ graph (undefined :: Int16)) (hash $ graph (undefined :: Int8))
| nikita-volkov/type-structure | src/TypeStructureTest.hs | mit | 536 | 0 | 11 | 68 | 144 | 81 | 63 | 12 | 1 |
module CombinationTest where
import Combination
import Test.Tasty.HUnit
import Test.Tasty
-- | Test cases: (description, input list, choose-k, expected combinations).
combs = [
  ("repeat 1", [1,3,2,1], 3, [[1,1,2],[1,1,3],[1,2,3]]),
  ("1 choose 2", [1], 2, []),
  ("1 choose 1", [1], 1, [[1]]),
  ("0 choose 1", [], 1, []),
  ("all same", [1,1,1,1], 3, [[1,1,1]])
  ]

-- | The implementations under test; each is run against every case above.
combTests = [
  ("combination1", combination1),
  ("combination2", combination2)
  ]

-- | Cross product: every implementation against every test case.
combinationTest = testGroup "Combination tests"
  [testGroup testGroupDesc
    [ testCase description $ f arg1 arg2 @?= expected | (description, arg1, arg2, expected) <- combs] |
      (testGroupDesc, f) <- combTests]
| pterodragon/programming | Haskell/algorithm/test/CombinationTest.hs | mit | 710 | 0 | 11 | 221 | 282 | 177 | 105 | 17 | 1 |
-- | Print the squares of @k@, @k+1@, ..., 10, one per line; do nothing
-- once @k@ exceeds 10.
loop :: Int -> IO ()
loop k
  | k <= 10 = do
      print (k * k)
      loop (k + 1)
  | otherwise = return ()
-- | Print the squares of 1 through 10.
main :: IO ()
main = loop 1
| chsm/code | hs/soh/basics/04-01.hs | mit | 179 | 0 | 12 | 92 | 87 | 42 | 45 | 9 | 2 |
module PE0018 where
import Data.Char (isDigit)
-- | One row of the triangle.
type Row = [Int]
-- | The whole triangle, top row first.
type TriangleGraph = [Row]
-- from position n in a given row,
-- positions n and n+1 are reachable in the next row
-- | The example triangle from the problem statement
-- (best path 3 -> 7 -> 4 -> 9, sum 23).
sample :: TriangleGraph
sample = [
  [3],
  [7, 4],
  [2, 4, 6],
  [8, 5, 9, 3]
  ]
-- | Best partial path sum after stepping onto a cell worth @n@ from
-- either of its two neighbouring parents @(a, b)@.
combine :: (Int, Int) -> Int -> Int
combine (a, b) n = max (a+n) (b+n)

-- | Neutral value used to pad the border of a row of partial sums.
combine0 :: Int
combine0 = 0
-- | Extend the best-path sums by one triangle row (pads the partial sums
-- with 'combine0' on the left so every cell has two parents).
nextPaths :: Row -> Row -> Row
nextPaths pathsSoFar nextRow = nextPaths0 (combine0:pathsSoFar) nextRow

-- | Walk the padded sums and the new row in lockstep; each new cell keeps
-- the better of its two parents. Errors when the rows do not line up
-- (the new row must be one element longer than the unpadded sums).
nextPaths0 :: Row -> Row -> Row
nextPaths0 (a:b:paths) (n:row) = (combine (a, b) n) : (nextPaths0 (b:paths) row)
nextPaths0 (a:[]) (n:[]) = (combine (a, combine0) n) : []
nextPaths0 _ _ = error "wrong row length"  -- fixed typo: was "lenght"
-- | Fold every row of the triangle into the running list of best sums.
graphPath :: TriangleGraph -> Row
graphPath = foldl nextPaths []

-- | The best top-to-bottom path sum of a triangle.
maximumPathSum :: TriangleGraph -> Int
maximumPathSum g = maximum (graphPath g)
-- | Read and parse the triangle from the problem's data file.
getGraph :: IO TriangleGraph
getGraph = do
  text <- readFile "PE0018-Maximum-path-sum-I.txt"
  -- Strip any non-data lines before parsing.
  let graphText = getGraphText text
  return $ parseGraph graphText
-- | Keep only the input lines that start with a digit.
getGraphText :: String -> String
getGraphText = unlines . filter startsWithDigit . lines
  where
    startsWithDigit (c:_) = isDigit c
    startsWithDigit []    = False
-- | Parse a whole triangle: one row of numbers per line.
parseGraph :: String -> TriangleGraph
parseGraph = map parseGraphLine . lines

-- | Parse one whitespace-separated row of integers.
parseGraphLine :: String -> Row
parseGraphLine = map read . words
-- | Print the maximum path sum of the triangle in the data file.
main :: IO ()
main = do
  g <- getGraph
  print $ maximumPathSum g
| mvidner/projecteuler | src/PE0018.hs | mit | 1,541 | 0 | 10 | 352 | 560 | 302 | 258 | 41 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module RenderData where
import Control.Monad.Except
import Data.Text
import Syntax
import LinearProof
-- | Render a constructor application: @name(arg1,arg2,...)@.
ctor :: Text -> [Text] -> Text
ctor name args = mconcat [name, pack "(", intercalate (pack ",") args, pack ")"]
-- | Render a comma-separated list: @[x1,x2,...]@.
lst :: [Text] -> Text
lst items = mconcat [pack "[", intercalate (pack ",") items, pack "]"]
-- | Render an 'Int' as 'Text'.
int :: Int -> Text
int n = pack (show n)
-- | Wrap a 'Text' in double quotes (no escaping of the contents).
str :: Text -> Text
str s = mconcat [pack "\"", s, pack "\""]
-- | Render a sequent and its linear proof as a term of the shape
-- @(boxproof, sequent, fragments)@.
renderLinearProof :: Sequent -> [ProofFragment] -> Either String Text
renderLinearProof seq' frags = do
  seqT <- renderSequent seq'
  fragsT <- lst <$> mapM renderFragment frags
  return $ ctor "" ["boxproof", seqT, fragsT]
-- | Render a sequent as @(antecedents, consequent)@. Fails if the
-- sequent has free term variables or contains a hole.
renderSequent :: Sequent -> Either String Text
renderSequent (Sequent ant con) = do
  antFormulas <- catchError (sequence ant) (\_ -> Left "Cannot export sequent with free term variables")
  conFormula <- either (const $ Left "Cannot export sequent with hole in it") Right con
  antT <- lst <$> mapM renderFormula antFormulas
  conT <- renderFormula conFormula
  return $ ctor "" [antT, conT]
-- | Render one proof fragment. Plain lines and boxes are supported;
-- line holes and variable introductions cannot be exported.
renderFragment :: ProofFragment -> Either String Text
renderFragment (Line i (Sequent _ con) ruleName refs) = do
  conFormula <- either (const $ Left "Cannot export sequent with hole in it") Right con
  conT <- ctor "SOME" . (:[]) <$> renderFormula conFormula
  ruleNameT <- renderRuleName ruleName
  refsT <- lst <$> mapM renderLineRef refs
  return $ ctor "" [conT, ruleNameT, refsT, str . int $ i]
renderFragment (Box i j frags) = do
  fragTs <- mapM renderFragment frags
  -- The closing discharge step that ends a box, referencing its first line.
  let discharge = ctor "" ["NONE", "Dis", lst [ctor "Line" [str . int $ i]], str ""]
  let ls = fragTs ++ [discharge]
  return $ intercalate "," ls
-- Fixed typo in the error message: was "Cannt export line holes".
renderFragment (HoleLine _ _ _ _) = throwError "Cannot export line holes"
renderFragment (VarIntroduction _ _) = throwError "Cannot export variable introduction"
-- | Translate an internal rule name to its three-letter export mnemonic;
-- fails on any rule name not in the table.
renderRuleName :: RuleName -> Either String Text
renderRuleName rn =
  case rn of
    "assumption" -> return "Ass"
    "premise" -> return "Prm"
    "copy" -> return "Cpy"
    "con_i" -> return "Ain"
    "con_e1" -> return "Ae1"
    "con_e2" -> return "Ae2"
    "dis_i1" -> return "Oi1"
    "dis_i2" -> return "Oi2"
    "dis_e" -> return "Oel"
    "imp_i" -> return "Iin"
    "imp_e" -> return "Iel"
    "neg_i" -> return "Nin"
    "neg_e" -> return "Nel"
    "nne" -> return "Del"
    "nni" -> return "Din"
    "bot_e" -> return "Bel"
    "pbc" -> return "Pbc"
    "mt" -> return "Mod"
    "lem" -> return "Lem"
    _ -> throwError $ "Cannot export rule name: " ++ show rn
-- | Render a reference to a single line or to a line range (a box).
-- Reference holes cannot be exported.
renderLineRef :: LineRef -> Either String Text
renderLineRef (LineRefSingle i) = return $ ctor "Line" [str . int $ i]
renderLineRef (LineRefMulti i j) = return $ ctor "Box" [str . int $ i, str . int $ j]
renderLineRef (LineRefHole _) = throwError "Cannot export reference hole"
-- | Render a formula. Only the propositional fragment is supported:
-- predicates with arguments, equality and quantifiers are rejected.
renderFormula :: Formula -> Either String Text
renderFormula = go
  where
    go Top = return "TOP"
    go Bot = return "BOT"
    go (Conj phi1 phi2) = return . ctor "AND" =<< mapM go [phi1, phi2]
    go (Disj phi1 phi2) = return . ctor "OR" =<< mapM go [phi1, phi2]
    go (Imp phi1 phi2) = return . ctor "IMP" =<< mapM go [phi1, phi2]
    go (Neg phi) = return . ctor "NEG" . (:[]) =<< go phi
    -- Only nullary predicates (atoms) can be exported.
    go (Pred p []) = return $ ctor "Atom" [pack p]
    go (Pred _ _) = throwError "Cannot export predicates"
    go (Eq _ _) = throwError "Cannot export equality predicates"
    go (All _ _) = throwError "Cannot export universal quantifiers"
    go (Exi _ _) = throwError "Cannot export existential quantifiers"
| ulrikrasmussen/BoxProver | src/RenderData.hs | mit | 3,484 | 7 | 17 | 740 | 1,306 | 639 | 667 | 80 | 20 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module SetGameWeb.Data where
import Control.Lens (makeLenses, view)
import Control.Monad.Reader
import Control.Monad.State
import qualified Data.Map as Map
import Data.SafeCopy
import Data.Typeable
import SetGame.GameState
import Snap (Snaplet, snapletValue)
import Snap.Snaplet.AcidState
-- data persistence
-- | Key identifying one stored game.
type GameID = Integer

-- | Persisted per-game state (currently only single-player).
data WebGameState = SinglePlayer GameState
  deriving (Typeable)

type GameStateMap = Map.Map GameID WebGameState

-- | The acid-state root: all games, keyed by 'GameID'.
data GameStore = GameStore GameStateMap
  deriving (Typeable)

-- NOTE(review): 'WebGameState' has no record fields, so this 'makeLenses'
-- call generates no lenses — confirm it is actually needed.
makeLenses ''WebGameState

$(deriveSafeCopy 0 'base ''WebGameState)
$(deriveSafeCopy 0 'base ''GameStore)
-- | Insert (or overwrite) a game under the given id.
insertGame :: GameID -> WebGameState -> Update GameStore ()
insertGame key value
  = do GameStore m <- get
       put (GameStore (Map.insert key value m))
-- | Look up a game by id.
lookupGame :: GameID -> Query GameStore (Maybe WebGameState)
lookupGame key
  = do GameStore m <- ask
       return (Map.lookup key m)
-- | Total number of stored games.
countAllGames :: Query GameStore Int
countAllGames = do
  GameStore m <- ask
  -- Map.size is O(1); avoids materialising the whole association list
  -- just to count it (was: length . Map.toList).
  return (Map.size m)
-- Generate the acid-state event types for the three operations above.
$(makeAcidic ''GameStore ['insertGame, 'lookupGame, 'countAllGames])
-- | Snap application state: just the acid-state snaplet.
data App = App
  { _acid :: Snaplet (Acid GameStore)
  }

makeLenses ''App

instance HasAcid App GameStore where
  getAcidStore = view (acid.snapletValue)
| cmwilhelm/setGame | executable/SetGameWeb/Data.hs | mit | 1,575 | 0 | 12 | 342 | 414 | 220 | 194 | 43 | 1 |
{-# LANGUAGE RecordWildCards #-}
import Sprockell.System
-- Note that it never prints "First shared memaddr equals 5": all sprockells
-- are terminated before the shared memory gets a chance to write it.
-- | Three writes — two to stdout, one to shared memory address 0 — then
-- an immediate 'EndProg'.
prog :: [Instruction]
prog = [
    Const 78 RegA
  , Const 10 RegB
  , Const 5 RegC
  , Write RegA (Addr 0x1000000) -- write to stdout using explicit address
  , Write RegB stdio            -- or using the alias
  , Write RegC (Addr 0)
  -- If we add some Nop's to delay the EndProg
  -- then the shared memory has time to handle all the writes.
  -- And the debug message will be printed.
  --, Nop,Nop,Nop,Nop,Nop,Nop,Nop,Nop,Nop,Nop
  , EndProg
  ]
-- | Debug probe: reports whether shared memory address 0 holds 5 yet.
debug :: SystemState -> String
debug SysState{..} | (sharedMem !!! 0) == 5 = "First shared memaddr equals 5.\n"
debug _ = "Not 5\n"

-- | Run three Sprockells over 'prog' with the debug probe attached.
main = runDebug debug 3 prog
| martijnbastiaan/sprockell | src/DemoDebug.hs | mit | 905 | 0 | 10 | 265 | 151 | 82 | 69 | 15 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
-- | A MySQL backend for @persistent@.
module Database.Persist.MySQL
( withMySQLPool
, withMySQLConn
, createMySQLPool
, module Database.Persist.Sql
, MySQL.ConnectInfo(..)
, MySQLBase.SSLInfo(..)
, MySQL.defaultConnectInfo
, MySQLBase.defaultSSLInfo
, MySQLConf(..)
, mockMigration
, insertOnDuplicateKeyUpdate
, insertManyOnDuplicateKeyUpdate
, SomeField(SomeField)
, copyUnlessNull
, copyUnlessEmpty
, copyUnlessEq
) where
import Control.Arrow
import Control.Monad.Logger (MonadLogger, runNoLoggingT)
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Except (runExceptT)
import Control.Monad.Trans.Reader (runReaderT, ReaderT, withReaderT)
import Control.Monad.Trans.Writer (runWriterT)
import Data.Either (partitionEithers)
import Data.Monoid ((<>))
import qualified Data.Monoid as Monoid
import Data.Aeson
import Data.Aeson.Types (modifyFailure)
import Data.ByteString (ByteString)
import Data.Fixed (Pico)
import Data.Function (on)
import Data.IORef
import Data.List (find, intercalate, sort, groupBy)
import Data.Pool (Pool)
import Data.Text (Text, pack)
import qualified Data.Text.IO as T
import Text.Read (readMaybe)
import System.Environment (getEnvironment)
import Data.Acquire (Acquire, mkAcquire, with)
import Data.Conduit
import qualified Blaze.ByteString.Builder.Char8 as BBB
import qualified Blaze.ByteString.Builder.ByteString as BBS
import qualified Data.Conduit.List as CL
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Database.Persist.Sql
import Database.Persist.Sql.Types.Internal (mkPersistBackend)
import Data.Int (Int64)
import qualified Database.MySQL.Simple as MySQL
import qualified Database.MySQL.Simple.Param as MySQL
import qualified Database.MySQL.Simple.Result as MySQL
import qualified Database.MySQL.Simple.Types as MySQL
import qualified Database.MySQL.Base as MySQLBase
import qualified Database.MySQL.Base.Types as MySQLBase
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Trans.Resource (runResourceT)
import Prelude
-- | Create a MySQL connection pool and run the given action.
-- The pool is properly released after the action finishes using
-- it. Note that you should not use the given 'ConnectionPool'
-- outside the action since it may be already been released.
withMySQLPool :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, IsSqlBackend backend)
              => MySQL.ConnectInfo
              -- ^ Connection information.
              -> Int
              -- ^ Number of connections to be kept open in the pool.
              -> (Pool backend -> m a)
              -- ^ Action to be executed that uses the connection pool.
              -> m a
withMySQLPool connInfo poolSize action =
    -- Delegate to persistent's generic pool machinery, opening
    -- connections with our MySQL-specific 'open''.
    withSqlPool (open' connInfo) poolSize action
-- | Create a MySQL connection pool. Note that it's your
-- responsibility to properly close the connection pool when
-- unneeded. Use 'withMySQLPool' for automatic resource control.
createMySQLPool :: (MonadBaseControl IO m, MonadIO m, MonadLogger m, IsSqlBackend backend)
                => MySQL.ConnectInfo
                -- ^ Connection information.
                -> Int
                -- ^ Number of connections to be kept open in the pool.
                -> m (Pool backend)
createMySQLPool connInfo poolSize =
    -- The caller owns the pool and must release it; see 'withMySQLPool'
    -- for the bracketed variant.
    createSqlPool (open' connInfo) poolSize
-- | Same as 'withMySQLPool', but instead of opening a pool
-- of connections, only one connection is opened.
withMySQLConn :: (MonadBaseControl IO m, MonadIO m, MonadLogger m, IsSqlBackend backend)
              => MySQL.ConnectInfo
              -- ^ Connection information.
              -> (backend -> m a)
              -- ^ Action to be executed that uses the connection.
              -> m a
withMySQLConn connInfo =
    -- Single-connection analogue of 'withMySQLPool'.
    withSqlConn (open' connInfo)
-- | Internal function that opens a connection to the MySQL
-- server.
open' :: (IsSqlBackend backend) => MySQL.ConnectInfo -> LogFunc -> IO backend
open' ci logFunc = do
    conn <- MySQL.connect ci
    MySQLBase.autocommit conn False -- disable autocommit!
    -- Per-connection cache of prepared statements (persistent manages it).
    smap <- newIORef $ Map.empty
    return . mkPersistBackend $ SqlBackend
        { connPrepare = prepare' conn
        , connStmtMap = smap
        , connInsertSql = insertSql'
        -- No special many-insert / upsert SQL: persistent falls back to
        -- its generic implementations.
        , connInsertManySql = Nothing
        , connUpsertSql = Nothing
        , connClose = MySQL.close conn
        , connMigrateSql = migrate' ci
        -- Transactions are driven explicitly since autocommit is off.
        , connBegin = const $ MySQL.execute_ conn "start transaction" >> return ()
        , connCommit = const $ MySQL.commit conn
        , connRollback = const $ MySQL.rollback conn
        , connEscapeName = pack . escapeDBName
        , connNoLimit = "LIMIT 18446744073709551615"
        -- This noLimit is suggested by MySQL's own docs, see
        -- <http://dev.mysql.com/doc/refman/5.5/en/select.html>
        , connRDBMS = "mysql"
        , connLimitOffset = decorateSQLWithLimitOffset "LIMIT 18446744073709551615"
        , connLogFunc = logFunc
        , connMaxParams = Nothing
        }
-- | Prepare a query. We don't support prepared statements, but
-- we'll do some client-side preprocessing here.
-- | Build a 'Statement' for the given SQL text. MySQL (via mysql-simple)
-- gets no true server-side prepared statements here, so "preparing" is
-- just encoding the query once; finalize/reset are no-ops.
prepare' :: MySQL.Connection -> Text -> IO Statement
prepare' conn sql =
    return Statement
        { stmtFinalize = return ()
        , stmtReset    = return ()
        , stmtExecute  = execute' conn query
        , stmtQuery    = withStmt' conn query
        }
  where
    -- Encode the query text to bytes exactly once, up front.
    query = MySQL.Query (T.encodeUtf8 sql)
-- | SQL code to be executed when inserting an entity.
-- | SQL for inserting one row of the given entity, together with how to
-- obtain the generated key: entities with a composite primary key carry
-- the key in the values themselves, otherwise we read LAST_INSERT_ID().
insertSql' :: EntityDef -> [PersistValue] -> InsertSqlResult
insertSql' ent vals =
    case entityPrimary ent of
      Just _  -> ISRManyKeys sql vals
      Nothing -> ISRInsertGet sql "SELECT LAST_INSERT_ID()"
  where
    fields = entityFields ent
    sql = pack $ concat
        [ "INSERT INTO "
        , escapeDBName $ entityDB ent
        , "("
        , intercalate "," $ map (escapeDBName . fieldDB) fields
        , ") VALUES("
        , intercalate "," $ map (const "?") fields
        , ")"
        ]
-- | Execute an statement that doesn't return any results.
-- | Run a statement that returns no rows, yielding the affected-row count.
-- Each value is wrapped in 'P' so our 'MySQL.Param' instance applies.
execute' :: MySQL.Connection -> MySQL.Query -> [PersistValue] -> IO Int64
execute' conn query = MySQL.execute conn query . map P
-- | Execute an statement that does return results. The results
-- are fetched all at once and stored into memory.
withStmt' :: MonadIO m
          => MySQL.Connection
          -> MySQL.Query
          -> [PersistValue]
          -> Acquire (Source m [PersistValue])
withStmt' conn query vals = do
    -- 'mkAcquire' guarantees the result set is freed even on exceptions.
    result <- mkAcquire createResult MySQLBase.freeResult
    return $ fetchRows result >>= CL.sourceList
  where
    createResult = do
      -- Execute the query
      formatted <- MySQL.formatQuery conn query (map P vals)
      MySQLBase.query conn formatted
      -- storeResult pulls the whole result set client-side at once.
      MySQLBase.storeResult conn
    fetchRows result = liftIO $ do
      -- Find out the type of the columns
      fields <- MySQLBase.fetchFields result
      -- NOTE(review): 'getGetter f f' applies the field twice because
      -- 'Getter' itself takes the field as its first argument — confirm
      -- against the 'Getter' alias before changing.
      let getters = [ maybe PersistNull (getGetter f f . Just) | f <- fields]
          -- Convert a raw row strictly, column by column, to avoid thunks.
          convert = use getters
            where use (g:gs) (col:cols) =
                    let v = g col
                        vs = use gs cols
                    in v `seq` vs `seq` (v:vs)
                  use _ _ = []
      -- Ready to go!
      -- Accumulate converted rows with a difference list ('acc').
      let go acc = do
            row <- MySQLBase.fetchRow result
            case row of
              [] -> return (acc [])
              _ -> let converted = convert row
                   in converted `seq` go (acc . (converted:))
      go id
-- | @newtype@ around 'PersistValue' that supports the
-- 'MySQL.Param' type class.
newtype P = P PersistValue

-- Render every 'PersistValue' through the corresponding mysql-simple
-- renderer; lists and maps are serialized as JSON text.
instance MySQL.Param P where
    render (P (PersistText t)) = MySQL.render t
    render (P (PersistByteString bs)) = MySQL.render bs
    render (P (PersistInt64 i)) = MySQL.render i
    render (P (PersistDouble d)) = MySQL.render d
    render (P (PersistBool b)) = MySQL.render b
    render (P (PersistDay d)) = MySQL.render d
    render (P (PersistTimeOfDay t)) = MySQL.render t
    render (P (PersistUTCTime t)) = MySQL.render t
    render (P PersistNull) = MySQL.render MySQL.Null
    render (P (PersistList l)) = MySQL.render $ listToJSON l
    render (P (PersistMap m)) = MySQL.render $ mapToJSON m
    render (P (PersistRational r)) =
      -- Rendered via 'Pico' (12 decimal places) as a raw SQL literal.
      MySQL.Plain $ BBB.fromString $ show (fromRational r :: Pico)
      -- FIXME: Too Ambigous, can not select precision without information about field
    render (P (PersistDbSpecific s)) = MySQL.Plain $ BBS.fromByteString s
    render (P (PersistObjectId _)) =
      error "Refusing to serialize a PersistObjectId to a MySQL value"
-- | @Getter a@ is a function that converts an incoming value
-- into a data type @a@.
type Getter a = MySQLBase.Field -> Maybe ByteString -> a
-- | Helper to construct 'Getter'@s@ using 'MySQL.Result': decode the raw
-- bytes with 'MySQL.convert', then post-process with the given function.
convertPV :: MySQL.Result a => (a -> b) -> Getter b
convertPV f = (f .) . MySQL.convert
-- | Get the corresponding @'Getter' 'PersistValue'@ depending on
-- the type of the column.
getGetter :: MySQLBase.Field -> Getter PersistValue
getGetter field = go (MySQLBase.fieldType field)
                     (MySQLBase.fieldLength field)
                     (MySQLBase.fieldCharSet field)
  where
    -- Dispatch on (type, declared length, character set number).
    -- Bool
    go MySQLBase.Tiny       1 _ = convertPV PersistBool
    go MySQLBase.Tiny       _ _ = convertPV PersistInt64
    -- Int64
    go MySQLBase.Int24      _ _ = convertPV PersistInt64
    go MySQLBase.Short      _ _ = convertPV PersistInt64
    go MySQLBase.Long       _ _ = convertPV PersistInt64
    go MySQLBase.LongLong   _ _ = convertPV PersistInt64
    -- Double
    go MySQLBase.Float      _ _ = convertPV PersistDouble
    go MySQLBase.Double     _ _ = convertPV PersistDouble
    go MySQLBase.Decimal    _ _ = convertPV PersistDouble
    go MySQLBase.NewDecimal _ _ = convertPV PersistDouble
    -- ByteString and Text
    -- The MySQL C client (and by extension the Haskell mysql package) doesn't distinguish between binary and non-binary string data at the type level.
    -- (e.g. both BLOB and TEXT have the MySQLBase.Blob type).
    -- Instead, the character set distinguishes them. Binary data uses character set number 63.
    -- See https://dev.mysql.com/doc/refman/5.6/en/c-api-data-structures.html (Search for "63")
    go MySQLBase.VarChar    _ 63 = convertPV PersistByteString
    go MySQLBase.VarString  _ 63 = convertPV PersistByteString
    go MySQLBase.String     _ 63 = convertPV PersistByteString
    go MySQLBase.VarChar    _ _  = convertPV PersistText
    go MySQLBase.VarString  _ _  = convertPV PersistText
    go MySQLBase.String     _ _  = convertPV PersistText
    go MySQLBase.Blob       _ 63 = convertPV PersistByteString
    go MySQLBase.TinyBlob   _ 63 = convertPV PersistByteString
    go MySQLBase.MediumBlob _ 63 = convertPV PersistByteString
    go MySQLBase.LongBlob   _ 63 = convertPV PersistByteString
    go MySQLBase.Blob       _ _  = convertPV PersistText
    go MySQLBase.TinyBlob   _ _  = convertPV PersistText
    go MySQLBase.MediumBlob _ _  = convertPV PersistText
    go MySQLBase.LongBlob   _ _  = convertPV PersistText
    -- Time-related
    go MySQLBase.Time       _ _ = convertPV PersistTimeOfDay
    go MySQLBase.DateTime   _ _ = convertPV PersistUTCTime
    go MySQLBase.Timestamp  _ _ = convertPV PersistUTCTime
    go MySQLBase.Date       _ _ = convertPV PersistDay
    go MySQLBase.NewDate    _ _ = convertPV PersistDay
    go MySQLBase.Year       _ _ = convertPV PersistDay
    -- Null
    go MySQLBase.Null       _ _ = \_ _ -> PersistNull
    -- Controversial conversions
    go MySQLBase.Set        _ _ = convertPV PersistText
    go MySQLBase.Enum       _ _ = convertPV PersistText
    -- Conversion using PersistDbSpecific
    go MySQLBase.Geometry   _ _ = \_ m ->
      case m of
        Just g -> PersistDbSpecific g
        Nothing -> error "Unexpected null in database specific value"
    -- Unsupported
    go other _ _ = error $ "MySQL.getGetter: type " ++
                   show other ++ " not supported."
----------------------------------------------------------------------
-- | Create the migration plan for the given 'PersistEntity'
-- @val@.
migrate' :: MySQL.ConnectInfo
         -> [EntityDef]
         -> (Text -> IO Statement)
         -> EntityDef
         -> IO (Either [Text] [(Bool, Text)])
migrate' connectInfo allDefs getter val = do
    let name = entityDB val
    -- Current state of the table in the database (ID column / the rest).
    (idClmn, old) <- getColumns connectInfo getter val
    -- Desired state derived from the entity definition.
    let (newcols, udefs, fdefs) = mkColumns allDefs val
    let udspair = map udToPair udefs
    case (idClmn, old, partitionEithers old) of
      -- Nothing found, create everything
      ([], [], _) -> do
        let uniques = flip concatMap udspair $ \(uname, ucols) ->
              [ AlterTable name $
                AddUniqueConstraint uname $
                map (findTypeAndMaxLen name) ucols ]
        -- Foreign keys from plain column references...
        let foreigns = do
              Column { cName=cname, cReference=Just (refTblName, _a) } <- newcols
              return $ AlterColumn name (refTblName, addReference allDefs (refName name cname) refTblName cname)
        -- ...and from explicit Foreign declarations (possibly composite).
        let foreignsAlt = map (\fdef -> let (childfields, parentfields) = unzip (map (\((_,b),(_,d)) -> (b,d)) (foreignFields fdef))
                                        in AlterColumn name (foreignRefTableDBName fdef, AddReference (foreignRefTableDBName fdef) (foreignConstraintNameDBName fdef) childfields parentfields)) fdefs
        return $ Right $ map showAlterDb $ (addTable newcols val): uniques ++ foreigns ++ foreignsAlt
      -- No errors and something found, migrate
      (_, _, ([], old')) -> do
        -- Drop references that are managed by explicit Foreign
        -- declarations so they are not diffed as column references.
        let excludeForeignKeys (xs,ys) = (map (\c -> case cReference c of
                                                       Just (_,fk) -> case find (\f -> fk == foreignConstraintNameDBName f) fdefs of
                                                                        Just _ -> c { cReference = Nothing }
                                                                        Nothing -> c
                                                       Nothing -> c) xs,ys)
            (acs, ats) = getAlters allDefs name (newcols, udspair) $ excludeForeignKeys $ partitionEithers old'
            acs' = map (AlterColumn name) acs
            ats' = map (AlterTable name) ats
        return $ Right $ map showAlterDb $ acs' ++ ats'
      -- Errors
      (_, _, (errs, _)) -> return $ Left errs
  where
    findTypeAndMaxLen tblName col = let (col', ty) = findTypeOfColumn allDefs tblName col
                                        (_, ml) = findMaxLenOfColumn allDefs tblName col
                                    in (col', ty, ml)
addTable :: [Column] -> EntityDef -> AlterDB
addTable cols entity = AddTable $ concat
    -- Lower case e: see Database.Persist.Sql.Migration
    [ "CREATe TABLE "
    , escapeDBName name
    , "("
    , idtxt
    , if null cols then [] else ","
    , intercalate "," $ map showColumn cols
    , ")"
    ]
  where
    name = entityDB entity
    -- The primary-key fragment: composite keys list their columns;
    -- otherwise the single ID column, auto-incremented when it is a
    -- default-less SqlInt64.
    idtxt = case entityPrimary entity of
      Just pdef -> concat [" PRIMARY KEY (", intercalate "," $ map (escapeDBName . fieldDB) $ compositeFields pdef, ")"]
      Nothing ->
        let defText = defaultAttribute $ fieldAttrs $ entityId entity
            sType = fieldSqlType $ entityId entity
            autoIncrementText = case (sType, defText) of
              (SqlInt64, Nothing) -> " AUTO_INCREMENT"
              _ -> ""
            maxlen = findMaxLenOfField (entityId entity)
        in concat
            [ escapeDBName $ fieldDB $ entityId entity
            , " " <> showSqlType sType maxlen False
            , " NOT NULL"
            , autoIncrementText
            , " PRIMARY KEY"
            ]
-- | Find out the type of a column.
-- | Find out the Haskell-side type of a column, pairing it with the
-- column name. Calls 'error' when the table/column is unknown, since a
-- unique constraint over a missing column is a programming mistake.
findTypeOfColumn :: [EntityDef] -> DBName -> DBName -> (DBName, FieldType)
findTypeOfColumn allDefs name col =
    case lookedUp of
      Just ty -> (col, ty)
      Nothing -> error $ "Could not find type of column " ++
                 show col ++ " on table " ++ show name ++
                 " (allDefs = " ++ show allDefs ++ ")"
  where
    lookedUp = do
      entDef   <- find ((== name) . entityDB) allDefs
      fieldDef <- find ((== col) . fieldDB) (entityFields entDef)
      return (fieldType fieldDef)
-- | Find out the maxlen of a column (default to 200)
-- | Find out the @maxlen@ of a column, defaulting to 200 when the
-- table, the column, or its @maxlen=@ attribute cannot be found.
findMaxLenOfColumn :: [EntityDef] -> DBName -> DBName -> (DBName, Integer)
findMaxLenOfColumn allDefs name col =
    maybe (col, 200) ((,) col) $
      find ((== name) . entityDB) allDefs
        >>= find ((== col) . fieldDB) . entityFields
        >>= findMaxLenOfField
-- | Find out the maxlen of a field
-- | Extract the numeric value of a field's @maxlen=N@ attribute,
-- if present and parseable.
findMaxLenOfField :: FieldDef -> Maybe Integer
findMaxLenOfField fieldDef =
    find (T.isPrefixOf "maxlen=" . T.toLower) (fieldAttrs fieldDef)
      >>= readMaybe . T.unpack . T.drop 7 -- drop the "maxlen=" prefix
-- | Helper for 'AddReference' that finds out the which primary key columns to reference.
addReference :: [EntityDef] -> DBName -> DBName -> DBName -> AlterColumn
addReference allDefs fkeyname reftable cname = AddReference reftable fkeyname [cname] referencedColumns
  where
    -- The referenced entity's key columns; 'error' if the entity is
    -- unknown, since that indicates an inconsistent schema.
    referencedColumns = maybe (error $ "Could not find ID of entity " ++ show reftable
                                 ++ " (allDefs = " ++ show allDefs ++ ")")
                              id $ do
                                entDef <- find ((== reftable) . entityDB) allDefs
                                return $ map fieldDB $ entityKeyFields entDef
-- | A single alteration applying to one column.
data AlterColumn = Change Column
                 | Add' Column
                 | Drop
                 | Default String
                 | NoDefault
                 | Update' String
                 -- | See the definition of the 'showAlter' function to see how these fields are used.
                 | AddReference
                     DBName -- Referenced table
                     DBName -- Foreign key name
                     [DBName] -- Referencing columns
                     [DBName] -- Referenced columns
                 | DropReference DBName

-- | A column name paired with the alteration to apply to it.
type AlterColumn' = (DBName, AlterColumn)

-- | Table-level (unique constraint) alterations.
data AlterTable = AddUniqueConstraint DBName [(DBName, FieldType, Integer)]
                | DropUniqueConstraint DBName

-- | Any action the migration may emit; rendered by 'showAlterDb'.
data AlterDB = AddTable String
             | AlterColumn DBName AlterColumn'
             | AlterTable DBName AlterTable
-- | Project a unique constraint to its DB name and its columns' DB names.
udToPair :: UniqueDef -> (DBName, [DBName])
udToPair ud = (uniqueDBName ud, [ dbCol | (_, dbCol) <- uniqueFields ud ])
----------------------------------------------------------------------
-- | Returns all of the 'Column'@s@ in the given table currently
-- in the database.
getColumns :: MySQL.ConnectInfo
           -> (Text -> IO Statement)
           -> EntityDef
           -> IO ( [Either Text (Either Column (DBName, [DBName]))] -- ID column
                 , [Either Text (Either Column (DBName, [DBName]))] -- everything else
                 )
getColumns connectInfo getter def = do
    -- Find out ID column.
    stmtIdClmn <- getter "SELECT COLUMN_NAME, \
                         \IS_NULLABLE, \
                         \DATA_TYPE, \
                         \COLUMN_DEFAULT \
                         \FROM INFORMATION_SCHEMA.COLUMNS \
                         \WHERE TABLE_SCHEMA = ? \
                         \AND TABLE_NAME = ? \
                         \AND COLUMN_NAME = ?"
    inter1 <- with (stmtQuery stmtIdClmn vals) ($$ CL.consume)
    ids <- runResourceT $ CL.sourceList inter1 $$ helperClmns -- avoid nested queries
    -- Find out all columns.
    stmtClmns <- getter "SELECT COLUMN_NAME, \
                        \IS_NULLABLE, \
                        \DATA_TYPE, \
                        \COLUMN_TYPE, \
                        \CHARACTER_MAXIMUM_LENGTH, \
                        \NUMERIC_PRECISION, \
                        \NUMERIC_SCALE, \
                        \COLUMN_DEFAULT \
                        \FROM INFORMATION_SCHEMA.COLUMNS \
                        \WHERE TABLE_SCHEMA = ? \
                        \AND TABLE_NAME = ? \
                        \AND COLUMN_NAME <> ?"
    inter2 <- with (stmtQuery stmtClmns vals) ($$ CL.consume)
    cs <- runResourceT $ CL.sourceList inter2 $$ helperClmns -- avoid nested queries
    -- Find out the constraints.
    stmtCntrs <- getter "SELECT CONSTRAINT_NAME, \
                        \COLUMN_NAME \
                        \FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE \
                        \WHERE TABLE_SCHEMA = ? \
                        \AND TABLE_NAME = ? \
                        \AND COLUMN_NAME <> ? \
                        \AND CONSTRAINT_NAME <> 'PRIMARY' \
                        \AND REFERENCED_TABLE_SCHEMA IS NULL \
                        \ORDER BY CONSTRAINT_NAME, \
                        \COLUMN_NAME"
    us <- with (stmtQuery stmtCntrs vals) ($$ helperCntrs)
    -- Return both
    return (ids, cs ++ us)
  where
    -- Shared bind parameters: schema (database) name, table name, ID column.
    vals = [ PersistText $ pack $ MySQL.connectDatabase connectInfo
           , PersistText $ unDBName $ entityDB def
           , PersistText $ unDBName $ fieldDB $ entityId def ]
    -- Parse each column row via 'getColumn'.
    helperClmns = CL.mapM getIt =$ CL.consume
      where
        getIt = fmap (either Left (Right . Left)) .
                liftIO .
                getColumn connectInfo getter (entityDB def)
    -- Group constraint rows (sorted by the query) into (name, columns).
    helperCntrs = do
      let check [ PersistText cntrName
                , PersistText clmnName] = return ( cntrName, clmnName )
          check other = fail $ "helperCntrs: unexpected " ++ show other
      rows <- mapM check =<< CL.consume
      return $ map (Right . Right . (DBName . fst . head &&& map (DBName . snd)))
             $ groupBy ((==) `on` fst) rows
-- | Get the information about a column in a table.
getColumn :: MySQL.ConnectInfo
          -> (Text -> IO Statement)
          -> DBName
          -> [PersistValue]
          -> IO (Either Text Column)
-- The eight PersistValues must match the SELECT list in 'getColumns'.
getColumn connectInfo getter tname [ PersistText cname
                                   , PersistText null_
                                   , PersistText dataType
                                   , PersistText colType
                                   , colMaxLen
                                   , colPrecision
                                   , colScale
                                   , default'] =
    fmap (either (Left . pack) Right) $
    runExceptT $ do
      -- Default value
      default_ <- case default' of
                    PersistNull -> return Nothing
                    PersistText t -> return (Just t)
                    PersistByteString bs ->
                      case T.decodeUtf8' bs of
                        Left exc -> fail $ "Invalid default column: " ++
                                    show default' ++ " (error: " ++
                                    show exc ++ ")"
                        Right t -> return (Just t)
                    _ -> fail $ "Invalid default column: " ++ show default'
      -- Foreign key (if any)
      stmt <- lift $ getter "SELECT REFERENCED_TABLE_NAME, \
                            \CONSTRAINT_NAME, \
                            \ORDINAL_POSITION \
                            \FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE \
                            \WHERE TABLE_SCHEMA = ? \
                            \AND TABLE_NAME = ? \
                            \AND COLUMN_NAME = ? \
                            \AND REFERENCED_TABLE_SCHEMA = ? \
                            \ORDER BY CONSTRAINT_NAME, \
                            \COLUMN_NAME"
      let vars = [ PersistText $ pack $ MySQL.connectDatabase connectInfo
                 , PersistText $ unDBName $ tname
                 , PersistText cname
                 , PersistText $ pack $ MySQL.connectDatabase connectInfo ]
      cntrs <- with (stmtQuery stmt vars) ($$ CL.consume)
      -- Only single-column references (ORDINAL_POSITION = 1) are kept.
      ref <- case cntrs of
               [] -> return Nothing
               [[PersistText tab, PersistText ref, PersistInt64 pos]] ->
                 return $ if pos == 1 then Just (DBName tab, DBName ref) else Nothing
               _ -> fail "MySQL.getColumn/getRef: never here"
      let colMaxLen' = case colMaxLen of
                         PersistInt64 l -> Just (fromIntegral l)
                         _ -> Nothing
          ci = ColumnInfo
                 { ciColumnType = colType
                 , ciMaxLength = colMaxLen'
                 , ciNumericPrecision = colPrecision
                 , ciNumericScale = colScale
                 }
      (typ, maxLen) <- parseColumnType dataType ci
      -- Okay!
      return Column
        { cName = DBName $ cname
        , cNull = null_ == "YES"
        , cSqlType = typ
        , cDefault = default_
        , cDefaultConstraintName = Nothing
        , cMaxLen = maxLen
        , cReference = ref
        }
getColumn _ _ _ x =
    return $ Left $ pack $ "Invalid result from INFORMATION_SCHEMA: " ++ show x
-- | Extra column information from MySQL schema
data ColumnInfo = ColumnInfo
    { ciColumnType :: Text            -- ^ Full COLUMN_TYPE, e.g. @"int(11)"@.
    , ciMaxLength :: Maybe Integer    -- ^ CHARACTER_MAXIMUM_LENGTH, when numeric.
    , ciNumericPrecision :: PersistValue -- ^ Raw NUMERIC_PRECISION value.
    , ciNumericScale :: PersistValue     -- ^ Raw NUMERIC_SCALE value.
    }
-- | Parse the type of column as returned by MySQL's
-- @INFORMATION_SCHEMA@ tables.
parseColumnType :: Monad m => Text -> ColumnInfo -> m (SqlType, Maybe Integer)
-- Ints
-- The COLUMN_TYPE guard pins the display width persistent itself creates;
-- other widths fall through to SqlOther below.
parseColumnType "tinyint" ci | ciColumnType ci == "tinyint(1)" = return (SqlBool, Nothing)
parseColumnType "int" ci | ciColumnType ci == "int(11)" = return (SqlInt32, Nothing)
parseColumnType "bigint" ci | ciColumnType ci == "bigint(20)" = return (SqlInt64, Nothing)
-- Double
parseColumnType x@("double") ci | ciColumnType ci == x = return (SqlReal, Nothing)
parseColumnType "decimal" ci =
    case (ciNumericPrecision ci, ciNumericScale ci) of
      (PersistInt64 p, PersistInt64 s) ->
        return (SqlNumeric (fromIntegral p) (fromIntegral s), Nothing)
      _ ->
        fail "missing DECIMAL precision in DB schema"
-- Text
parseColumnType "varchar" ci = return (SqlString, ciMaxLength ci)
parseColumnType "text" _ = return (SqlString, Nothing)
-- ByteString
parseColumnType "varbinary" ci = return (SqlBlob, ciMaxLength ci)
parseColumnType "blob" _ = return (SqlBlob, Nothing)
-- Time-related
parseColumnType "time" _ = return (SqlTime, Nothing)
parseColumnType "datetime" _ = return (SqlDayTime, Nothing)
parseColumnType "date" _ = return (SqlDay, Nothing)
-- Anything else is carried verbatim as SqlOther.
parseColumnType _ ci = return (SqlOther (ciColumnType ci), Nothing)
----------------------------------------------------------------------
-- | @getAlters allDefs tblName new old@ finds out what needs to
-- be changed from @old@ to become @new@.
getAlters :: [EntityDef]
          -> DBName
          -> ([Column], [(DBName, [DBName])])
          -> ([Column], [(DBName, [DBName])])
          -> ([AlterColumn'], [AlterTable])
getAlters allDefs tblName (c1, u1) (c2, u2) =
    (getAltersC c1 c2, getAltersU u1 u2)
  where
    -- Diff columns: each desired column consumes its match from the old
    -- list; leftover old columns are dropped.
    getAltersC [] old = concatMap dropColumn old
    getAltersC (new:news) old =
        let (alters, old') = findAlters tblName allDefs new old
        in alters ++ getAltersC news old'
    -- Dropping a column must first drop any foreign key it carries.
    dropColumn col =
      map ((,) (cName col)) $
        [DropReference n | Just (_, n) <- [cReference col]] ++
        [Drop]
    -- Diff unique constraints by name; recreate when columns changed.
    getAltersU [] old = map (DropUniqueConstraint . fst) old
    getAltersU ((name, cols):news) old =
        case lookup name old of
          Nothing ->
            AddUniqueConstraint name (map findTypeAndMaxLen cols) : getAltersU news old
          Just ocols ->
            let old' = filter (\(x, _) -> x /= name) old
            in if sort cols == ocols
               then getAltersU news old'
               else DropUniqueConstraint name
                    : AddUniqueConstraint name (map findTypeAndMaxLen cols)
                    : getAltersU news old'
      where
        findTypeAndMaxLen col = let (col', ty) = findTypeOfColumn allDefs tblName col
                                    (_, ml) = findMaxLenOfColumn allDefs tblName col
                                in (col', ty, ml)
-- | @findAlters newColumn oldColumns@ finds out what needs to be
-- changed in the columns @oldColumns@ for @newColumn@ to be
-- supported.
findAlters :: DBName -> [EntityDef] -> Column -> [Column] -> ([AlterColumn'], [Column])
findAlters tblName allDefs col@(Column name isNull type_ def _defConstraintName maxLen ref) cols =
    case filter ((name ==) . cName) cols of
      -- new fkey that didnt exist before
      -- NOTE(review): additions here are keyed on the referenced table
      -- name ('tname'), not the column's own table — confirm this is what
      -- 'showAlter' expects before changing.
      [] -> case ref of
              Nothing -> ([(name, Add' col)],[])
              Just (tname, _b) -> let cnstr = [addReference allDefs (refName tblName name) tname name]
                                  in (map ((,) tname) (Add' col : cnstr), cols)
      Column _ isNull' type_' def' _defConstraintName' maxLen' ref':_ ->
        let -- Foreign key
            refDrop = case (ref == ref', ref') of
                        (False, Just (_, cname)) -> [(name, DropReference cname)]
                        _ -> []
            refAdd = case (ref == ref', ref) of
                       (False, Just (tname, _cname)) -> [(tname, addReference allDefs (refName tblName name) tname name)]
                       _ -> []
            -- Type and nullability
            modType | showSqlType type_ maxLen False `ciEquals` showSqlType type_' maxLen' False && isNull == isNull' = []
                    | otherwise = [(name, Change col)]
            -- Default value
            -- Avoid DEFAULT NULL, since it is always unnecessary, and is an error for text/blob fields
            modDef | def == def' = []
                   | otherwise = case def of
                                   Nothing -> [(name, NoDefault)]
                                   Just s -> if T.toUpper s == "NULL" then []
                                             else [(name, Default $ T.unpack s)]
        in ( refDrop ++ modType ++ modDef ++ refAdd
           , filter ((name /=) . cName) cols )
  where
    -- MySQL type names are compared case-insensitively.
    ciEquals x y = T.toCaseFold (T.pack x) == T.toCaseFold (T.pack y)
----------------------------------------------------------------------
-- | Prints the part of a @CREATE TABLE@ statement about a given
-- column.
showColumn :: Column -> String
showColumn (Column n nu t def _defConstraintName maxLen ref) = concat
    [ escapeDBName n
    , " "
    , showSqlType t maxLen True
    , " "
    , if nu then "NULL" else "NOT NULL"
    , case def of
        Nothing -> ""
        Just s -> -- Avoid DEFAULT NULL, since it is always unnecessary, and is an error for text/blob fields
                  if T.toUpper s == "NULL" then ""
                  else " DEFAULT " ++ T.unpack s
    , case ref of
        Nothing -> ""
        Just (s, _) -> " REFERENCES " ++ escapeDBName s
    ]
-- | Renders an 'SqlType' in MySQL's format.
showSqlType :: SqlType
            -> Maybe Integer -- ^ @maxlen@
            -> Bool -- ^ include character set information?
            -> String
showSqlType SqlBlob    Nothing  _ = "BLOB"
showSqlType SqlBlob    (Just i) _ = "VARBINARY(" ++ show i ++ ")"
showSqlType SqlBool    _        _ = "TINYINT(1)"
showSqlType SqlDay     _        _ = "DATE"
showSqlType SqlDayTime _        _ = "DATETIME"
showSqlType SqlInt32   _        _ = "INT(11)"
showSqlType SqlInt64   _        _ = "BIGINT"
showSqlType SqlReal    _        _ = "DOUBLE"
showSqlType (SqlNumeric s prec) _ _ = "NUMERIC(" ++ show s ++ "," ++ show prec ++ ")"
-- Character set only matters for textual types; it is omitted when the
-- rendered type is compared against INFORMATION_SCHEMA output.
showSqlType SqlString  Nothing  True  = "TEXT CHARACTER SET utf8"
showSqlType SqlString  Nothing  False = "TEXT"
showSqlType SqlString  (Just i) True  = "VARCHAR(" ++ show i ++ ") CHARACTER SET utf8"
showSqlType SqlString  (Just i) False = "VARCHAR(" ++ show i ++ ")"
showSqlType SqlTime    _        _ = "TIME"
showSqlType (SqlOther t) _      _ = T.unpack t
-- | Render an action that must be done on the database.
-- The Bool flags whether the statement is unsafe (loses data).
showAlterDb :: AlterDB -> (Bool, Text)
showAlterDb (AddTable s) = (False, pack s)
showAlterDb (AlterColumn t (c, ac)) =
    (isUnsafe ac, pack $ showAlter t (c, ac))
  where
    -- Only dropping a column destroys data.
    isUnsafe Drop = True
    isUnsafe _    = False
showAlterDb (AlterTable t at) = (False, pack $ showAlterTable t at)
-- | Render an action that must be done on a table.
showAlterTable :: DBName -> AlterTable -> String
showAlterTable table (AddUniqueConstraint cname cols) = concat
    [ "ALTER TABLE "
    , escapeDBName table
    , " ADD CONSTRAINT "
    , escapeDBName cname
    , " UNIQUE("
    , intercalate "," $ map escapeDBName' cols
    , ")"
    ]
  where
    -- Textual columns need an explicit index prefix length in MySQL.
    escapeDBName' (name, (FTTypeCon _ "Text"      ), maxlen) = escapeDBName name ++ "(" ++ show maxlen ++ ")"
    escapeDBName' (name, (FTTypeCon _ "String"    ), maxlen) = escapeDBName name ++ "(" ++ show maxlen ++ ")"
    escapeDBName' (name, (FTTypeCon _ "ByteString"), maxlen) = escapeDBName name ++ "(" ++ show maxlen ++ ")"
    escapeDBName' (name, _                         , _) = escapeDBName name
showAlterTable table (DropUniqueConstraint cname) = concat
    [ "ALTER TABLE "
    , escapeDBName table
    , " DROP INDEX "
    , escapeDBName cname
    ]
-- | Render an action that must be done on a column.
showAlter :: DBName -> AlterColumn' -> String
-- CHANGE: the reference is deliberately stripped; foreign keys are
-- managed by separate AddReference/DropReference statements.
showAlter table (oldName, Change (Column n nu t def defConstraintName maxLen _ref)) =
    concat
      [ "ALTER TABLE "
      , escapeDBName table
      , " CHANGE "
      , escapeDBName oldName
      , " "
      , showColumn (Column n nu t def defConstraintName maxLen Nothing)
      ]
showAlter table (_, Add' col) =
    concat
      [ "ALTER TABLE "
      , escapeDBName table
      , " ADD COLUMN "
      , showColumn col
      ]
showAlter table (n, Drop) =
    concat
      [ "ALTER TABLE "
      , escapeDBName table
      , " DROP COLUMN "
      , escapeDBName n
      ]
showAlter table (n, Default s) =
    concat
      [ "ALTER TABLE "
      , escapeDBName table
      , " ALTER COLUMN "
      , escapeDBName n
      , " SET DEFAULT "
      , s
      ]
showAlter table (n, NoDefault) =
    concat
      [ "ALTER TABLE "
      , escapeDBName table
      , " ALTER COLUMN "
      , escapeDBName n
      , " DROP DEFAULT"
      ]
-- Backfill NULLs with a constant, e.g. before a NOT NULL change.
showAlter table (n, Update' s) =
    concat
      [ "UPDATE "
      , escapeDBName table
      , " SET "
      , escapeDBName n
      , "="
      , s
      , " WHERE "
      , escapeDBName n
      , " IS NULL"
      ]
showAlter table (_, AddReference reftable fkeyname t2 id2) = concat
    [ "ALTER TABLE "
    , escapeDBName table
    , " ADD CONSTRAINT "
    , escapeDBName fkeyname
    , " FOREIGN KEY("
    , intercalate "," $ map escapeDBName t2
    , ") REFERENCES "
    , escapeDBName reftable
    , "("
    , intercalate "," $ map escapeDBName id2
    , ")"
    ]
showAlter table (_, DropReference cname) = concat
    [ "ALTER TABLE "
    , escapeDBName table
    , " DROP FOREIGN KEY "
    , escapeDBName cname
    ]
-- | Conventional foreign-key constraint name: @table_column_fkey@.
refName :: DBName -> DBName -> DBName
refName (DBName table) (DBName column) =
    DBName (table <> "_" <> column <> "_fkey")
----------------------------------------------------------------------
-- | Escape a database name to be included on a query.
-- | Escape a database identifier for interpolation into a query:
-- wrap it in backticks and double any embedded backtick.
escapeDBName :: DBName -> String
escapeDBName (DBName s) = '`' : concatMap escapeChar (T.unpack s) ++ "`"
  where
    escapeChar '`' = "``"
    escapeChar c   = [c]
-- | Information required to connect to a MySQL database
-- using @persistent@'s generic facilities. These values are the
-- same that are given to 'withMySQLPool'.
data MySQLConf = MySQLConf
    { myConnInfo :: MySQL.ConnectInfo
    -- ^ The connection information.
    , myPoolSize :: Int
    -- ^ How many connections should be held on the connection pool.
    } deriving Show
instance FromJSON MySQLConf where
    parseJSON v = modifyFailure ("Persistent: error loading MySQL conf: " ++) $
      flip (withObject "MySQLConf") v $ \o -> do
        database <- o .: "database"
        host     <- o .: "host"
        port     <- o .: "port"
        -- "path" (unix socket) is the only optional key.
        path     <- o .:? "path"
        user     <- o .: "user"
        password <- o .: "password"
        pool     <- o .: "poolsize"
        let ci = MySQL.defaultConnectInfo
                   { MySQL.connectHost = host
                   , MySQL.connectPort = port
                   , MySQL.connectPath = case path of
                                           Just p -> p
                                           Nothing -> MySQL.connectPath MySQL.defaultConnectInfo
                   , MySQL.connectUser = user
                   , MySQL.connectPassword = password
                   , MySQL.connectDatabase = database
                   }
        return $ MySQLConf ci pool
instance PersistConfig MySQLConf where
    type PersistConfigBackend MySQLConf = SqlPersistT
    type PersistConfigPool MySQLConf = ConnectionPool
    createPoolConfig (MySQLConf cs size) = runNoLoggingT $ createMySQLPool cs size -- FIXME
    runPool _ = runSqlPool
    loadConfig = parseJSON
    -- Override individual connection fields from MYSQL_* environment
    -- variables when present.
    applyEnv conf = do
      env <- getEnvironment
      let maybeEnv old var = maybe old id $ lookup ("MYSQL_" ++ var) env
      return conf
        { myConnInfo =
            case myConnInfo conf of
              MySQL.ConnectInfo
                { MySQL.connectHost     = host
                , MySQL.connectPort     = port
                , MySQL.connectPath     = path
                , MySQL.connectUser     = user
                , MySQL.connectPassword = password
                , MySQL.connectDatabase = database
                } -> (myConnInfo conf)
                       { MySQL.connectHost     = maybeEnv host "HOST"
                       -- NOTE(review): 'read' here is partial — a
                       -- non-numeric MYSQL_PORT will crash at use time.
                       , MySQL.connectPort     = read $ maybeEnv (show port) "PORT"
                       , MySQL.connectPath     = maybeEnv path "PATH"
                       , MySQL.connectUser     = maybeEnv user "USER"
                       , MySQL.connectPassword = maybeEnv password "PASSWORD"
                       , MySQL.connectDatabase = maybeEnv database "DATABASE"
                       }
        }
-- | Like 'migrate'', but never consults the database: always behaves as
-- if the table did not exist, emitting the full creation plan.
mockMigrate :: MySQL.ConnectInfo
            -> [EntityDef]
            -> (Text -> IO Statement)
            -> EntityDef
            -> IO (Either [Text] [(Bool, Text)])
mockMigrate _connectInfo allDefs _getter val = do
    let name = entityDB val
    let (newcols, udefs, fdefs) = mkColumns allDefs val
    let udspair = map udToPair udefs
    case () of
      -- Nothing found, create everything
      () -> do
        let uniques = flip concatMap udspair $ \(uname, ucols) ->
              [ AlterTable name $
                AddUniqueConstraint uname $
                map (findTypeAndMaxLen name) ucols ]
        let foreigns = do
              Column { cName=cname, cReference=Just (refTblName, _a) } <- newcols
              return $ AlterColumn name (refTblName, addReference allDefs (refName name cname) refTblName cname)
        let foreignsAlt = map (\fdef -> let (childfields, parentfields) = unzip (map (\((_,b),(_,d)) -> (b,d)) (foreignFields fdef))
                                        in AlterColumn name (foreignRefTableDBName fdef, AddReference (foreignRefTableDBName fdef) (foreignConstraintNameDBName fdef) childfields parentfields)) fdefs
        return $ Right $ map showAlterDb $ (addTable newcols val): uniques ++ foreigns ++ foreignsAlt
    {- FIXME redundant, why is this here? The whole case expression is weird
      -- No errors and something found, migrate
      (_, _, ([], old')) -> do
        let excludeForeignKeys (xs,ys) = (map (\c -> case cReference c of
                                                       Just (_,fk) -> case find (\f -> fk == foreignConstraintNameDBName f) fdefs of
                                                                        Just _ -> c { cReference = Nothing }
                                                                        Nothing -> c
                                                       Nothing -> c) xs,ys)
            (acs, ats) = getAlters allDefs name (newcols, udspair) $ excludeForeignKeys $ partitionEithers old'
            acs' = map (AlterColumn name) acs
            ats' = map (AlterTable name) ats
        return $ Right $ map showAlterDb $ acs' ++ ats'
      -- Errors
      (_, _, (errs, _)) -> return $ Left errs
    -}
  where
    findTypeAndMaxLen tblName col = let (col', ty) = findTypeOfColumn allDefs tblName col
                                        (_, ml) = findMaxLenOfColumn allDefs tblName col
                                    in (col', ty, ml)
-- | Mock a migration even when the database is not present.
-- This function will mock the migration for a database even when
-- the actual database isn't already present in the system.
mockMigration :: Migration -> IO ()
mockMigration mig = do
    smap <- newIORef $ Map.empty
    -- A stub backend: only the fields a dry-run migration touches are
    -- defined; everything else is 'undefined' and must not be forced.
    let sqlbackend = SqlBackend { connPrepare = \_ -> do
                                    return Statement
                                      { stmtFinalize = return ()
                                      , stmtReset = return ()
                                      , stmtExecute = undefined
                                      , stmtQuery = \_ -> return $ return ()
                                      },
                                  connInsertManySql = Nothing,
                                  connInsertSql = undefined,
                                  connStmtMap = smap,
                                  connClose = undefined,
                                  connMigrateSql = mockMigrate undefined,
                                  connBegin = undefined,
                                  connCommit = undefined,
                                  connRollback = undefined,
                                  connEscapeName = undefined,
                                  connNoLimit = undefined,
                                  connRDBMS = undefined,
                                  connLimitOffset = undefined,
                                  connLogFunc = undefined,
                                  connUpsertSql = undefined,
                                  connMaxParams = Nothing}
        result = runReaderT . runWriterT . runWriterT $ mig
    resp <- result sqlbackend
    -- Print each generated SQL statement.
    mapM_ T.putStrLn $ map snd $ snd resp
-- | MySQL specific 'upsert'. This will prevent multiple queries, when one will
-- do.
--
-- Single-record convenience wrapper around
-- 'insertManyOnDuplicateKeyUpdate' with no field-copy specifications;
-- the trailing @['Update' record]@ argument is passed through unchanged.
insertOnDuplicateKeyUpdate
  :: ( backend ~ PersistEntityBackend record
     , PersistEntity record
     , MonadIO m
     , PersistStore backend
     , BackendCompatible SqlBackend backend
     )
  => record
  -> [Update record]
  -> ReaderT backend m ()
insertOnDuplicateKeyUpdate record =
  insertManyOnDuplicateKeyUpdate [record] []
-- | This type is used to determine how to update rows using MySQL's
-- @INSERT ON DUPLICATE KEY UPDATE@ functionality, exposed via
-- 'insertManyOnDuplicateKeyUpdate' in the library.
--
-- @since 2.6.2
data SomeField record where
  -- | Copy the field directly from the record.
  SomeField :: EntityField record typ -> SomeField record
  -- | Only copy the field if it is not equal to the provided value.
  -- The comparison happens in SQL (see the @COALESCE@/@NULLIF@ template
  -- built by 'mkBulkInsertQuery'), not in Haskell.
  CopyUnlessEq :: PersistField typ => EntityField record typ -> typ -> SomeField record
-- | Copy the field into the database only if the value in the
-- corresponding record is non-@NULL@, i.e. unless it equals 'Nothing'.
--
-- @since 2.6.2
copyUnlessNull :: PersistField typ => EntityField record (Maybe typ) -> SomeField record
copyUnlessNull = flip CopyUnlessEq Nothing
-- | Copy the field into the database only if the value in the
-- corresponding record is non-empty, where "empty" means the Monoid
-- definition for 'mempty'. Useful for 'Text', 'String', 'ByteString', etc.
--
-- The resulting 'SomeField' type is useful for the
-- 'insertManyOnDuplicateKeyUpdate' function.
--
-- @since 2.6.2
copyUnlessEmpty :: (Monoid.Monoid typ, PersistField typ) => EntityField record typ -> SomeField record
copyUnlessEmpty = flip CopyUnlessEq Monoid.mempty
-- | Copy the field into the database only if the field is not equal to the
-- provided value. This is useful to avoid copying weird nullary data into
-- the database.
--
-- The resulting 'SomeField' type is useful for the
-- 'insertManyOnDuplicateKeyUpdate' function.
--
-- @since 2.6.2
copyUnlessEq :: PersistField typ => EntityField record typ -> typ -> SomeField record
copyUnlessEq field value = CopyUnlessEq field value
-- | Do a bulk insert on the given records in the first parameter. In the event
-- that a key conflicts with a record currently in the database, the second and
-- third parameters determine what will happen.
--
-- The second parameter is a list of fields to copy from the original value.
-- This allows you to specify which fields to copy from the record you're trying
-- to insert into the database to the preexisting row.
--
-- The third parameter is a list of updates to perform that are independent of
-- the value that is provided. You can use this to increment a counter value.
-- These updates only occur if the original record is present in the database.
--
-- === __More details on 'SomeField' usage__
--
-- The @['SomeField']@ parameter allows you to specify which fields (and
-- under which conditions) will be copied from the inserted rows. For
-- a brief example, consider the following data model and existing data set:
--
-- @
-- Item
-- name Text
-- description Text
-- price Double Maybe
-- quantity Int Maybe
--
-- Primary name
-- @
--
-- > items:
-- > +------+-------------+-------+----------+
-- > | name | description | price | quantity |
-- > +------+-------------+-------+----------+
-- > | foo | very good | | 3 |
-- > | bar | | 3.99 | |
-- > +------+-------------+-------+----------+
--
-- This record type has a single natural key on @itemName@. Let's suppose
-- that we download a CSV of new items to store into the database. Here's
-- our CSV:
--
-- > name,description,price,quantity
-- > foo,,2.50,6
-- > bar,even better,,5
-- > yes,wow,,
--
-- We parse that into a list of Haskell records:
--
-- @
-- records =
-- [ Item { itemName = "foo", itemDescription = ""
-- , itemPrice = Just 2.50, itemQuantity = Just 6
-- }
-- , Item "bar" "even better" Nothing (Just 5)
-- , Item "yes" "wow" Nothing Nothing
-- ]
-- @
--
-- The new CSV data is partial. It only includes __updates__ from the
-- upstream vendor. Our CSV library parses the missing description field as
-- an empty string. We don't want to override the existing description. So
-- we can use the 'copyUnlessEmpty' function to say: "Don't update when the
-- value is empty."
--
-- Likewise, the new row for @bar@ includes a quantity, but no price. We do
-- not want to overwrite the existing price in the database with a @NULL@
-- value. So we can use 'copyUnlessNull' to only copy the existing values
-- in.
--
-- The final code looks like this:
-- @
-- 'insertManyOnDuplicateKeyUpdate' records
-- [ 'copyUnlessEmpty' ItemDescription
-- , 'copyUnlessNull' ItemPrice
-- , 'copyUnlessNull' ItemQuantity
-- ]
-- []
-- @
--
-- Once we run that code on the database, the new data set looks like this:
--
-- > items:
-- > +------+-------------+-------+----------+
-- > | name | description | price | quantity |
-- > +------+-------------+-------+----------+
-- > | foo | very good | 2.50 | 6 |
-- > | bar | even better | 3.99 | 5 |
-- > | yes | wow | | |
-- > +------+-------------+-------+----------+
insertManyOnDuplicateKeyUpdate
  :: forall record backend m.
     ( backend ~ PersistEntityBackend record
     , BackendCompatible SqlBackend backend
     , PersistEntity record
     , MonadIO m
     )
  => [record] -- ^ A list of the records you want to insert, or update
  -> [SomeField record] -- ^ A list of the fields you want to copy over.
  -> [Update record] -- ^ A list of the updates to apply that aren't dependent on the record being inserted.
  -> ReaderT backend m ()
-- An empty record list is a no-op: no SQL is issued at all.
insertManyOnDuplicateKeyUpdate [] _ _ = return ()
insertManyOnDuplicateKeyUpdate records fieldValues updates =
  -- Build the single bulk statement plus its parameters, then run it
  -- against the compatible 'SqlBackend'.
  withReaderT projectBackend
    . uncurry rawExecute
    $ mkBulkInsertQuery records fieldValues updates
-- | This creates the query for 'bulkInsertOnDuplicateKeyUpdate'. If you
-- provide an empty list of updates to perform, then it will generate
-- a dummy/no-op update using the first field of the record. This avoids
-- duplicate key exceptions.
--
-- Returns the SQL text together with its positional parameters, in the
-- order: record values, values from independent updates, then the
-- comparison values for conditional copies.
mkBulkInsertQuery
  :: PersistEntity record
  => [record] -- ^ A list of the records you want to insert, or update
  -> [SomeField record] -- ^ A list of the fields you want to copy over.
  -> [Update record] -- ^ A list of the updates to apply that aren't dependent on the record being inserted.
  -> (Text, [PersistValue])
mkBulkInsertQuery records fieldValues updates =
  (q, recordValues <> updsValues <> copyUnlessValues)
  where
    -- Split the field specs: Left = conditional copies (name + compare
    -- value), Right = unconditional copies (name only).
    mfieldDef x = case x of
      SomeField rec -> Right (fieldDbToText (persistFieldDef rec))
      CopyUnlessEq rec val -> Left (fieldDbToText (persistFieldDef rec), toPersistValue val)
    (fieldsToMaybeCopy, updateFieldNames) = partitionEithers $ map mfieldDef fieldValues
    fieldDbToText = T.pack . escapeDBName . fieldDB
    entityDef' = entityDef records
    -- Used for the no-op "field=field" update when nothing else applies.
    firstField = case entityFieldNames of
      [] -> error "The entity you're trying to insert does not have any fields."
      (field:_) -> field
    entityFieldNames = map fieldDbToText (entityFields entityDef')
    tableName = T.pack . escapeDBName . entityDB $ entityDef'
    copyUnlessValues = map snd fieldsToMaybeCopy
    recordValues = concatMap (map toPersistValue . toPersistFields) records
    -- One "(?, ?, ...)" group per record.
    recordPlaceholders = commaSeparated $ map (parenWrapped . commaSeparated . map (const "?") . toPersistFields) records
    -- Conditional copy: keep the incoming VALUES(col) unless it equals the
    -- bound comparison value (NULLIF), in which case fall back to the
    -- existing column (COALESCE).
    mkCondFieldSet n _ = T.concat
      [ n
      , "=COALESCE("
      ,   "NULLIF("
      ,     "VALUES(", n, "),"
      ,     "?"
      ,   "),"
      ,   n
      , ")"
      ]
    condFieldSets = map (uncurry mkCondFieldSet) fieldsToMaybeCopy
    fieldSets = map (\n -> T.concat [n, "=VALUES(", n, ")"]) updateFieldNames
    upds = map mkUpdateText updates
    updsValues = map (\(Update _ val _) -> toPersistValue val) updates
    updateText = case fieldSets <> upds <> condFieldSets of
      -- Self-assignment avoids a syntax error in the UPDATE clause.
      [] -> T.concat [firstField, "=", firstField]
      xs -> commaSeparated xs
    q = T.concat
      [ "INSERT INTO "
      , tableName
      , " ("
      , commaSeparated entityFieldNames
      , ") "
      , " VALUES "
      , recordPlaceholders
      , " ON DUPLICATE KEY UPDATE "
      , updateText
      ]
-- | Vendored from @persistent@.
--
-- Render one @SET@ clause for an 'Update'; the @?@ placeholder is filled
-- from the corresponding entry of @updsValues@ in 'mkBulkInsertQuery'.
mkUpdateText :: PersistEntity record => Update record -> Text
mkUpdateText x =
  case updateUpdate x of
    Assign -> n <> "=?"
    Add -> T.concat [n, "=", n, "+?"]
    Subtract -> T.concat [n, "=", n, "-?"]
    Multiply -> T.concat [n, "=", n, "*?"]
    Divide -> T.concat [n, "=", n, "/?"]
    BackendSpecificUpdate up ->
      error . T.unpack $ "BackendSpecificUpdate " <> up <> " not supported"
  where
    n = T.pack . escapeDBName . fieldDB . updateFieldDef $ x
-- | Join text chunks with a comma-space separator.
commaSeparated :: [Text] -> Text
commaSeparated pieces = T.intercalate (T.pack ", ") pieces
-- | Surround a text chunk with parentheses.
parenWrapped :: Text -> Text
parenWrapped inner = T.cons '(' (T.snoc inner ')')
-- | Gets the 'FieldDef' for an 'Update'. Vendored from @persistent@.
--
-- 'BackendUpdate' carries no field, hence the hard error; callers in this
-- module only construct plain 'Update' values.
updateFieldDef :: PersistEntity v => Update v -> FieldDef
updateFieldDef (Update f _ _) = persistFieldDef f
updateFieldDef BackendUpdate {} = error "updateFieldDef did not expect BackendUpdate"
| psibi/persistent | persistent-mysql/Database/Persist/MySQL.hs | mit | 52,629 | 29 | 33 | 17,059 | 11,446 | 6,058 | 5,388 | -1 | -1 |
-- |Here we have the representation of variables inside Zeno; see 'ZVarClass' for the
-- different types of variable we can have.
module Zeno.Var (
ZVar (..), ZVarClass (..), HasSources (..),
ZDataType, ZType, ZExpr, ZTerm, ZAlt,
ZBinding, ZBindings, ZClause, ZExprSubstitution,
ZEquality, ZHypothesis, ZQuantified, ZProgram,
CriticalPath, CriticalTerm,
substituteTakingSources,
defaultVarClass, isConstructorVar, isConstructorTerm,
isUniversalVar, isDefinedVar,
universalVariables, freeUniversalVariables,
freshVariable,
) where
import Prelude ()
import Zeno.Prelude
import Zeno.DataType
import Zeno.Type
import Zeno.Id
import Zeno.Expression
import Zeno.Clause
import Zeno.Utils
import Zeno.Program
import Zeno.Unification
import Zeno.Traversing
import qualified Data.Map as Map
type ZProgram = Program ZVar
type ZDataType = DataType ZVar
type ZType = Type ZDataType
type ZExpr = Expr ZVar
type ZTerm = Term ZVar
type ZAlt = Alt ZVar
type ZBindings = Bindings ZVar
type ZBinding = Binding ZVar
type ZClause = Clause ZVar
type ZEquality = Equality ZVar
type ZQuantified = Quantified Clause ZVar
type ZHypothesis = Hypothesis ZVar
type ZExprSubstitution = ExprSubstitution ZVar
-- | A Zeno variable. Equality and ordering are by 'varId' only.
data ZVar
  = ZVar { varId :: !Id,
           -- ^ Unique identifier; 'Eq'/'Ord' compare on this alone.
           varName :: !(Maybe String),
           -- ^ Source-level name, 'Nothing' for generated variables.
           -- |The variable's 'Type'. This is non-strict so that we can tie the
           -- knot for "variables have types which are made of data-types which
           -- have constructors which are variables".
           varType :: ZType,
           varClass :: !ZVarClass }
           -- ^ Which kind of variable this is; see 'ZVarClass'.
-- Identity and ordering are determined solely by the unique 'varId'.
instance Eq ZVar where
  (==) = (==) `on` varId
instance Ord ZVar where
  compare = compare `on` varId
instance Show ZVar where
  show var = case varName var of
      Just name ->
        -- Strip any module prefix; GHC-Core constructor names carry a
        -- "$c" prefix which is dropped for display.
        let name' = stripModuleName name
        in if "$c" `isPrefixOf` name'
           then drop 2 name'
           else name'
      -- Nameless (generated) variables render as "_<id>".
      Nothing -> "_" ++ show (varId var)
    where
      -- NOTE(review): 'srs' is computed but never used in the rendered
      -- output (and the inner case binder shadows it) — looks like a
      -- leftover from a debugging format; confirm before removing.
      srs = case allSources var of
        [] -> ""
        srs -> "{" ++ (intercalate "," . map show) srs ++ "}"
-- |The different /classes/ of variable within Zeno.
data ZVarClass
  -- |A 'UniversalVar' is one used in theorem proving
  -- and hence is under universal quantification. It carries the
  -- critical paths ("sources") it originated from.
  = UniversalVar { varSources :: ![CriticalPath] }
  -- |A data-type constructor; the flag records whether the constructor
  -- is recursive in its own data type.
  | ConstructorVar { isRecursiveConstructor :: !Bool }
  -- |A variable defined with a 'Let'.
  | DefinedVar {
      -- |The definition of a variable will be 'Nothing' until its 'Let'
      -- binding has been evaluated. Hence, top-level functions
      -- should not have a 'Nothing' definition.
      varDefinition :: !(Maybe ZExpr),
      -- |Whether this was defined with recursive 'Let' 'Bindings'.
      isRecursiveDefinition :: !Bool }
  deriving ( Eq )
type CriticalPath = [Id]
type CriticalTerm = (ZTerm, CriticalPath)
instance Typed ZVar where
  type TypeVar ZVar = ZDataType
  getType = varType
  -- Applying a type substitution to a variable rewrites its type in place.
  updateType sub var = var
    { varType = updateType sub (varType var) }
instance Typed ZExpr where
  type TypeVar ZExpr = ZDataType
  updateType sub = fmap (updateType sub)
  -- 'Err' gets a fresh polymorphic type so it unifies with anything.
  getType Err = PolyVarType reservedId
  getType (Var v) = getType v
  getType (Let _ rhs) = getType rhs
  -- NOTE(review): assumes a case expression always has at least one
  -- alternative; 'head' would crash otherwise — confirm invariant.
  getType (Cse _ _ alts) = getType . altExpr . head $ alts
  getType (Lam var rhs) = FunType (getType var) (getType rhs)
  -- Application: unify the function's argument type with the actual
  -- argument type and apply the resulting substitution to the result.
  getType expr@(App lhs rhs) = updateType type_sub lhs_res
    where
      FunType lhs_arg lhs_res = snd (flattenForAllType (getType lhs))
      type_sub = case unify lhs_arg (getType rhs) of
        Unifier sub -> sub
        NoUnifier -> error
          $ "Could not unify types in term application."
          ++ "\n" ++ showTyped lhs
          ++ "\nand"
          ++ "\n" ++ showTyped rhs
-- | New variables default to universal quantification with no sources.
defaultVarClass :: ZVarClass
defaultVarClass = UniversalVar []
-- | Whether a variable is a data-type constructor.
isConstructorVar :: ZVar -> Bool
isConstructorVar var = case varClass var of
  ConstructorVar {} -> True
  _ -> False
-- | Whether the head symbol of a term is a constructor variable;
-- 'False' when the term has no head variable at all.
isConstructorTerm :: ZExpr -> Bool
isConstructorTerm =
  -- 'maybe False f' replaces the roundabout 'fromMaybe False . fmap f'.
  maybe False isConstructorVar . termFunction
-- | Whether a variable is universally quantified.
isUniversalVar :: ZVar -> Bool
isUniversalVar var = case varClass var of
  UniversalVar {} -> True
  _ -> False
-- | Whether a variable was introduced by a 'Let' binding.
isDefinedVar :: ZVar -> Bool
isDefinedVar var = case varClass var of
  DefinedVar {} -> True
  _ -> False
-- | Clone a variable under a fresh id, dropping its name (the clone is a
-- generated variable) but keeping its type and class.
freshVariable :: (Monad m, IdCounter s) => ZVar -> StateT s m ZVar
-- The old id is unused (and the binder 'id' shadowed Prelude.id), so it
-- is now ignored with a wildcard.
freshVariable (ZVar _ _ typ cls) = do
  new_id <- newIdS
  return (ZVar new_id Nothing typ cls)
-- | All distinct universally quantified variables in a structure.
universalVariables :: Foldable f => f ZVar -> [ZVar]
universalVariables = nubOrd . filter isUniversalVar . toList
-- | As 'universalVariables', but restricted to free occurrences.
freeUniversalVariables :: (WithinTraversable ZExpr (f ZVar), Foldable f) =>
    f ZVar -> [ZVar]
freeUniversalVariables = filter isUniversalVar . freeVariables
-- | Things that carry critical-path "sources", used to track where a
-- value originated during proof search.
class HasSources a where
  allSources :: a -> [CriticalPath]
  addSources :: [CriticalPath] -> a -> a
  clearSources :: a -> a
-- Only universal variables carry sources; all other variable classes
-- report none and ignore additions/clears.
instance HasSources ZVar where
  allSources (varClass -> UniversalVar srs) = srs
  allSources _ = []
  addSources more var@(varClass -> UniversalVar existing) =
    var { varClass = UniversalVar (more ++ existing) }
  addSources _ var = var
  clearSources var@(varClass -> UniversalVar _) =
    var { varClass = UniversalVar [] }
  clearSources var = var
-- Lift 'HasSources' over any container of variables: collect from the
-- distinct variables, push additions/clears to every occurrence.
instance (Foldable f, Functor f) => HasSources (f ZVar) where
  {-# SPECIALISE instance HasSources ZExpr #-}
  allSources = concatMap allSources . nubOrd . toList
  addSources srs = fmap (addSources srs)
  clearSources = fmap clearSources
-- | Apply a substitution where every replacement inherits the sources of
-- the value it replaces; unmapped values are left untouched.
substituteTakingSources :: (Ord a, WithinTraversable a f, HasSources a) =>
    Substitution a a -> f -> f
{-# SPECIALISE substituteTakingSources ::
    ZExprSubstitution -> ZExpr -> ZExpr #-}
substituteTakingSources sub = mapWithin $ \from ->
  case Map.lookup from sub of
    Nothing -> from
    Just to -> addSources (allSources from) to
| trenta3/zeno-0.2.0.1 | src/Zeno/Var.hs | mit | 5,924 | 0 | 16 | 1,466 | 1,571 | 844 | 727 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Eval where
import Types
import Data.Maybe
import Prelude hiding (lookup)
import Data.Map
import Control.Exception
-- | Evaluate one expression in an environment, returning the result and
-- the (possibly extended) environment.
--
-- NOTE(review): patterns are not exhaustive — 'Boolean', 'LazyVar',
-- 'Unquote' and an empty 'SList' fall through to a runtime
-- pattern-match failure; confirm whether those are unreachable here.
eval :: Expression -> Env -> IO (Expression, Env)
-- Variable lookup; forces lazy variables via 'isValidLookup'.
eval (Identifier x) env = isValidLookup (lookup x env) x env
-- Self-evaluating forms.
eval (Number x) env = return (Number x, env)
eval (Fn f) env = return (Fn f, env)
eval Unit env = return (Unit, env)
eval (Quote exp) env = evalQuote exp env
-- Non-empty list: function application.
eval (SList (h:t)) env = callFn env h t
-- Force a lazy sequence fully, then evaluate the realised value.
eval (LazySeq head rest lenv) env = do
  res <- (realize (LazySeq head rest lenv))
  eval res env
-- Thunks are forced in their own captured environment.
eval (Thunk exp env) oenv = realizeThunks (Thunk exp env) oenv
-- | Evaluate a body of expressions in sequence, threading the environment;
-- the final expression is returned as a thunk over the accumulated
-- environment rather than being evaluated eagerly.
evalBody :: [Expression] -> Env -> IO (Expression, Env)
evalBody [final] env = return (Thunk final env, env)
evalBody (expr : rest) env = do
  (_, env') <- eval expr env
  evalBody rest env'
-- | Evaluate arguments left to right, threading the environment from one
-- argument into the next; only the evaluated values are returned.
evalVarArgs :: [Expression] -> Env -> IO [Expression]
evalVarArgs [] env = return []
evalVarArgs (h:t) env = do
  (res, nenv) <- eval h env
  tres <- evalVarArgs t nenv
  return (res:tres)
-- | Walk a quoted list, evaluating only embedded unquotes (via
-- 'evalUnquote'); anything else is returned verbatim.
-- (The repeated 'env' binders deliberately shadow each other, threading
-- the environment through the traversal.)
evalQuote :: Expression -> Env -> IO (Expression, Env)
evalQuote (SList (h:t)) env = do
  (res, env) <- evalUnquote h env
  ((SList tres), env) <- evalQuote (SList t) env
  return ((SList (res:tres)),env)
evalQuote exp env = return (exp, env)
-- | An unquote escapes back into full evaluation; everything else keeps
-- being traversed as quoted data.
evalUnquote :: Expression -> Env -> IO (Expression, Env)
evalUnquote (Unquote inner) env = eval inner env
evalUnquote other env = evalQuote other env
-- | Turn the result of an environment lookup into a value.
-- A failed lookup throws a 'CodeError' — the full environment is printed
-- first, which looks like a debugging aid (NOTE(review): confirm whether
-- the dump is intentional). Lazy variables are forced on first access and
-- the forced value is cached back into the environment.
isValidLookup :: Maybe Expression -> String -> Env -> IO (Expression, Env)
isValidLookup Nothing x env = do
  putStrLn (show env)
  throwIO (CodeError ("var lookup failed for: " ++ x))
isValidLookup (Just (LazyVar a exp)) _ env = do
  (res, _) <- eval exp env
  return (res, (insert a res env))
isValidLookup (Just val) _ env = return (val, env)
-- | Apply the head of an s-expression: it must be an identifier bound to
-- a function, which then receives the /unevaluated/ argument list.
-- (NOTE(review): the environment 'e' produced by evaluating the
-- identifier is discarded; confirm that lookup cannot extend it.)
callFn :: Env -> Expression -> [Expression] -> IO (Expression, Env)
callFn env ident t = do
  i <- isIdentifier ident
  (v, e) <- eval i env
  f <- isFn v
  (f t env)
-- | Extract the callable from a function value, or fail with a
-- 'CodeError'.
isFn :: Expression -> IO ([Expression] -> Env -> IO (Expression, Env))
isFn expr = case expr of
  Fn f -> return f
  _ -> throwIO (CodeError "invalid function call")
-- | Ensure an expression is an identifier, or fail with a 'CodeError'.
-- (The message's "identifer" spelling is preserved byte-for-byte as it is
-- runtime-visible text.)
isIdentifier :: Expression -> IO (Expression)
isIdentifier expr = case expr of
  Identifier i -> return (Identifier i)
  _ -> throwIO (CodeError "expected an identifer")
-- | Evaluate function arguments strictly: each argument is evaluated and
-- then fully forced through any thunks before moving to the next.
evalFnArgs :: [Expression] -> Env -> IO ([Expression])
evalFnArgs [] env = return []
evalFnArgs (h:t) env = do
  (arg, nenv) <- eval h env
  (targ,tenv) <- realizeThunks arg nenv
  res <- evalFnArgs t tenv
  return (targ : res)
-- | Force a thunk (in its captured environment), recursing until the
-- result is no longer a thunk; non-thunks pass through unchanged.
-- (The inner 'exp'/'env' binders shadow the outer ones deliberately.)
realizeThunks :: Expression -> Env -> IO (Expression, Env)
realizeThunks (Thunk exp env) _ = do
  (res, _) <- eval exp env
  case res of
    (Thunk exp env) -> eval (Thunk exp env) env
    exp -> return (exp,env)
realizeThunks exp env = return (exp, env)
-- | Fully force a value: thunks are evaluated to completion and lazy
-- sequences are materialised into ordinary 'SList's; a lazy sequence
-- whose tail does not realise to a list is an error.
realize :: Expression -> IO Expression
realize (Thunk exp env) = do
  (res, _) <- realizeThunks (Thunk exp env) env
  realize res
realize (LazySeq a exp env) = do
  (res, _) <- (eval exp env)
  t <- (realize res)
  case t of
    (SList b) -> return (SList (a:b))
    _ -> throwIO (CodeError "invalid lazy sequence")
realize (SList a) = return (SList a)
realize exp = return exp
instance Exception LispError
-- Human-readable rendering of interpreter errors.
instance Show LispError where
  show (CodeError msg) = "Error: " ++ msg
  show (ApplicationError e msg) = "Error: " ++ show e ++ "->" ++ msg
-- Lisp-style rendering of expressions; lists print as "(a b c)".
instance Show Expression where
  show (Identifier a) = a
  show (Number a) = show a
  show (Fn b) = "*FN*"
  show (Unit) = "*unit*"
  show (Boolean b) = (show b)
  show (Quote a) = "'" ++ (show a)
  show (Unquote a) = "~" ++ (show a)
  show (LazyVar i e) = "lazy("++(show e)++")"
  show (LazySeq a exp env) = "*LazySeq*"
  show (Thunk e env) = "thunk("++(show e)++")"
  -- Elements joined with single spaces, the head printed without one.
  show (SList (h:t)) = "(" ++
    show(h) ++
    (Prelude.foldl (\ start exp -> (start ++ " " ++ show(exp))) "" t) ++
    ")"
  show (SList []) = "()"
| ckirkendall/hlisp | Eval.hs | gpl-2.0 | 3,909 | 0 | 15 | 926 | 1,889 | 951 | 938 | 103 | 2 |
{- |
Module : ./SoftFOL/ProveDarwin.hs
Description : Interface to the TPTP theorem prover via Darwin.
Copyright : (c) Heng Jiang, Uni Bremen 2004-2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : needs POSIX
Interface for the Darwin service, uses GUI.GenericATP.
See <http://spass.mpi-sb.mpg.de/> for details on SPASS, and
<http://combination.cs.uiowa.edu/Darwin/> for Darwin.
-}
module SoftFOL.ProveDarwin
( darwinProver
, darwinCMDLautomaticBatch
, darwinConsChecker
, ProverBinary (..)
, darwinExe
, tptpProvers
) where
-- preliminary hacks for display of CASL models
import Logic.Prover
import SoftFOL.Sign
import SoftFOL.Translate
import SoftFOL.ProverState
import SoftFOL.EProver
import Common.AS_Annotation as AS_Anno
import qualified Common.Result as Result
import Common.ProofTree
import Common.ProverTools
import Common.SZSOntology
import Common.Utils
import Data.Char (isDigit)
import Data.List
import Data.Maybe
import Data.Time (timeToTimeOfDay)
import Data.Time.Clock (secondsToDiffTime)
import Control.Monad (when)
import qualified Control.Concurrent as Concurrent
import System.Directory
import System.Process (waitForProcess, runInteractiveCommand,
runProcess, terminateProcess)
import System.IO (hGetContents, openFile, hClose, IOMode (WriteMode))
import Control.Exception as Exception
import GUI.GenericATP
import Interfaces.GenericATPState
import Proofs.BatchProcessing
import qualified Data.Map as Map
import qualified Data.Set as Set
-- * Prover implementation
-- | The prover executables this module can drive via TPTP.
data ProverBinary = Darwin | DarwinFD | EDarwin | EProver | Leo | IProver
  deriving (Enum, Bounded)
-- | Every supported TPTP prover binary, in declaration order.
tptpProvers :: [ProverBinary]
tptpProvers = enumFromTo minBound maxBound
-- | Display name of a prover; plain Darwin is suffixed to distinguish it
-- from the finite-domain variant that shares the same executable.
proverBinary :: ProverBinary -> String
proverBinary b = darwinExe b ++ suffix
  where
    suffix = case b of
      Darwin -> "-non-fd"
      _ -> ""
-- | Executable name on $PATH for each prover.
darwinExe :: ProverBinary -> String
darwinExe b = case b of
  Darwin -> "darwin"
  DarwinFD -> "darwin"
  EDarwin -> "e-darwin"
  EProver -> "eprover"
  Leo -> "leo"
  IProver -> "iproveropt"
{- | The Prover implementation. First runs the batch prover (with
graphical feedback), then starts the GUI prover. -}
darwinProver
  :: ProverBinary -> Prover Sign Sentence SoftFOLMorphism () ProofTree
darwinProver b =
  mkAutomaticProver (darwinExe b) (proverBinary b) () (darwinGUI b)
    $ darwinCMDLautomaticBatchAux b
-- | Consistency checker built on the same binary; no timer support.
darwinConsChecker
  :: ProverBinary -> ConsChecker Sign Sentence () SoftFOLMorphism ProofTree
darwinConsChecker b =
  (mkUsableConsChecker (darwinExe b) (proverBinary b) () $ consCheck b)
  { ccNeedsTimer = False }
{- |
  Record for prover specific functions. This is used by both GUI and command
  line interface.
-}
atpFun :: ProverBinary -- ^ the actual binary
  -> String -- ^ theory name
  -> ATPFunctions Sign Sentence SoftFOLMorphism ProofTree SoftFOLProverState
atpFun b thName = ATPFunctions
  { initialProverState = spassProverState
  , atpTransSenName = transSenName
  , atpInsertSentence = insertSentenceGen
  -- Goals are rendered as TPTP problems.
  , goalOutput = showTPTPProblem thName
  , proverHelpText = "no help for darwin available"
  , batchTimeEnv = "HETS_SPASS_BATCH_TIME_LIMIT"
  , fileExtensions = FileExtensions
      { problemOutput = ".tptp"
      , proverOutput = ".spass"
      , theoryConfiguration = ".spcf" }
  , runProver = runDarwin b
  , createProverOptions = extraOpts }
-- ** GUI
{- |
  Invokes the generic prover GUI. SPASS specific functions are omitted by
  data type ATPFunctions.
-}
darwinGUI
  :: ProverBinary -- ^ the actual binary
  -> String -- ^ theory name
  -> Theory Sign Sentence ProofTree
  -- ^ theory consisting of a signature and sentences
  -> [FreeDefMorphism SPTerm SoftFOLMorphism] -- ^ freeness constraints
  -> IO [ProofStatus ProofTree] -- ^ proof status for each goal
darwinGUI b thName th freedefs =
  genericATPgui (atpFun b thName) True (proverBinary b) thName th
    freedefs emptyProofTree
-- ** command line function
{- |
  Implementation of 'Logic.Prover.proveCMDLautomaticBatch' which provides an
  automatic command line interface to the Darwin prover.
  Darwin specific functions are omitted by data type ATPFunctions.
-}
darwinCMDLautomaticBatch
  :: Bool -- ^ True means include proved theorems
  -> Bool -- ^ True means save problem file
  -> Concurrent.MVar (Result.Result [ProofStatus ProofTree])
  -- ^ used to store the result of the batch run
  -> String -- ^ theory name
  -> TacticScript -- ^ default tactic script
  -> Theory Sign Sentence ProofTree
  -- ^ theory consisting of a signature and sentences
  -> [FreeDefMorphism SPTerm SoftFOLMorphism] -- ^ freeness constraints
  -> IO (Concurrent.ThreadId, Concurrent.MVar ())
  {- ^ fst: identifier of the batch thread for killing it
  snd: MVar to wait for the end of the thread -}
-- Specialised to the plain (non-finite-domain) Darwin binary.
darwinCMDLautomaticBatch = darwinCMDLautomaticBatchAux Darwin
-- | Generic batch driver, parameterised by the prover binary.
darwinCMDLautomaticBatchAux
  :: ProverBinary -- ^ the actual binary
  -> Bool -- ^ True means include proved theorems
  -> Bool -- ^ True means save problem file
  -> Concurrent.MVar (Result.Result [ProofStatus ProofTree])
  -- ^ used to store the result of the batch run
  -> String -- ^ theory name
  -> TacticScript -- ^ default tactic script
  -> Theory Sign Sentence ProofTree
  -- ^ theory consisting of a signature and sentences
  -> [FreeDefMorphism SPTerm SoftFOLMorphism] -- ^ freeness constraints
  -> IO (Concurrent.ThreadId, Concurrent.MVar ())
  {- ^ fst: identifier of the batch thread for killing it
  snd: MVar to wait for the end of the thread -}
darwinCMDLautomaticBatchAux b inclProvedThs saveProblem_batch resultMVar
  thName defTS th freedefs =
  genericCMDLautomaticBatch (atpFun b thName) inclProvedThs saveProblem_batch
    resultMVar (proverBinary b) thName
    (parseTacticScript batchTimeLimit [] defTS) th freedefs emptyProofTree
-- * Main prover functions
-- | Base eprover options; @verb@ is spliced between the format flags and
-- the resource limits, and the caller appends the CPU limit value.
eproverOpts :: String -> String
eproverOpts verb = concat
  [ "-xAuto -tAuto --tptp3-format "
  , verb
  , " --memory-limit=2048 --soft-cpu-limit="
  ]
-- | Prover-specific command line options; @cons@ selects consistency-check
-- mode, @tl@ is the time limit in seconds (as a string).
extras :: ProverBinary -> Bool -> String -> String
extras b cons tl = let
  tOut = " -to " ++ tl
  darOpt = "-pc false"
  fdOpt = darOpt ++ (if cons then " -pmtptp true" else "") ++ " -fd true"
  in case b of
    EProver -> eproverOpts (if cons then "-s" else "") ++ tl
    Leo -> "-t " ++ tl
    Darwin -> darOpt ++ tOut
    DarwinFD -> fdOpt ++ tOut
    EDarwin -> fdOpt ++ " -eq Axioms" ++ tOut
    IProver -> "--time_out_real " ++ tl ++ " --sat_mode true"
{- | Runs the Darwin service. The tactic script only contains a string for the
time limit. -}
consCheck
  :: ProverBinary
  -> String
  -> TacticScript
  -> TheoryMorphism Sign Sentence SoftFOLMorphism ProofTree
  -> [FreeDefMorphism SPTerm SoftFOLMorphism] -- ^ freeness constraints
  -> IO (CCStatus ProofTree)
consCheck b thName (TacticScript tl) tm freedefs = case tTarget tm of
  Theory sig nSens -> do
    let proverStateI = spassProverState sig (toNamedList nSens) freedefs
    prob <- showTPTPProblemM thName proverStateI []
    (exitCode, out, tUsed) <-
      runDarwinProcess (darwinExe b) False (extras b True tl) thName prob
    -- Map the SZS status onto a three-valued consistency result:
    -- proved -> consistent, disproved -> inconsistent, else unknown.
    let outState = CCStatus
          { ccResult = Just True
          , ccProofTree = ProofTree $ unlines $ exitCode : out
          , ccUsedTime = timeToTimeOfDay $ secondsToDiffTime
              $ toInteger tUsed }
    return $ if szsProved exitCode then outState else
      outState
      { ccResult = if szsDisproved exitCode then Just False
                   else Nothing }
-- | Run a prover executable on a TPTP problem and collect its output.
-- Returns (SZS status or error message, full output lines, CPU seconds;
-- -1 when the binary is missing or no time was reported).
runDarwinProcess
  :: String -- ^ binary name
  -> Bool -- ^ save problem
  -> String -- ^ options
  -> String -- ^ filename without extension
  -> String -- ^ problem
  -> IO (String, [String], Int)
runDarwinProcess bin saveTPTP options tmpFileName prob = do
  let tmpFile = basename tmpFileName ++ ".tptp"
  when saveTPTP (writeFile tmpFile prob)
  noProg <- missingExecutableInPath bin
  if noProg then
    return (bin ++ " not found. Check your $PATH", [], -1)
    else do
      -- The problem is written to a fresh temp file removed afterwards.
      timeTmpFile <- getTempFile prob tmpFile
      (_, pout, perr) <-
        executeProcess bin (words options ++ [timeTmpFile]) ""
      let l = lines $ pout ++ perr
          (res, _, tUsed) = parseOutput l
      removeFile timeTmpFile
      return (res, l, tUsed)
-- | Render an eprover proof-object file as a Graphviz digraph written to
-- a fixed path (/tmp/graph.dot). Conjectures and axioms present in the
-- proof are placed on the same rank.
-- NOTE(review): shells out to @cat@/@tac@, so this is POSIX-only.
mkGraph :: String -> IO ()
mkGraph f = do
  (_, cat, _) <- executeProcess "cat" [f] ""
  -- The proof is scanned bottom-up (tac) to collect the proof-relevant
  -- clause set before the top-down pass builds the digraph.
  (_, tac, _) <- executeProcess "tac" [f] ""
  let ((p_set, (cs, axs)), res) =
        processProof (zipF proofInfo $ zipF conjectures axioms)
          (Set.empty, ([], [])) $ lines tac
      (aliases, _) = processProof alias Map.empty $ lines cat
      same_rank = intercalate "; " $ map (\ (_, n) -> 'v' : n) $
        filter (\ (_, n) -> Set.member n p_set
                 && isNothing (Map.lookup n aliases)) $ cs ++ axs
  case res of
    Just s -> putStrLn s
    _ -> return ()
  writeFile "/tmp/graph.dot" $ unlines ["digraph {",
    "subgraph { rank = same; " ++ same_rank ++ " }",
    (\ (_, _, d, _) -> d) . fst $ processProof (digraph p_set aliases)
      (Set.empty, Set.empty, "", Map.empty) $ lines cat, "}"]
-- | Run eprover (or eproof when proof objects are unsupported), buffering
-- its output. Only lines mentioning axioms or an SZS status are kept,
-- filtered through grep. When graph output is requested the full proof is
-- first buffered to a temp file and rendered via 'mkGraph'.
runEProverBuffered
  :: Bool -- ^ save problem
  -> Bool
  -> Bool
  -> String -- ^ options
  -> String -- ^ filename without extension
  -> String -- ^ problem
  -> IO (String, [String], Int)
runEProverBuffered saveTPTP graph fullgraph options tmpFileName prob = do
  s <- supportsProofObject
  let tmpFile = basename tmpFileName ++ ".tptp"
      useProofObject = s && not fullgraph
      bin = if useProofObject then "eprover"
            else "eproof"
  noProg <- missingExecutableInPath bin
  when saveTPTP (writeFile tmpFile prob)
  if noProg then return (bin ++ " not found. Check your $PATH", [], -1)
    else do
      (err, out) <-
        do
          timeTmpFile <- getTempFile prob tmpFile
          -- Prefer GNU grep where it is installed as "ggrep" (e.g. BSDs).
          grep <- executableWithDefault "ggrep" "grep"
          (_, out, err, _) <-
            if graph || fullgraph || not s then do
              -- Buffered path: capture the whole proof so 'mkGraph' can
              -- consume it before grep filters the buffer.
              bufferFile <- getTempFile "" "eprover-proof-buffer"
              buff <- openFile bufferFile WriteMode
              h <- runProcess bin (words options ++
                     ["--proof-object" | useProofObject] ++ [timeTmpFile])
                   Nothing Nothing Nothing (Just buff) (Just buff)
              -- On thread kill, terminate the prover instead of leaking it.
              (waitForProcess h >> removeFile timeTmpFile)
                `Exception.catch` (\ ThreadKilled -> terminateProcess h)
              hClose buff
              mkGraph bufferFile
              runInteractiveCommand $ unwords [grep, "-e", "axiom",
                "-e", "SZS", bufferFile, "&&", "rm", "-f", bufferFile]
            else runInteractiveCommand $ unwords
                   [bin, "--proof-object", options, timeTmpFile,
                    "|", grep, "-e", "axiom", "-e", "SZS",
                    "&&", "rm", timeTmpFile]
          return (out, err)
      perr <- hGetContents err
      pout <- hGetContents out
      let l = lines $ perr ++ pout
          (res, _, tUsed) = parseOutput l
      return (res, l, tUsed)
-- | Run the selected prover on a single goal and translate its SZS
-- verdict into an ATP result plus proof status.
runDarwin
  :: ProverBinary
  -> SoftFOLProverState
  {- ^ logical part containing the input Sign and axioms and possibly
  goals that have been proved earlier as additional axioms -}
  -> GenericConfig ProofTree -- ^ configuration to use
  -> Bool -- ^ True means save TPTP file
  -> String -- ^ name of the theory in the DevGraph
  -> AS_Anno.Named SPTerm -- ^ goal to prove
  -> IO (ATPRetval, GenericConfig ProofTree)
  -- ^ (retval, configuration with proof status and complete output)
runDarwin b sps cfg saveTPTP thName nGoal = do
  let bin = darwinExe b
      -- For eprover, "--graph"/"--full-graph" are pseudo-options consumed
      -- here (they select graph rendering) and removed before the call.
      (options, graph, fullgraph) = case b of
        EProver ->
          let w = extraOpts cfg
          in (filter (not . (\ e -> elem e ["--graph", "--full-graph"])) w,
              elem "--graph" w, elem "--full-graph" w)
        _ -> (extraOpts cfg, False, False)
      tl = maybe "10" show $ timeLimit cfg
      extraOptions = extras b False tl
      tmpFileName = thName ++ '_' : AS_Anno.senAttr nGoal
  prob <- showTPTPProblem thName sps nGoal
    $ options ++ ["Requested prover: " ++ bin]
  (exitCode, out, tUsed) <- case b of
    EProver -> runEProverBuffered saveTPTP graph fullgraph
      extraOptions tmpFileName prob
    _ -> runDarwinProcess bin saveTPTP extraOptions tmpFileName prob
  -- For eprover we can read back the axioms actually used in the proof;
  -- otherwise fall back to all axioms of the prover state.
  axs <- case b of
    EProver | szsProved exitCode ||
              szsDisproved exitCode ->
      case processProof axioms [] out of
        (l, Nothing) -> return $ map fst l
        (_, Just err) -> do
          putStrLn err
          return $ getAxioms sps
    _ -> return $ getAxioms sps
  let ctime = timeToTimeOfDay $ secondsToDiffTime $ toInteger tUsed
      -- Translate the SZS status word into an ATP verdict.
      (err, retval) = case () of
        _ | szsProved exitCode -> (ATPSuccess, provedStatus)
        _ | szsDisproved exitCode -> (ATPSuccess, disProvedStatus)
        _ | szsTimeout exitCode ->
          (ATPTLimitExceeded, defaultProofStatus)
        _ | szsStopped exitCode ->
          (ATPBatchStopped, defaultProofStatus)
        _ -> (ATPError exitCode, defaultProofStatus)
      defaultProofStatus =
        (openProofStatus
          (AS_Anno.senAttr nGoal) bin emptyProofTree)
        { usedTime = ctime
        , tacticScript = TacticScript $ show ATPTacticScript
            {tsTimeLimit = configTimeLimit cfg,
             tsExtraOpts = options} }
      disProvedStatus = defaultProofStatus {goalStatus = Disproved}
      provedStatus = defaultProofStatus
        { goalName = AS_Anno.senAttr nGoal
        , goalStatus = Proved True
        , usedAxioms = axs
        , usedProver = bin
        , usedTime = timeToTimeOfDay $ secondsToDiffTime $ toInteger tUsed
        }
  -- Buffered eprover output arrives reversed; restore original order.
  return (err, cfg {proofStatus = retval,
                    resultOutput = case b of
                      EProver -> reverse out
                      _ -> out,
                    timeUsed = ctime })
-- | Extract the status word from an "SZS status <word>" line, ignoring
-- leading comment characters; 'Nothing' when the prefix is absent or no
-- word follows it.
getSZSStatusWord :: String -> Maybe String
getSZSStatusWord line = do
  rest <- stripPrefix "SZS status" (dropWhile (`elem` "%# ") line)
  listToMaybe (words rest)
-- | Fold over prover output collecting the first SZS status word and the
-- reported CPU time.
parseOutput :: [String] -> (String, Bool, Int)
-- ^ (exit code, status found, used time)
parseOutput = foldl' checkLine ("could not determine SZS status", False, -1)
  where
    -- Launch errors override everything; otherwise the first SZS status
    -- wins and "CPU Time" lines update the seconds component.
    -- NOTE(review): 'last (words line)'/'read' are partial; safe only if
    -- every "CPU Time" line ends in a numeric field — confirm.
    checkLine (exCode, stateFound, to) line =
      if isPrefixOf "Couldn't find eprover" line
         || isInfixOf "darwin -h for further information" line
        -- error by running darwin.
        then (line, stateFound, to)
        else case getSZSStatusWord line of
          Just szsState | not stateFound ->
            (szsState, True, to)
          _ -> if "CPU Time" `isPrefixOf` line -- get cpu time
            then let time = case takeWhile isDigit $ last (words line) of
                       ds@(_ : _) -> read ds
                       _ -> to
                 in (exCode, stateFound, time)
            else (exCode, stateFound, to)
| gnn/Hets | SoftFOL/ProveDarwin.hs | gpl-2.0 | 15,270 | 0 | 23 | 4,089 | 3,681 | 1,953 | 1,728 | 315 | 10 |
fmap f (alpha h) = alpha (f . h) | hmemcpy/milewski-ctfp-pdf | src/content/2.4/code/haskell/snippet08.hs | gpl-3.0 | 32 | 1 | 7 | 8 | 28 | 13 | 15 | -1 | -1 |
module Main where
import Sara.Parser.ParserTest
import Sara.Ast.OperatorsTest
import Sara.Semantic.TypeCheckerTest
import Sara.CompilerTest
import Sara.RegressionTest
import Test.Framework
-- | Entry point: run every registered test group under test-framework's
-- default console runner (command-line options are parsed by 'defaultMain').
main :: IO ()
main = defaultMain tests
-- | Every test group in the suite, one per tested subsystem.
-- Order is preserved: regression tests run first, operators last.
tests :: [Test]
tests =
    [ regressionGroup
    , typeCheckerGroup
    , parserGroup
    , compilerGroup
    , operatorsGroup
    ]
| Lykos/Sara | tests/Tests.hs | gpl-3.0 | 374 | 0 | 6 | 75 | 83 | 51 | 32 | 15 | 1 |
module Interface.Torrent.List (torrentList) where
import HTorrentPrelude
import Interface.Torrent.Behavior
import Interface.Torrent.Entry
import qualified Graphics.UI.Threepenny as UI
import Graphics.UI.Threepenny.Core
import Reactive.Threepenny
-- | Build the torrent table widget. Returns the table element (a header
-- row plus one row per torrent) together with a behavior holding the most
-- recently clicked torrent ('Nothing' until the first click).
torrentList :: [TorrentBehavior] -> UI (Element, Behavior (Maybe TorrentBehavior))
torrentList torrents = do
    -- One rendered entry element and one click event per torrent;
    -- 'unzip' splits the (element, event) pairs into parallel lists.
    (elements, events) <- unzip <$> mapM mkEvent torrents
    -- Merge the per-row click events into a single event stream.
    let clicked = foldr (UI.unionWith const) never events
    tableElement <- UI.table #+ (torrentListHeader : (UI.element <$> elements))
    -- Remember the torrent whose row was clicked last.
    focusB <- stepper Nothing (Just <$> clicked)
    return (tableElement, focusB)
    where mkEvent b = do
            e <- torrentEntry b
            return (e, b <$ UI.click e)
-- | Header row of the torrent table: a name column and a progress column.
torrentListHeader :: UI Element
torrentListHeader =
    UI.tr #+ [ UI.set text "Torrent Name" UI.th
             , UI.set text "Progress" UI.th
             ]
| ian-mi/hTorrent | Interface/Torrent/List.hs | gpl-3.0 | 947 | 0 | 13 | 190 | 305 | 161 | 144 | 21 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, OverloadedStrings, Arrows #-}
module Types (
ColValue(..)
, Row
, mshow
, ToString(..)
) where
import Data.List
import Data.String.ToString
import Prelude hiding (id)
import Data.Monoid
import CsvParser
-- | Render the header of an HTML table: one <th> cell per column name,
-- all wrapped in a single <tr>.
writeHeader :: Row -> String
writeHeader row =
    "<tr>" ++ concat [ "<th>" ++ name ++ "</th>" | (name, _) <- row ] ++ "</tr>"
-- | One column of values: either uniformly strings, or uniformly
-- (possibly missing) numbers.
data ColValue = ColS [String] | ColN [Maybe Double] deriving (Eq, Ord)
-- | Drop the first element of a column, keeping its tag.
-- Uses 'drop 1' instead of the partial 'tail', so an empty column yields
-- an empty column rather than a runtime error.
tailColV :: ColValue -> ColValue
tailColV (ColS ss) = ColS (drop 1 ss)
tailColV (ColN ns) = ColN (drop 1 ns)
-- | Like 'show' for 'Maybe' values: 'Nothing' renders as the empty string.
mshow :: Show a => Maybe a -> String
mshow = maybe "" show
-- Concatenate two columns. When the tags differ, the numeric side is
-- rendered with 'mshow' and the result is a string column.
instance Semigroup ColValue where
    ColN xs <> ColN ys = ColN (xs ++ ys)
    ColS xs <> ColS ys = ColS (xs ++ ys)
    ColS xs <> ColN ys = ColS (xs ++ map mshow ys)
    ColN xs <> ColS ys = ColS (map mshow xs ++ ys)
-- The identity element is an empty numeric column; combination is the
-- tag-coercing concatenation from the 'Semigroup' instance.
instance Monoid ColValue where
    mempty = ColN []
    mappend = (<>)
-- Serialise a column as a flat run of tagged values,
-- e.g. "<number>1.0</number><number></number>".
instance Show ColValue where
    show (ColN ns) = concat [ "<number>" ++ mshow n ++ "</number>" | n <- ns ]
    show (ColS ss) = concat [ "<string>" ++ s ++ "</string>" | s <- ss ]
-- | Identical output to the 'Show' instance; delegate to it instead of
-- duplicating the tag-wrapping logic (keeps the two renderings in sync).
instance ToString ColValue where
    toString = show
-- | Comma-separate the strings.
instance ToString [String] where
    toString = intercalate ", "
-- | Comma-separate the rendered numbers ('Nothing' renders as "").
instance ToString [Maybe Double] where
    toString = intercalate ", " . map toString
-- | Numbers render via 'show'.
instance ToString Double where
    toString d = show d
-- | 'Nothing' renders as the empty string; 'Just' delegates to the
-- element's own 'toString'.
instance (ToString a) => ToString (Maybe a) where
    toString = maybe "" toString
-- | A database is a list of rows (row-major layout).
type Db = [Row]
-- | A row is a list of cells.
type Row = [Cell]
type Cell = (String, Value Double) -- key (column name), parsed value
-- | One whole column: its name plus all of its values coerced to a
-- homogeneous 'ColValue'.
type Col = (String, ColValue)
-- | Column-major (transposed) database.
type Tdb = [Col]
-- Writes a Db as a html table
{--
instance Show Db where
show [] = "<table></table>"
show db = "<table>" ++ (writeHeader (head db)) ++ (concatMap rshow db) ++ "</table>"
--}
-- | Render a whole database as an HTML table; the first row supplies the
-- header names. An empty database renders as an empty table.
dshow :: Db -> String
dshow [] = "<table></table>"
dshow db@(firstRow : _) =
    "<table>" ++ writeHeader firstRow ++ concatMap rshow db ++ "</table>"
-- Writes a Record as a html table-record
-- where each field is a <td>
{--
instance Show Row where
show r = "<tr>" ++ concatMap (\f -> "<td>" ++ show (snd f) ++ "</td>") r ++ "</tr>"
--}
-- | Render one row as an HTML table row, one <td> per cell value
-- (keys are dropped; they appear only in the header).
rshow :: Row -> String
rshow row =
    "<tr>" ++ concat [ "<td>" ++ show v ++ "</td>" | (_, v) <- row ] ++ "</tr>"
| b1g3ar5/CsvDatabase | src/Types.hs | gpl-3.0 | 2,501 | 0 | 14 | 504 | 880 | 467 | 413 | 54 | 1 |
{-
CC_Clones - Classic games reimplemented
© Callum Lowcay 2006-2011
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
module Common.HighScores where
import Common.Assets
import Common.Events
import Common.UserData
import Common.Util
import Control.Monad
import Control.Monad.State
import Data.Char
import Data.Function
import Data.List
import Data.Maybe
import Data.Ord
import Graphics.UI.SDL
import Graphics.UI.SDL.TTF
import System.Directory
import System.FilePath
import System.IO
-- | State of one game's high-score table.
data HighScoreState = HighScoreState
	{ game :: String           -- ^ game identifier; names the on-disk file
	, scores :: [(String, Int)]  -- ^ (player name, score), highest score first
	, editing :: Maybe Int       -- ^ index of the entry whose name is being
	                             -- typed, or 'Nothing' when not editing
	} deriving (Show)
-- | Maximum number of entries kept in the table.
maxHighScores :: Int
maxHighScores = 5
-- | Maximum number of characters accepted for a player name.
maxNameLength :: Int
maxNameLength = 12
-- | Absolute path of the persisted score file for the given game,
-- inside the per-user data directory.
highScoresFileName :: String -> IO FilePath
highScoresFileName game = do
	dataDir <- userDataPath
	return (dataDir </> (game ++ ".highscores"))
-- | Load the per-user high-score table for @game@, seeding it from the
-- bundled default file on first run. Scores come out highest-first.
loadHighScoreTable :: String -> IO HighScoreState
loadHighScoreTable game = do
	filename <- highScoresFileName game
	exists <- doesFileExist filename
	unless exists $ copyFile (getAssetPath "highscores") filename
	-- NOTE(review): lazy I/O — the handle is only closed once the whole
	-- contents have been forced; kept as-is to preserve behaviour.
	file <- openFile filename ReadMode
	contents <- hGetContents file
	-- Each non-blank line has the form "name=score".
	-- NOTE(review): 'tail' and 'read' are partial — a line without '=' or
	-- with a non-numeric score crashes; assumes the file is only ever
	-- produced by 'writeHighScoreTable'.
	let parseEntry entry =
		let (name, rawScore) = break (== '=') entry
		in (take maxNameLength (trim name), read (trim (tail rawScore)))
	let entries = [ parseEntry l | l <- map trim (lines contents), not (null l) ]
	return HighScoreState
		{ game = game
		, scores = reverse (sortBy (comparing snd) entries)
		, editing = Nothing
		}
-- | Persist the high-score table to the user's data directory, one
-- "name=score" line per entry.
writeHighScoreTable :: HighScoreState -> IO ()
writeHighScoreTable highScores = do
	filename <- highScoresFileName (game highScores)
	-- 'withFile' guarantees the handle is closed even if a write fails;
	-- the previous explicit openFile/hClose pair leaked on exception.
	withFile filename WriteMode $ \file ->
		forM_ (scores highScores) $ \(name, score) ->
			hPutStrLn file (name ++ "=" ++ show score)
-- | Does @score@ earn a place in the table? Yes when the table still has
-- room, or when some existing entry's score does not exceed it.
isNewHighScore :: Int -> HighScoreState -> Bool
isNewHighScore score (HighScoreState {scores})
	| length scores < maxHighScores = True
	| otherwise = any ((<= score) . snd) scores
-- | Insert a new score at its rank (ties go above existing equal scores),
-- truncate to 'maxHighScores' entries, and mark the fresh, empty-named
-- entry as the one being edited.
insertHighScore :: Int -> HighScoreState -> HighScoreState
insertHighScore score (highScores@HighScoreState {scores}) =
	highScores
		{ scores = take maxHighScores (before ++ ("", score) : after)
		, editing = Just slot
		}
	where
		slot = fromMaybe (length scores) (findIndex ((<= score) . snd) scores)
		(before, after) = splitAt slot scores
-- | Is a table entry currently having its name typed in?
isEditing :: HighScoreState -> Bool
isEditing = isJust . editing
-- | Draw the table onto @dst@ at @(x, y)@ within width @w@: names on the
-- left, right-aligned scores, one font line per entry. The entry being
-- edited gets a trailing "_" cursor after its name.
renderHighScores :: Surface -> (Int, Int) -> Int ->
	Font -> Color -> HighScoreState -> IO ()
renderHighScores dst (x, y) w font color state = do
	lineSkip <- fontLineSkip font
	forM_ (zip [0..] (scores state)) $ \(row, (name, score)) -> do
		let rowY = y + row * lineSkip
		-- Append a cursor to the row currently being edited.
		let shownName = if editing state == Just row then name ++ "_" else name
		-- NOTE(review): presumably the Try variant is used because
		-- rendering can fail (e.g. empty text) — confirm against SDL-ttf.
		mNameSurface <- tryRenderUTF8Solid font shownName color
		case mNameSurface of
			Just nameSurface -> do
				blitSurface nameSurface Nothing dst (Just$ Rect x rowY 0 0)
				return ()
			Nothing -> return ()
		scoreSurface <- renderUTF8Solid font (show score) color
		let scoreWidth = surfaceGetWidth scoreSurface
		blitSurface scoreSurface Nothing
			dst (Just$ Rect (x + w - scoreWidth) rowY 0 0)
		return ()
-- | Call when high-score name entry starts: switches SDL into typing mode
-- (key repeat plus unicode translation of key presses).
startEditing :: IO ()
startEditing = typingMode True
-- | Call when name entry finishes: leave typing mode and persist the
-- table to disk.
endEditing :: HighScoreState -> IO ()
endEditing state = do
	typingMode False
	writeHighScoreTable state
-- | Toggle SDL "typing" input: key repeat (500ms delay, 30ms interval)
-- plus unicode translation of keysyms, both needed while a player types
-- their name.
typingMode :: Bool -> IO ()
typingMode enable
	| enable = enableKeyRepeat 500 30 >> enableUnicode True >> return ()
	| otherwise = enableKeyRepeat 0 0 >> enableUnicode False >> return ()
-- | Event handler used while a high-score name is being entered.
-- Returns 'False' when the application should quit.
highScoreEventHandler :: EventHandler HighScoreState
highScoreEventHandler Quit = return False
highScoreEventHandler (KeyDown sym) = do
	state@(HighScoreState {scores}) <- get
	let
		-- NOTE(review): 'fromJust' assumes this handler only runs while an
		-- entry is being edited — confirm at the call sites.
		slot = fromJust$ editing state
		(curName, curScore) = scores !! slot
		pressed = symUnicode sym
		key = symKey sym
	-- The three cases below are mutually exclusive for real key events,
	-- so at most one 'put' fires per event.
	-- Printable character: append it, up to the name-length limit.
	when (isValidNameChar pressed && length curName < maxNameLength) $
		put$ state { scores =
			updateList slot (curName ++ [pressed], curScore) scores }
	-- Backspace: delete the last character (no-op on an empty name).
	when (pressed == '\b') $
		put$ state { scores =
			updateList slot (safeInit curName, curScore) scores }
	-- Enter (main or keypad): finish editing.
	when (key `elem` [SDLK_RETURN, SDLK_KP_ENTER]) $
		put$ state { editing = Nothing }
	return True
	where
	safeInit [] = []
	safeInit xs = Data.List.init xs
	-- Names may contain letters, digits, spaces and punctuation other
	-- than '=' (which delimits name and score in the save file).
	isValidNameChar c = isAlphaNum c || c == ' ' ||
		(isPunctuation c && c /= '=')
highScoreEventHandler _ = return True
-- | Replace the element at index @i@ of @xs@. An index one past the end
-- appends the element (take/drop never fail on out-of-range indices).
updateList :: Int -> a -> [a] -> [a]
updateList i newVal xs = concat [take i xs, [newVal], drop (i + 1) xs]
| CLowcay/CC_Clones | src/Common/HighScores.hs | gpl-3.0 | 5,561 | 67 | 20 | 1,031 | 1,705 | 867 | 838 | 138 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.DicomStores.GetIAMPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.dicomStores.getIamPolicy@.
module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.DicomStores.GetIAMPolicy
(
-- * REST Resource
ProjectsLocationsDataSetsDicomStoresGetIAMPolicyResource
-- * Creating a Request
, projectsLocationsDataSetsDicomStoresGetIAMPolicy
, ProjectsLocationsDataSetsDicomStoresGetIAMPolicy
-- * Request Lenses
, pldsdsgipOptionsRequestedPolicyVersion
, pldsdsgipXgafv
, pldsdsgipUploadProtocol
, pldsdsgipAccessToken
, pldsdsgipUploadType
, pldsdsgipResource
, pldsdsgipCallback
) where
import Network.Google.Healthcare.Types
import Network.Google.Prelude
-- | A resource alias for @healthcare.projects.locations.datasets.dicomStores.getIamPolicy@ method which the
-- 'ProjectsLocationsDataSetsDicomStoresGetIAMPolicy' request conforms to.
type ProjectsLocationsDataSetsDicomStoresGetIAMPolicyResource
     =
     -- The resource path is captured together with a ":getIamPolicy" verb
     -- suffix ('CaptureMode'); the rest are the standard Google API query
     -- parameters, ending in a JSON 'Policy' response.
     "v1" :>
       CaptureMode "resource" "getIamPolicy" Text :>
         QueryParam "options.requestedPolicyVersion"
           (Textual Int32)
           :>
           QueryParam "$.xgafv" Xgafv :>
             QueryParam "upload_protocol" Text :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "callback" Text :>
                     QueryParam "alt" AltJSON :> Get '[JSON] Policy
-- | Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ 'projectsLocationsDataSetsDicomStoresGetIAMPolicy' smart constructor.
data ProjectsLocationsDataSetsDicomStoresGetIAMPolicy =
  ProjectsLocationsDataSetsDicomStoresGetIAMPolicy'
    { _pldsdsgipOptionsRequestedPolicyVersion :: !(Maybe (Textual Int32)) -- ^ requested IAM policy format version
    , _pldsdsgipXgafv :: !(Maybe Xgafv) -- ^ V1 error format
    , _pldsdsgipUploadProtocol :: !(Maybe Text) -- ^ upload protocol for media
    , _pldsdsgipAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _pldsdsgipUploadType :: !(Maybe Text) -- ^ legacy upload protocol
    , _pldsdsgipResource :: !Text -- ^ required resource path
    , _pldsdsgipCallback :: !(Maybe Text) -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsDataSetsDicomStoresGetIAMPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldsdsgipOptionsRequestedPolicyVersion'
--
-- * 'pldsdsgipXgafv'
--
-- * 'pldsdsgipUploadProtocol'
--
-- * 'pldsdsgipAccessToken'
--
-- * 'pldsdsgipUploadType'
--
-- * 'pldsdsgipResource'
--
-- * 'pldsdsgipCallback'
projectsLocationsDataSetsDicomStoresGetIAMPolicy
    :: Text -- ^ 'pldsdsgipResource'
    -> ProjectsLocationsDataSetsDicomStoresGetIAMPolicy
-- Every optional query parameter starts unset; populate them through the
-- lenses defined below.
projectsLocationsDataSetsDicomStoresGetIAMPolicy pPldsdsgipResource_ =
  ProjectsLocationsDataSetsDicomStoresGetIAMPolicy'
    { _pldsdsgipOptionsRequestedPolicyVersion = Nothing
    , _pldsdsgipXgafv = Nothing
    , _pldsdsgipUploadProtocol = Nothing
    , _pldsdsgipAccessToken = Nothing
    , _pldsdsgipUploadType = Nothing
    , _pldsdsgipResource = pPldsdsgipResource_
    , _pldsdsgipCallback = Nothing
    }
-- | Optional. The policy format version to be returned. Valid values are 0,
-- 1, and 3. Requests specifying an invalid value will be rejected.
-- Requests for policies with any conditional bindings must specify version
-- 3. Policies without any conditional bindings may specify any valid value
-- or leave the field unset. To learn which resources support conditions in
-- their IAM policies, see the [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies).
pldsdsgipOptionsRequestedPolicyVersion :: Lens' ProjectsLocationsDataSetsDicomStoresGetIAMPolicy (Maybe Int32)
-- 'mapping _Coerce' converts between the wire representation
-- ('Textual Int32') and the plain 'Int32' this lens exposes.
pldsdsgipOptionsRequestedPolicyVersion
  = lens _pldsdsgipOptionsRequestedPolicyVersion
      (\ s a ->
         s{_pldsdsgipOptionsRequestedPolicyVersion = a})
      . mapping _Coerce
-- | V1 error format.
pldsdsgipXgafv :: Lens' ProjectsLocationsDataSetsDicomStoresGetIAMPolicy (Maybe Xgafv)
pldsdsgipXgafv
  = lens _pldsdsgipXgafv
      (\ s a -> s{_pldsdsgipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldsdsgipUploadProtocol :: Lens' ProjectsLocationsDataSetsDicomStoresGetIAMPolicy (Maybe Text)
pldsdsgipUploadProtocol
  = lens _pldsdsgipUploadProtocol
      (\ s a -> s{_pldsdsgipUploadProtocol = a})
-- | OAuth access token.
pldsdsgipAccessToken :: Lens' ProjectsLocationsDataSetsDicomStoresGetIAMPolicy (Maybe Text)
pldsdsgipAccessToken
  = lens _pldsdsgipAccessToken
      (\ s a -> s{_pldsdsgipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldsdsgipUploadType :: Lens' ProjectsLocationsDataSetsDicomStoresGetIAMPolicy (Maybe Text)
pldsdsgipUploadType
  = lens _pldsdsgipUploadType
      (\ s a -> s{_pldsdsgipUploadType = a})
-- | REQUIRED: The resource for which the policy is being requested. See the
-- operation documentation for the appropriate value for this field.
pldsdsgipResource :: Lens' ProjectsLocationsDataSetsDicomStoresGetIAMPolicy Text
pldsdsgipResource
  = lens _pldsdsgipResource
      (\ s a -> s{_pldsdsgipResource = a})
-- | JSONP
pldsdsgipCallback :: Lens' ProjectsLocationsDataSetsDicomStoresGetIAMPolicy (Maybe Text)
pldsdsgipCallback
  = lens _pldsdsgipCallback
      (\ s a -> s{_pldsdsgipCallback = a})
-- Wires the request record onto the route type: fields are passed
-- positionally in exactly the order the Resource type declares its
-- captures and query parameters.
instance GoogleRequest
           ProjectsLocationsDataSetsDicomStoresGetIAMPolicy
         where
        type Rs
               ProjectsLocationsDataSetsDicomStoresGetIAMPolicy
             = Policy
        type Scopes
               ProjectsLocationsDataSetsDicomStoresGetIAMPolicy
             = '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient
          ProjectsLocationsDataSetsDicomStoresGetIAMPolicy'{..}
          = go _pldsdsgipResource
              _pldsdsgipOptionsRequestedPolicyVersion
              _pldsdsgipXgafv
              _pldsdsgipUploadProtocol
              _pldsdsgipAccessToken
              _pldsdsgipUploadType
              _pldsdsgipCallback
              (Just AltJSON)
              healthcareService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           ProjectsLocationsDataSetsDicomStoresGetIAMPolicyResource)
                      mempty
| brendanhay/gogol | gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/DicomStores/GetIAMPolicy.hs | mpl-2.0 | 7,354 | 0 | 16 | 1,463 | 807 | 472 | 335 | 129 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.ReturnpolicyOnline.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all existing return policies.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.returnpolicyonline.list@.
module Network.Google.Resource.Content.ReturnpolicyOnline.List
(
-- * REST Resource
ReturnpolicyOnlineListResource
-- * Creating a Request
, returnpolicyOnlineList
, ReturnpolicyOnlineList
-- * Request Lenses
, rolXgafv
, rolMerchantId
, rolUploadProtocol
, rolAccessToken
, rolUploadType
, rolCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.returnpolicyonline.list@ method which the
-- 'ReturnpolicyOnlineList' request conforms to.
type ReturnpolicyOnlineListResource =
     -- GET content/v2.1/{merchantId}/returnpolicyonline plus the standard
     -- Google API query parameters; responds with the JSON list payload.
     "content" :>
       "v2.1" :>
         Capture "merchantId" (Textual Int64) :>
           "returnpolicyonline" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "callback" Text :>
                       QueryParam "alt" AltJSON :>
                         Get '[JSON] ListReturnPolicyOnlineResponse
-- | Lists all existing return policies.
--
-- /See:/ 'returnpolicyOnlineList' smart constructor.
data ReturnpolicyOnlineList =
  ReturnpolicyOnlineList'
    { _rolXgafv :: !(Maybe Xgafv) -- ^ V1 error format
    , _rolMerchantId :: !(Textual Int64) -- ^ required merchant id
    , _rolUploadProtocol :: !(Maybe Text) -- ^ upload protocol for media
    , _rolAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _rolUploadType :: !(Maybe Text) -- ^ legacy upload protocol
    , _rolCallback :: !(Maybe Text) -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ReturnpolicyOnlineList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rolXgafv'
--
-- * 'rolMerchantId'
--
-- * 'rolUploadProtocol'
--
-- * 'rolAccessToken'
--
-- * 'rolUploadType'
--
-- * 'rolCallback'
returnpolicyOnlineList
    :: Int64 -- ^ 'rolMerchantId'
    -> ReturnpolicyOnlineList
-- All optional query parameters start unset; '_Coerce #' wraps the plain
-- 'Int64' merchant id in its 'Textual' wire representation.
returnpolicyOnlineList pRolMerchantId_ =
  ReturnpolicyOnlineList'
    { _rolXgafv = Nothing
    , _rolMerchantId = _Coerce # pRolMerchantId_
    , _rolUploadProtocol = Nothing
    , _rolAccessToken = Nothing
    , _rolUploadType = Nothing
    , _rolCallback = Nothing
    }
-- | V1 error format.
rolXgafv :: Lens' ReturnpolicyOnlineList (Maybe Xgafv)
rolXgafv = lens _rolXgafv (\ s a -> s{_rolXgafv = a})
-- | Required. The id of the merchant for which to retrieve the return policy
-- online object.
rolMerchantId :: Lens' ReturnpolicyOnlineList Int64
-- The trailing '_Coerce' converts between the wire type ('Textual Int64')
-- and the plain 'Int64' this lens exposes.
rolMerchantId
  = lens _rolMerchantId
      (\ s a -> s{_rolMerchantId = a})
      . _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
rolUploadProtocol :: Lens' ReturnpolicyOnlineList (Maybe Text)
rolUploadProtocol
  = lens _rolUploadProtocol
      (\ s a -> s{_rolUploadProtocol = a})
-- | OAuth access token.
rolAccessToken :: Lens' ReturnpolicyOnlineList (Maybe Text)
rolAccessToken
  = lens _rolAccessToken
      (\ s a -> s{_rolAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
rolUploadType :: Lens' ReturnpolicyOnlineList (Maybe Text)
rolUploadType
  = lens _rolUploadType
      (\ s a -> s{_rolUploadType = a})
-- | JSONP
rolCallback :: Lens' ReturnpolicyOnlineList (Maybe Text)
rolCallback
  = lens _rolCallback (\ s a -> s{_rolCallback = a})
-- Wires the request record onto the route type: fields are passed
-- positionally in exactly the order the Resource type declares its
-- capture and query parameters.
instance GoogleRequest ReturnpolicyOnlineList where
        type Rs ReturnpolicyOnlineList =
             ListReturnPolicyOnlineResponse
        type Scopes ReturnpolicyOnlineList =
             '["https://www.googleapis.com/auth/content"]
        requestClient ReturnpolicyOnlineList'{..}
          = go _rolMerchantId _rolXgafv _rolUploadProtocol
              _rolAccessToken
              _rolUploadType
              _rolCallback
              (Just AltJSON)
              shoppingContentService
          where go
                  = buildClient
                      (Proxy :: Proxy ReturnpolicyOnlineListResource)
                      mempty
| brendanhay/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/ReturnpolicyOnline/List.hs | mpl-2.0 | 4,921 | 0 | 17 | 1,139 | 723 | 420 | 303 | 107 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.