| code (string, lengths 5–1.03M) | repo_name (string, lengths 5–90) | path (string, lengths 4–158) | license (string, 15 classes) | size (int64, 5–1.03M) | n_ast_errors (int64, 0–53.9k) | ast_max_depth (int64, 2–4.17k) | n_whitespaces (int64, 0–365k) | n_ast_nodes (int64, 3–317k) | n_ast_terminals (int64, 1–171k) | n_ast_nonterminals (int64, 1–146k) | loc (int64, -1–37.3k) | cycloplexity (int64, -1–1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Basal where
type DateTime = String
type Url = String
| lesguillemets/forvo-pronounce.hs | Basal.hs | mit | 61 | 0 | 4 | 12 | 16 | 11 | 5 | 3 | 0 |
module Unused.TermSearch
( SearchResults(..)
, SearchBackend(..)
, SearchTerm
, search
) where
import qualified Data.Maybe as M
import GHC.IO.Exception (ExitCode(ExitSuccess))
import qualified System.Process as P
import Unused.TermSearch.Internal
(commandLineOptions, parseSearchResult)
import Unused.TermSearch.Types
(SearchBackend(..), SearchResults(..))
import Unused.Types (SearchTerm, searchTermToString)
search :: SearchBackend -> SearchTerm -> IO SearchResults
search backend t =
SearchResults . M.mapMaybe (parseSearchResult backend t) <$>
(lines <$> performSearch backend (searchTermToString t))
performSearch :: SearchBackend -> String -> IO String
performSearch b t = extractSearchResults b <$> searchOutcome
where
searchOutcome = P.readProcessWithExitCode (backendToCommand b) (commandLineOptions b t) ""
backendToCommand Rg = "rg"
backendToCommand Ag = "ag"
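-- Note added for clarity (not in the original module): for the rg backend,
-- stdout is only used when the process exits successfully and stderr is
-- returned otherwise, while ag's stdout is used regardless of exit code.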
extractSearchResults :: SearchBackend -> (ExitCode, String, String) -> String
extractSearchResults Rg (ExitSuccess, stdout, _) = stdout
extractSearchResults Rg (_, _, stderr) = stderr
extractSearchResults Ag (_, stdout, _) = stdout
| joshuaclayton/unused | src/Unused/TermSearch.hs | mit | 1,163 | 0 | 10 | 186 | 331 | 190 | 141 | 26 | 2 |
-- | Data.TSTP.Role module
module Data.TSTP.Role where
-- | Formula roles.
data Role = Assumption
| Axiom
| Conjecture
| Definition
| FiDomain
| FiFunctors
| FiPredicates
| Hypothesis
| Lemma
| NegatedConjecture
| Plain
| Theorem
| Type
| Unknown
deriving (Eq, Ord, Show, Read)
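-- Small illustrative example (added, not part of the original module): the
-- derived 'Show' and 'Read' instances round-trip role names as plain strings.
--
-- >>> read "NegatedConjecture" :: Role
-- NegatedConjecture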
| agomezl/tstp2agda | src/Data/TSTP/Role.hs | mit | 423 | 0 | 6 | 190 | 74 | 47 | 27 | 16 | 0 |
-- Tree.hs
module Tree where
import Data.Monoid
import qualified Data.Map as M
import qualified Data.Tree as T
data TravelGuide = TravelGuide { title :: String, authors :: [String],
price :: Double } deriving (Show, Eq, Ord)
newtype TravelGuidePrice = TravelGuidePrice TravelGuide deriving Eq
instance Ord TravelGuidePrice where
(TravelGuidePrice (TravelGuide t1 a1 p1)) <=
(TravelGuidePrice(TravelGuide t2 a2 p2)) =
p1 < p2 || (p1 == p2 && (t1 < t2 || (t1 == t2 && a1 <= a2)))
data BinaryTree a = Node a (BinaryTree a) (BinaryTree a)
| Leaf
deriving Show
-- Binary Trees with Monoidal Cache
data BinaryTree3 v c = Node3 v c (BinaryTree3 v c) (BinaryTree3 v c)
| Leaf3
deriving (Show, Eq, Ord)
treeInsert4 :: (Ord v, Monoid c) => v -> c -> BinaryTree3 v c -> BinaryTree3 v c
treeInsert4 v c (Node3 v2 c2 l r) =
case compare v v2 of
EQ -> Node3 v2 c2 l r
LT -> let newLeft = treeInsert4 v c l
newCached = c2 <> cached newLeft <> cached r
in Node3 v2 newCached newLeft r
GT -> let newRight = treeInsert4 v c r
newCached = c2 <> cached l <> cached newRight
in Node3 v2 newCached l newRight
treeInsert4 v c Leaf3 = Node3 v c Leaf3 Leaf3
cached :: Monoid c => BinaryTree3 v c -> c
cached (Node3 _ c _ _) = c
cached Leaf3 = mempty
newtype Min = Min Double deriving Show
instance Semigroup Min where
  Min x <> Min y = Min $ min x y
instance Monoid Min where
  mempty = Min infinity where infinity = 1/0
  mappend = (<>)
modifyTravelGuidePrice :: Double -> [TravelGuide] -> [TravelGuide]
modifyTravelGuidePrice m = map (\tg -> tg { price = m * price tg })
modifyTravelGuidePriceMap :: Double -> M.Map a TravelGuide -> M.Map a TravelGuide
modifyTravelGuidePriceMap m = M.map (\tg -> tg { price = m * price tg })
modifyTravelGuidePriceTree :: Double -> T.Tree TravelGuide -> T.Tree TravelGuide
modifyTravelGuidePriceTree m = fmap (\tg -> tg { price = m * price tg })
modifyTravelGuidePrice' :: Functor f => Double -> f TravelGuide -> f TravelGuide
modifyTravelGuidePrice' m = fmap (\tg -> tg { price = m * price tg })
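-- Brief usage sketch (illustrative, not from the original chapter code): every
-- insertion keeps the cached minimum up to date, so the smallest measure in the
-- whole tree can be read off the root via 'cached'.
--
-- >>> let t = treeInsert4 7.5 (Min 7.5) (treeInsert4 12.0 (Min 12.0) Leaf3)
-- >>> cached t
-- Min 7.5
--
-- The 'Functor'-based modifyTravelGuidePrice' subsumes the list, Map and Tree
-- variants above, since all three containers are Functors.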
| hnfmr/beginning_haskell | Chapter4.hs | mit | 2,226 | 0 | 14 | 624 | 828 | 431 | 397 | 43 | 3 |
module Main where
import Rotations
main :: IO ()
main = do
inputWords <- lines <$> getContents
mapM_ (putStrLn . format . rotateWord) inputWords
where
format (n, word) = show n ++ " " ++ word
| tyehle/programming-studio | 2017-W20/tobin/app/Main.hs | mit | 204 | 0 | 10 | 48 | 79 | 41 | 38 | 7 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleContexts, FlexibleInstances, UndecidableInstances, OverlappingInstances #-}
{- |
The HList library
(C) 2004, Oleg Kiselyov, Ralf Laemmel, Keean Schupke
A generic implementation of a type equality predicate. The given
implementation only works for GHC. The specific coding here is only
shown for completeness' sake. We actually favour the encoding from
"Data.HList.TypeEqGeneric1" for its conciseness. The specific coding here
does not rely on separate compilation (while TypeEqGeneric1.hs
does), but on some other tricks.
-}
module Data.HList.TypeEqGeneric2 where
-- We make everything self-contained to show that separate compilation
-- is not needed. Also, we need a new class constraint for TypeEqBool,
-- (unless we again employ separate compilation in some ways) so
that instance selection of its generic instance within client code
of TypeEqBool does not run into problems with the instance
-- constraints.
import Data.HList.FakePrelude hiding (TypeEq,typeEq,proxyEq,TypeCast,typeCast)
import Data.HList.TypeCastGeneric2
-- Re-enabled for testing
typeEq :: TypeEq t t' b => t -> t' -> b
typeEq = undefined
{-----------------------------------------------------------------------------}
-- The actual encoding
class TypeEq' () x y b => TypeEq x y b | x y -> b
class TypeEq' q x y b | q x y -> b
class TypeEq'' q x y b | q x y -> b
instance TypeEq' () x y b => TypeEq x y b
-- This instance used to work <= GHC 6.2
-- instance TypeEq' () x x HTrue
-- There were some problems however with GHC CVS 6.3.
-- So we favour the following, more stable (?) instance instead.
instance TypeCast b HTrue => TypeEq' () x x b
instance TypeEq'' q x y b => TypeEq' q x y b
instance TypeEq'' () x y HFalse
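-- Small illustration (added commentary; the behaviour follows from the
-- instances above but is not spelled out in the original module): for two
-- identical types the result parameter 'b' improves to HTrue via the TypeCast
-- instance, and the catch-all TypeEq'' instance yields HFalse otherwise.
--
--   typeEq 'a' 'b'   -- inferred result type: HTrue
--   typeEq 'a' True  -- inferred result type: HFalse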
{-----------------------------------------------------------------------------}
| bjornbm/HList-classic | Data/HList/TypeEqGeneric2.hs | mit | 1,882 | 0 | 7 | 320 | 248 | 139 | 109 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-certificatemanager-certificate-domainvalidationoption.html
module Stratosphere.ResourceProperties.CertificateManagerCertificateDomainValidationOption where
import Stratosphere.ResourceImports
-- | Full data type definition for
-- CertificateManagerCertificateDomainValidationOption. See
-- 'certificateManagerCertificateDomainValidationOption' for a more
-- convenient constructor.
data CertificateManagerCertificateDomainValidationOption =
CertificateManagerCertificateDomainValidationOption
{ _certificateManagerCertificateDomainValidationOptionDomainName :: Val Text
, _certificateManagerCertificateDomainValidationOptionValidationDomain :: Val Text
} deriving (Show, Eq)
instance ToJSON CertificateManagerCertificateDomainValidationOption where
toJSON CertificateManagerCertificateDomainValidationOption{..} =
object $
catMaybes
[ (Just . ("DomainName",) . toJSON) _certificateManagerCertificateDomainValidationOptionDomainName
, (Just . ("ValidationDomain",) . toJSON) _certificateManagerCertificateDomainValidationOptionValidationDomain
]
-- | Constructor for 'CertificateManagerCertificateDomainValidationOption'
-- containing required fields as arguments.
certificateManagerCertificateDomainValidationOption
:: Val Text -- ^ 'cmcdvoDomainName'
-> Val Text -- ^ 'cmcdvoValidationDomain'
-> CertificateManagerCertificateDomainValidationOption
certificateManagerCertificateDomainValidationOption domainNamearg validationDomainarg =
CertificateManagerCertificateDomainValidationOption
{ _certificateManagerCertificateDomainValidationOptionDomainName = domainNamearg
, _certificateManagerCertificateDomainValidationOptionValidationDomain = validationDomainarg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-certificatemanager-certificate-domainvalidationoption.html#cfn-certificatemanager-certificate-domainvalidationoptions-domainname
cmcdvoDomainName :: Lens' CertificateManagerCertificateDomainValidationOption (Val Text)
cmcdvoDomainName = lens _certificateManagerCertificateDomainValidationOptionDomainName (\s a -> s { _certificateManagerCertificateDomainValidationOptionDomainName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-certificatemanager-certificate-domainvalidationoption.html#cfn-certificatemanager-certificate-domainvalidationoption-validationdomain
cmcdvoValidationDomain :: Lens' CertificateManagerCertificateDomainValidationOption (Val Text)
cmcdvoValidationDomain = lens _certificateManagerCertificateDomainValidationOptionValidationDomain (\s a -> s { _certificateManagerCertificateDomainValidationOptionValidationDomain = a })
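-- Hypothetical usage sketch (added for illustration; it assumes the 'Literal'
-- constructor of 'Val' is in scope via Stratosphere.ResourceImports):
--
--   exampleOption :: CertificateManagerCertificateDomainValidationOption
--   exampleOption =
--     certificateManagerCertificateDomainValidationOption
--       (Literal "example.com")
--       (Literal "example.com")
--
-- Individual fields can then be adjusted with the 'cmcdvoDomainName' and
-- 'cmcdvoValidationDomain' lenses.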
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/CertificateManagerCertificateDomainValidationOption.hs | mit | 2,896 | 0 | 13 | 222 | 267 | 153 | 114 | 29 | 1 |
module Language.HAsm.Test where
import Language.HAsm.Types
import Language.HAsm.Parse
import Language.HAsm.Codegen
import Language.HAsm.PrettyPrint
{-
- Test Examples
-}
regl = OpndReg . RegL
imml = OpndImm . ImmL
immb = OpndImm . ImmB
label l = OpndRM noSIB (DisplLabel l)
instr i = HasmStInstr [] . Operation i
dir = HasmStDirective
regdspl b d = OpndRM (SIB 1 Nothing (Just b)) d
linux_null_s = [
HasmStLabel "_start", -- _start:
instr OpMov [imml 0x1, regl RegEAX], -- movl $1, %eax
instr OpMov [imml 0x0, regl RegEBX], -- movl $0, %ebx
instr OpInt [immb 0x80] ] -- int 0x80
label_mov_s = [
instr OpMov [label "x", regl RegEAX]]
loop_jmp_s = [
HasmStLabel "_start",
HasmStLabel "loop_start",
instr OpJmp [label "loop_start"] ]
factorial_s = [
instr OpMov [regdspl RegESP (Displ8 4), regl RegECX],
instr OpCmp [imml 1, regl RegECX],
instr OpJe [label "lbl1"],
instr OpMov [imml 1, regl RegEAX],
instr OpMov [imml 1, regl RegEDX],
HasmStLabel "lbl2",
instr OpIMul [regl RegEDX, regl RegEAX],
instr OpAdd [imml 1, regl RegEDX],
instr OpCmp [regl RegECX, regl RegEDX],
instr OpJne [label "lbl2"],
instr OpRet [],
HasmStLabel "lbl1",
instr OpMov [imml 1, regl RegEAX],
instr OpRet [] ]
--- test functions for GHCi ----
fromRight :: Show e => Either e a -> a
fromRight = either (error . show) id
assembleWithBase addr pstmts = firstPass (addr, emptyLblDb) pstmts >>= secondPass addr
assembleFromZero = assembleWithBase 0
withTestSrc = map (\s -> (s, SrcPos "test.s" 0 0))
assembleStmts :: [HasmStatement] -> [(HasmStatement, SrcPos, [Word8])]
assembleStmts = fromRight . assembleFromZero . withTestSrc
testParse s = fromRight $ hasmParseWithSource "~" s
testAssemble stmts = fromRight $ assembleFromZero stmts
test = testAssemble . testParse
testcmds = testAssemble . withTestSrc
-- e.g.
-- ghci> putPretty $ test "imull (%edi)"
-- ghci> putPretty $ testcmds factorial_s
| EarlGray/hasm | src/Language/HAsm/Test.hs | mit | 1,980 | 0 | 10 | 401 | 689 | 362 | 327 | 49 | 1 |
sayHello :: String -> IO ()
sayHello x = putStrLn ("Hello, " ++ x ++ "!")
triple x = x * 3
squareAndMultiplyByPi x = (x ^ 2) * pi
x = 7
y = 10
f = x + y
| rasheedja/HaskellFromFirstPrinciples | Chapter2/test.hs | mit | 155 | 0 | 8 | 43 | 87 | 45 | 42 | 7 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances,
MultiParamTypeClasses, DeriveDataTypeable, OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.Errors
-- Copyright : 2007-2011 Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GPL
--
-- Maintainer : maintainer@leksah.org
-- Stability : provisional
-- Portability :
--
-- | A pane which displays a list of errors
--
-----------------------------------------------------------------------------
module IDE.Pane.Errors (
ErrorsPane
, ErrorsState
, fillErrorList
, getErrors
, addErrorToList
, selectMatchingErrors
) where
import Prelude ()
import Prelude.Compat
import Data.Typeable (Typeable)
import IDE.Core.State
import IDE.ImportTool
(resolveErrors, resolveMenuItems)
import Data.List (groupBy, sortBy, elemIndex)
import IDE.LogRef (showSourceSpan)
import Control.Monad.IO.Class (MonadIO(..))
import IDE.Utils.GUIUtils
(treeViewContextMenu', treeViewContextMenu, __, treeViewToggleRow)
import Data.Text (dropWhileEnd, Text)
import Control.Applicative (Alternative(..))
import Control.Monad (filterM, foldM_, unless, void, when)
import qualified Data.Text as T
(unlines, dropWhileEnd, unpack, pack, intercalate, lines,
takeWhile, length, drop)
import Data.IORef (writeIORef, readIORef, newIORef, IORef)
import Data.Maybe (isJust, isNothing)
import qualified Data.Foldable as F (toList)
import qualified Data.Sequence as Seq (null, elemIndexL)
import Data.Monoid ((<>))
import Data.Ord (comparing)
import Data.Char (isSpace)
import Data.Tree (Forest, Tree(..), Tree)
import Data.Function.Compat ((&))
import System.Log.Logger (debugM)
import Data.Foldable (forM_)
import GI.Gtk.Objects.VBox (vBoxNew, VBox(..))
import GI.Gtk.Objects.ScrolledWindow
(scrolledWindowSetPolicy, scrolledWindowSetShadowType,
scrolledWindowNew, ScrolledWindow(..))
import GI.Gtk.Objects.TreeView
(treeViewScrollToCell, treeViewExpandToPath,
onTreeViewRowActivated, treeViewGetSelection, treeViewAppendColumn,
treeViewRowExpanded, setTreeViewHeadersVisible, setTreeViewRulesHint,
setTreeViewLevelIndentation, treeViewSetModel, treeViewNew,
TreeView(..))
import GI.Gtk.Objects.ToggleButton
(toggleButtonGetActive, onToggleButtonToggled,
toggleButtonNewWithLabel, setToggleButtonActive, ToggleButton(..))
import GI.Gtk.Objects.Widget
(widgetShowAll, afterWidgetFocusInEvent, toWidget)
import Data.GI.Base (set, get)
import GI.Gtk.Objects.Notebook (Notebook(..))
import GI.Gtk.Objects.Window (Window(..))
import GI.Gtk.Objects.HBox (hBoxNew)
import Graphics.UI.Editor.Parameters (Packing(..), boxPackStart')
import GI.Gtk.Objects.TreeViewColumn
(noTreeViewColumn, TreeViewColumn(..), treeViewColumnSetSizing,
treeViewColumnNew)
import GI.Gtk.Objects.CellRendererPixbuf
(setCellRendererPixbufIconName, cellRendererPixbufNew)
import GI.Gtk.Interfaces.CellLayout (cellLayoutPackStart)
import Data.GI.Gtk.ModelView.CellLayout
(cellLayoutSetDataFunc', cellLayoutSetDataFunction)
import GI.Gtk.Enums
(PolicyType(..), ShadowType(..), SelectionMode(..),
TreeViewColumnSizing(..))
import GI.Gtk.Objects.CellRendererText
(setCellRendererTextText, cellRendererTextNew)
import GI.Gtk.Interfaces.TreeModel
(treeModelGetIterFirst, treeModelGetPath)
import Data.GI.Gtk.ModelView.CustomStore (customStoreGetRow)
import GI.Gtk.Objects.TreeSelection
(treeSelectionSelectPath, treeSelectionUnselectAll,
treeSelectionSetMode)
import GI.Gtk.Objects.Adjustment (noAdjustment)
import GI.Gtk.Objects.Container (containerAdd)
import Control.Monad.Reader (MonadReader(..))
import Control.Monad.Trans.Class (MonadTrans(..))
import Data.GI.Gtk.ModelView.ForestStore
(forestStoreInsert, forestStoreClear, forestStoreNew, ForestStore(..),
forestStoreGetTree, forestStoreGetValue, forestStoreGetForest)
import GI.Gtk.Objects.Button (buttonSetLabel)
import GI.Gtk.Structs.TreePath
(TreePath(..))
import GI.Gtk.Objects.Clipboard (clipboardSetText, clipboardGet)
import GI.Gdk.Structs.Atom (atomIntern)
import Data.Int (Int32)
import Data.GI.Gtk.ModelView.Types
(treeSelectionGetSelectedRows', treePathNewFromIndices')
import GI.Gtk (getToggleButtonActive)
-- | The representation of the Errors pane
data ErrorsPane = ErrorsPane {
vbox :: VBox
, scrolledView :: ScrolledWindow
, treeView :: TreeView
, errorStore :: ForestStore ErrorRecord
, autoClose :: IORef Bool -- ^ If the pane was only displayed to show current error
, errorsButton :: ToggleButton
, warningsButton :: ToggleButton
, suggestionsButton :: ToggleButton
, testFailsButton :: ToggleButton
} deriving Typeable
-- | The data for a single row in the Errors pane
data ErrorRecord = ERLogRef LogRef
| ERPackage IDEPackage Text
| ERIDE Text
| ERFullMessage Text (Maybe LogRef)
deriving (Eq)
-- | The additional state used when recovering the pane
data ErrorsState = ErrorsState
{
showErrors :: Bool
, showWarnings :: Bool
, showSuggestions :: Bool
, showTestFails :: Bool
}
deriving (Eq,Ord,Read,Show,Typeable)
instance Pane ErrorsPane IDEM
where
primPaneName _ = __ "Errors"
getTopWidget = liftIO . toWidget . vbox
paneId _b = "*Errors"
instance RecoverablePane ErrorsPane ErrorsState IDEM where
saveState ErrorsPane{..} = do
showErrors <- getToggleButtonActive errorsButton
showWarnings <- getToggleButtonActive warningsButton
showSuggestions <- getToggleButtonActive suggestionsButton
showTestFails <- getToggleButtonActive testFailsButton
return (Just ErrorsState{..})
recoverState pp ErrorsState{..} = do
nb <- getNotebook pp
mbErrors <- buildPane pp nb builder
forM_ mbErrors $ \ErrorsPane{..} -> do
setToggleButtonActive errorsButton showErrors
setToggleButtonActive warningsButton showWarnings
setToggleButtonActive suggestionsButton showSuggestions
setToggleButtonActive testFailsButton showTestFails
return mbErrors
builder = builder'
-- | Builds an 'ErrorsPane' pane together with a list of
-- event 'Connections'
builder' :: PanePath ->
Notebook ->
Window ->
IDEM (Maybe ErrorsPane, Connections)
builder' _pp _nb _windows = do
ideR <- ask
errorStore <- forestStoreNew []
vbox <- vBoxNew False 0
-- Top box with buttons
hbox <- hBoxNew False 0
boxPackStart' vbox hbox PackNatural 0
errorsButton <- toggleButtonNewWithLabel (__ "Errors")
warningsButton <- toggleButtonNewWithLabel (__ "Warnings")
suggestionsButton <- toggleButtonNewWithLabel (__ "Suggestions")
testFailsButton <- toggleButtonNewWithLabel (__ "Test Failures")
setToggleButtonActive suggestionsButton False
forM_ [errorsButton, warningsButton, suggestionsButton, testFailsButton] $ \b -> do
setToggleButtonActive b True
boxPackStart' hbox b PackNatural 3
onToggleButtonToggled b $ reflectIDE (fillErrorList False) ideR
boxPackStart' vbox hbox PackNatural 0
-- TreeView for bottom part of vbox
treeView <- treeViewNew
treeViewSetModel treeView (Just errorStore)
setTreeViewLevelIndentation treeView 20
setTreeViewRulesHint treeView True
setTreeViewHeadersVisible treeView False
column <- treeViewColumnNew
iconRenderer <- cellRendererPixbufNew
cellLayoutPackStart column iconRenderer False
cellLayoutSetDataFunction column iconRenderer errorStore
$ setCellRendererPixbufIconName iconRenderer . toIcon
treeViewColumnSetSizing column TreeViewColumnSizingAutosize
renderer <- cellRendererTextNew
cellLayoutPackStart column renderer False
cellLayoutSetDataFunc' column renderer errorStore $ \iter -> do
path <- treeModelGetPath errorStore iter
row <- customStoreGetRow errorStore iter
expanded <- treeViewRowExpanded treeView path
setCellRendererTextText renderer $ toDescription expanded row
treeViewAppendColumn treeView column
selB <- treeViewGetSelection treeView
treeSelectionSetMode selB SelectionModeMultiple
scrolledView <- scrolledWindowNew noAdjustment noAdjustment
scrolledWindowSetShadowType scrolledView ShadowTypeIn
containerAdd scrolledView treeView
scrolledWindowSetPolicy scrolledView PolicyTypeAutomatic PolicyTypeAutomatic
boxPackStart' vbox scrolledView PackGrow 0
autoClose <- liftIO $ newIORef False
let pane = ErrorsPane {..}
cid1 <- onIDE afterWidgetFocusInEvent treeView $ do
liftIDE $ makeActive pane
return True
cids2 <- treeViewContextMenu' treeView errorStore contextMenuItems
cid4 <- ConnectC treeView <$> onTreeViewRowActivated treeView (\path col -> do
record <- forestStoreGetValue errorStore path
case record of
ERLogRef logRef -> errorsSelect ideR errorStore path col
ERFullMessage _ ref -> errorsSelect ideR errorStore path col
_ -> return ())
fillErrorList' pane
return (Just pane, [cid1, cid4] ++ cids2)
toIcon :: ErrorRecord -> Text
toIcon (ERLogRef logRef) =
case logRefType logRef of
ErrorRef -> "ide_error"
WarningRef -> "ide_warning"
LintRef -> "ide_suggestion"
TestFailureRef -> "software-update-urgent"
_ -> ""
toIcon (ERPackage _ _) = "dialog-error"
toIcon (ERIDE _) = "dialog-error"
toIcon (ERFullMessage _ _) = ""
toDescription :: Bool -> ErrorRecord -> Text
toDescription expanded errorRec =
case errorRec of
(ERLogRef logRef) -> formatExpandableMessage (T.pack $ logRefFilePath logRef) (refDescription logRef)
(ERIDE msg) -> formatExpandableMessage "" msg
(ERPackage pkg msg) -> formatExpandableMessage (packageIdentifierToString (ipdPackageId pkg))
(packageIdentifierToString (ipdPackageId pkg) <> ": \n" <> msg)
(ERFullMessage msg _) -> removeIndentation msg
where
formatExpandableMessage location msg
| expanded = location
| otherwise = location <> ": " <> msg & removeIndentation
& T.lines
& map removeTrailingWhiteSpace
& T.intercalate " "
-- | Removes the unnecessary indentation
removeIndentation :: Text -> Text
removeIndentation t = T.intercalate "\n" $ map (T.drop minIndent) l
where
l = T.lines t
minIndent = minimum $ map (T.length . T.takeWhile (== ' ')) l
removeTrailingWhiteSpace :: Text -> Text
removeTrailingWhiteSpace = T.dropWhileEnd isSpace
cutOffAt :: Int -> Text -> Text
cutOffAt n t | T.length t < n = t
| otherwise = T.pack (take n (T.unpack t)) <> "..."
-- | Get the Errors pane
getErrors :: Maybe PanePath -> IDEM ErrorsPane
getErrors Nothing = forceGetPane (Right "*Errors")
getErrors (Just pp) = forceGetPane (Left pp)
-- | Repopulates the Errors pane
fillErrorList :: Bool -- ^ Whether to display the Errors pane
-> IDEAction
fillErrorList False = getPane >>= maybe (return ()) fillErrorList'
fillErrorList True = getErrors Nothing >>= \ p -> fillErrorList' p >> displayPane p False
-- | Fills the pane with the error list from the IDE state
fillErrorList' :: ErrorsPane -> IDEAction
fillErrorList' pane = do
liftIO $ debugM "leksah" "fillErrorList'"
refs <- F.toList <$> readIDE errorRefs
visibleRefs <- filterM (isRefVisible pane) refs
ac <- liftIO $ readIORef (autoClose pane)
when (null refs && ac) . void $ closePane pane
updateFilterButtons pane
let store = errorStore pane
let view = treeView pane
forestStoreClear store
forM_ (zip visibleRefs [0..]) $ \(ref, n) -> do
emptyPath <- treePathNewFromIndices' []
forestStoreInsert store emptyPath n (ERLogRef ref)
when (length (T.lines (refDescription ref)) > 1) $ do
p <- treePathNewFromIndices' [fromIntegral n]
forestStoreInsert store p 0 (ERFullMessage (refDescription ref) (Just ref))
treeViewExpandToPath view =<< treePathNewFromIndices' [fromIntegral n,0]
-- | Returns whether the `LogRef` should be visible in the errors pane
isRefVisible :: MonadIO m => ErrorsPane -> LogRef -> m Bool
isRefVisible pane ref =
case logRefType ref of
ErrorRef -> toggleButtonGetActive (errorsButton pane)
WarningRef -> toggleButtonGetActive (warningsButton pane)
LintRef -> toggleButtonGetActive (suggestionsButton pane)
TestFailureRef -> toggleButtonGetActive (testFailsButton pane)
_ -> return False
-- | Add any LogRef to the Errors pane at a given index
addErrorToList :: Bool -- ^ Whether to display the pane
-> Int -- ^ The index to insert at
-> LogRef
-> IDEAction
addErrorToList False index lr = getPane >>= maybe (return ()) (addErrorToList' index lr)
addErrorToList True index lr = getErrors Nothing >>= \ p -> addErrorToList' index lr p >> displayPane p False
-- | Add a 'LogRef' at a specific index to the Errors pane
addErrorToList' :: Int -> LogRef -> ErrorsPane -> IDEAction
addErrorToList' unfilteredIndex ref pane = do
liftIO $ debugM "leksah" "addErrorToList'"
visible <- isRefVisible pane ref
updateFilterButtons pane
when visible $ do
refs <- F.toList <$> readIDE errorRefs
index <- length <$> filterM (isRefVisible pane) (take unfilteredIndex refs)
ac <- liftIO $ readIORef (autoClose pane)
let store = errorStore pane
let view = treeView pane
emptyPath <- treePathNewFromIndices' []
forestStoreInsert store emptyPath index (ERLogRef ref)
when (length (T.lines (refDescription ref)) > 1) $ do
p <- treePathNewFromIndices' [fromIntegral index]
forestStoreInsert store p 0 (ERFullMessage (refDescription ref) (Just ref))
treeViewExpandToPath view =<< treePathNewFromIndices' [fromIntegral index,0]
-- | Updates the filter buttons in the Error Pane
updateFilterButtons :: ErrorsPane -> IDEAction
updateFilterButtons pane = do
liftIO $ debugM "leksah" "updateFilterButtons"
let numRefs refType = length . filter ((== refType) . logRefType) . F.toList <$> readIDE errorRefs
let setLabel name amount button = buttonSetLabel button (name <> " (" <> T.pack (show amount) <> ")" )
numErrors <- numRefs ErrorRef
numWarnings <- numRefs WarningRef
numSuggestions <- numRefs LintRef
numTestFails <- numRefs TestFailureRef
setLabel "Errors" numErrors (errorsButton pane)
setLabel "Warnings" numWarnings (warningsButton pane)
setLabel "Suggestions" numSuggestions (suggestionsButton pane)
setLabel "Test Failures" numTestFails (testFailsButton pane)
widgetShowAll (vbox pane)
-- | Get the currently selected error
getSelectedError :: TreeView
-> ForestStore ErrorRecord
-> IO (Maybe LogRef)
getSelectedError treeView store = do
liftIO $ debugM "leksah" "getSelectedError"
treeSelection <- treeViewGetSelection treeView
paths <- treeSelectionGetSelectedRows' treeSelection
case paths of
path:_ -> do
val <- forestStoreGetValue store path
case val of
ERLogRef logRef -> return (Just logRef)
_ -> return Nothing
_ -> return Nothing
-- | Select a 'LogRef' in the Errors pane if it is visible
selectError :: Maybe LogRef -- ^ When @Nothing@, the first row in the list is selected
-> IDEAction
selectError mbLogRef = do
liftIO $ debugM "leksah" "selectError"
(mbPane :: Maybe ErrorsPane) <- getPane
errors <- getErrors Nothing
when (isNothing mbPane) $ do
liftIO $ writeIORef (autoClose errors) True
displayPane errors False
reifyIDE $ \ideR -> do
selection <- treeViewGetSelection (treeView errors)
forest <- forestStoreGetForest (errorStore errors)
case mbLogRef of
Nothing -> do
unless (null forest) $ do
childPath <- treePathNewFromIndices' [0]
treeViewScrollToCell (treeView errors) (Just childPath) noTreeViewColumn False 0.0 0.0
treeSelectionUnselectAll selection
Just lr -> do
let mbPath = forestFind forest (ERLogRef lr)
forM_ mbPath $ \path' -> do
path <- treePathNewFromIndices' path'
treeViewScrollToCell (treeView errors) (Just path) noTreeViewColumn False 0.0 0.0
treeSelectionSelectPath selection path
where
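-- Added explanatory comment: depth-first search for a value in the forest,
-- accumulating the tree path ([Int32]) that leads to it; 'sibling' bumps the
-- last index so the search can move on to the next subtree at the same level.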
forestFind :: Eq a => Forest a -> a -> Maybe [Int32]
forestFind = forestFind' [0]
where
forestFind' path [] _ = Nothing
forestFind' path (Node x trees : forest) y
| x == y = Just path
| otherwise = forestFind' (path ++ [0]) trees y
<|> forestFind' (sibling path) forest y
sibling [n] = [n+1]
sibling (x:xs) = x:sibling xs
sibling [] = error "Error in selectError sibling function"
contextMenuItems :: ErrorRecord -> TreePath -> ForestStore ErrorRecord -> IDEM [[(Text, IDEAction)]]
contextMenuItems record path store = return
[("Resolve Errors", resolveErrors) :
case record of
ERLogRef logRef -> resolveMenuItems logRef ++ [clipboardItem (refDescription logRef)]
ERIDE msg -> [clipboardItem msg]
ERPackage _ msg -> [clipboardItem msg]
_ -> []
]
where
clipboardItem str = ("Copy message to clipboard",
atomIntern "CLIPBOARD" False >>= clipboardGet >>= (\c -> clipboardSetText c str (-1)))
-- | Highlight an error referred to by the 'TreePath' in the given 'TreeViewColumn'
errorsSelect :: IDERef
-> ForestStore ErrorRecord
-> TreePath
-> TreeViewColumn
-> IO ()
errorsSelect ideR store path _ = do
liftIO $ debugM "leksah" "errorsSelect"
record <- forestStoreGetValue store path
case record of
ERLogRef logRef -> reflectIDE (setCurrentError (Just logRef)) ideR
ERFullMessage _ (Just ref) -> reflectIDE (setCurrentError (Just ref)) ideR
_ -> return ()
-- | Select the matching errors for a 'SrcSpan' in the Errors
-- pane, or none at all
selectMatchingErrors :: Maybe SrcSpan -- ^ When @Nothing@, unselects any errors in the pane
-> IDEAction
selectMatchingErrors mbSpan = do
liftIO $ debugM "leksah" "selectMatchingErrors"
mbErrors <- getPane
forM_ mbErrors $ \pane -> do
treeSel <- treeViewGetSelection (treeView pane)
treeSelectionUnselectAll treeSel
forM_ mbSpan $ \span -> do
spans <- map logRefSrcSpan . F.toList <$> readIDE errorRefs
matches <- matchingRefs span . F.toList <$> readIDE errorRefs
forM_ matches $ \ref ->
selectError (Just ref)
matchingRefs :: SrcSpan -> [LogRef] -> [LogRef]
matchingRefs span refs =
-- make the path of the SrcSpan in the LogRef absolute, so that the comparison with the given SrcSpan works correctly
let toAbsolute ref = ref {logRefSrcSpan = (logRefSrcSpan ref) {srcSpanFilename = logRefFullFilePath ref}}
in filter (\ref -> filesMatch (logRefSrcSpan (toAbsolute ref)) span && span `insideOf` logRefSrcSpan (toAbsolute ref)) refs
where
filesMatch span span' = srcSpanFilename span == srcSpanFilename span'
-- Test whether the first span is inside of the second
insideOf (SrcSpan _ lStart cStart lEnd cEnd) (SrcSpan _ lStart' cStart' lEnd' cEnd')
= (lStart, cStart) <= (lEnd', cEnd')
&& (lEnd, cEnd) >= (lStart', cStart')
| JPMoresmau/leksah | src/IDE/Pane/Errors.hs | gpl-2.0 | 20,470 | 123 | 20 | 4,974 | 4,908 | 2,586 | 2,322 | 392 | 5 |
module Eval (evalProgram, evalExpression, evalString, displayStack, builtins) where
import Types
import Parser
import Data.Bits
import Data.Char
import Data.List
import Math.NumberTheory.Powers
import Math.NumberTheory.Primes
import qualified Data.Map as M
evalProgram :: SmProgram -> SmFunction
evalProgram = flip $ foldl $ flip evalExpression
evalExpression :: SmExpression -> SmFunction
evalExpression (SmOperator o) = case M.lookup o builtins of
Just f -> f
Nothing -> id
evalExpression x = (x:)
evalString :: String -> SmFunction
evalString = evalProgram . smParse
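-- Illustrative example (added commentary, not from the original source): a
-- Samau program is just a list of expressions applied to a stack, so
--
--   evalProgram [SmInt 1, SmInt 2, SmOperator '+'] []  ==  [SmInt 3]
--
-- Literals are pushed, operators are looked up in 'builtins' and transform the
-- stack, and unknown operators are ignored.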
-- Some helper functions
isTruthy :: SmExpression -> Bool
isTruthy (SmInt 0) = False
isTruthy (SmFloat 0) = False
isTruthy (SmString "") = False
isTruthy (SmList []) = False
isTruthy _ = True
isFalsy :: SmExpression -> Bool
isFalsy = not . isTruthy
isAtom :: SmExpression -> Bool
isAtom (SmInt _) = True
isAtom (SmFloat _) = True
isAtom (SmChar _) = True
isAtom (SmOperator _) = True
isAtom _ = False
fromBool :: Bool -> SmExpression
fromBool True = SmInt 1
fromBool False = SmInt 0
head' :: SmStack -> SmExpression
head' (x:_) = x
head' [] = SmInt 0
evalIfList :: SmExpression -> SmFunction
evalIfList (SmList q) = evalProgram q
evalIfList (SmString q) = evalString q
evalIfList x = evalExpression x
evalIfList1 :: SmExpression -> SmStack -> SmExpression
evalIfList1 x s = head' $ evalIfList x s
displayExpression :: SmExpression -> String
displayExpression (SmInt x)
| x < 0 = "_" ++ show (-x) ++ " "
| otherwise = show x ++ " "
displayExpression (SmFloat x)
| x < 0 = "_" ++ show (-x) ++ " "
| otherwise = show x ++ " "
displayExpression (SmChar x) = '\'':[x]
displayExpression (SmString xs) = show xs
displayExpression (SmList xs) = "[" ++ (xs >>= displayExpression) ++ "]"
displayExpression (SmOperator x) = [x]
displayStack :: SmStack -> String
displayStack (SmString xs:_) = xs
displayStack (SmList xs:_) = xs >>= displayExpression
displayStack (SmChar x:_) = [x]
displayStack (x:_) = displayExpression x
displayStack [] = ""
smPopd :: SmFunction
smPopd (x1:x2:s) = x1:s
smPopd s = s
toListFunction :: SmFunction -> SmFunction
toListFunction f (xs:s) = SmList (map (head' . f . (:s)) $ toList xs):s
toListFunction2 :: SmFunction -> SmFunction
toListFunction2 f (x1:x2:s)
| isAtom x1 && isAtom x2 = f (x1:x2:s)
| isAtom x2 = smPopd $ toListFunction f (x1:x2:s)
| isAtom x1 = smPopd $ toListFunction (f . (x1:)) (x2:s)
| otherwise = SmList (zipWith (\y1 y2 -> head' $ f (y1:y2:s)) (toList x1) (toList x2)):s
zipWith' :: a -> b -> (a -> b -> c) -> [a] -> [b] -> [c]
zipWith' _ _ _ [] [] = []
zipWith' a0 b0 f (a:as) [] = f a b0 : zipWith' a0 b0 f as []
zipWith' a0 b0 f [] (b:bs) = f a0 b : zipWith' a0 b0 f [] bs
zipWith' a0 b0 f (a:as) (b:bs) = f a b : zipWith' a0 b0 f as bs
toListFunction2' :: SmExpression -> SmFunction -> SmFunction
toListFunction2' x f (xs1:xs2:s) = SmList (zipWith' x x (\x1 x2 -> head' $ f (x1:x2:s)) (toList xs1) (toList xs2)):s
toBase :: Integer -> Integer -> [Integer]
toBase b 0 = []
toBase 1 x = genericTake x $ repeat 1
toBase b x = (mod x b):toBase b (div x b)
builtins = M.fromList [('!', smPop),
('#', smSize),
('$', smSwap),
('%', smMod),
('&', smAnd),
('(', smPred),
(')', smSucc),
('*', smTimes),
('+', smAdd),
(',', smRange0),
('-', smMinus),
('.', smJoin),
('/', smDivide),
(':', smCons),
(';', smDup),
('<', smLess),
('=', smSame),
('>', smGreater),
('?', smIf),
('@', smRoll),
('A', smAnswer),
('B', smFromBase),
('D', smToBase),
('E', smE),
('F', smFoldList),
('G', smFold1List),
('N', smNaturals),
('O', smPi),
('P', smPrimes),
('W', smNestList),
('Y', smFixedPointList),
('\\', smUncons),
('^', smPower),
('_', smNegative),
('c', smTake),
('d', smDip),
('e', smDrop),
('f', smFold),
('g', smFold1),
('i', smI),
('m', smMap),
('o', smOuter),
('s', smFilter),
('t', smTwice),
('x', smX),
('w', smNest),
('y', smFixedPoint),
('z', smZipWith),
('{', smUnstack),
('|', smOr),
('}', smStack),
('~', smNot),
('Α', smAbs),
('Δ', smDiff),
('Ε', smExp),
('Λ', smLog),
('Π', smProduct),
('Σ', smSum),
('Φ', smFactor),
('γ', smGcd),
('λ', smLcm),
('ν', smNthPrime),
('ξ', smMin),
('ο', smMax),
('π', smPrimePi),
('σ', smDivisorSigma),
('φ', smEulerPhi),
('░', smToInt),
('▒', smToFloat),
('▓', smToChar),
('│', smDivisible),
('╡', smIndex),
('╢', smElem),
('╖', smTail),
('╕', smHead),
('╣', smSubsets),
('║', smReverse),
('╗', smTails),
('╝', smIntersperse),
('╜', smSort),
('╛', smNub),
('└', smFloor),
('─', smRound),
('╟', smPosition),
('╚', smConcat),
('╔', smInits),
('╦', smRotate),
('╠', smPermutations),
('═', smEq),
('╧', smCycle),
('╒', smLast),
('╓', smInit),
('┌', smCeiling),
('█', smToString),
('▄', smToList),
('▌', smReadOneNumber),
('▐', smReadNumbers),
('Ά', smBitAnd),
('Έ', smBitOr),
('Ή', smBitXor),
('Ί', smBitNot),
('±', smSign),
('≥', smGreaterEq),
('≤', smLessEq),
('÷', smDiv),
('∙', smConvolve),
('·', smDot),
('√', smSqrt),
('²', smIsSquare),
('ⁿ', smConvPower)]
-- Built-in functions, sorted by name
-- SmOperator 'Α'
smAbs (SmInt x:s) = SmInt (abs x):s
smAbs (SmFloat x:s) = SmFloat (abs x):s
smAbs (x:s)
| isAtom x = SmInt (abs $ toInt x):s
| otherwise = toListFunction smAbs (x:s)
smAbs s = s
-- SmOperator '+'
smAdd (SmInt x1:SmInt x2:s) = SmInt (x1 + x2):s
smAdd (SmInt x1:SmFloat x2:s) = SmFloat (fromInteger x1 + x2):s
smAdd (SmFloat x1:SmInt x2:s) = SmFloat (x1 + fromInteger x2):s
smAdd (SmFloat x1:SmFloat x2:s) = SmFloat (x1 + x2):s
smAdd (SmChar x1:x2:s) = smAdd $ smToInt (SmChar x1:x2:s)
smAdd (x1:SmChar x2:s) = smAdd (SmChar x2:x1:s)
smAdd (x1:x2:s)
| isAtom x1 && isAtom x2 = smAdd $ smToInt $ x1:smToInt (x2:s)
| isAtom x2 = toListFunction2 smAdd (x1:x2:s)
| isAtom x1 = smAdd (x2:x1:s)
| otherwise = toListFunction2' (SmInt 0) smAdd (x1:x2:s)
smAdd [] = [SmInt 0]
smAdd s = s
-- SmOperator '&'
smAnd (x1:x2:s) = fromBool (isTruthy x1 && isTruthy x2):s
smAnd [] = [SmInt 1]
smAnd s = s
-- SmOperator 'A'
smAnswer s = SmInt 42:s
-- SmOperator 'Ά'
smBitAnd (x1:x2:s)
| isAtom x1 && isAtom x2 = SmInt (toInt x1 .&. toInt x2):s
| otherwise = SmList (union (toList x2) (toList x1)):s
smBitAnd s = s
-- SmOperator 'Ί'
smBitNot (x1:x2:s)
| isAtom x1 = SmInt (complement (toInt x1)):x2:s
| otherwise = SmList (toList x1 \\ toList x2):s
smBitNot s = s
-- SmOperator 'Έ'
smBitOr (x1:x2:s)
| isAtom x1 && isAtom x2 = SmInt (toInt x1 .|. toInt x2):s
| otherwise = SmList (intersect (toList x2) (toList x1)):s
smBitOr s = s
-- SmOperator 'Ή'
smBitXor (x1:x2:s)
| isAtom x1 && isAtom x2 = SmInt (xor (toInt x1) (toInt x2)):s
| otherwise = SmList (union (toList x2) (toList x1) \\ intersect (toList x2) (toList x1)):s
smBitXor s = s
-- SmOperator '┌'
smCeiling (SmInt x:s) = SmInt x:s
smCeiling (SmFloat x:s) = SmInt (ceiling x):s
smCeiling (SmChar x:s) = SmChar (toLower x):s
smCeiling (SmList x:s) = toListFunction smCeiling (SmList x:s)
smCeiling (SmString x:s) = smToString $ toListFunction smCeiling (SmString x:s)
smCeiling s = s
-- SmOperator '╚'
smConcat (x:s)
| isAtom x = x:s
| otherwise = smFold (SmOperator '.':x:SmList []:s)
smConcat s = [SmList []]
-- SmOperator ':'
smCons (SmList xs:x:s) = SmList (x:xs):s
smCons (SmString xs:x:s) = SmString (toString x ++ xs):s
smCons (SmChar x1:x2:s) = SmString (toString x2 ++ [x1]):s
smCons (x1:x2:s) = SmList [x2,x1]:s
smCons s = [SmList s]
-- SmOperator '∙'
smConvolve (SmList xs1:SmList []:s) = SmList []:s
smConvolve (SmList []:SmList xs2:s) = SmList []:s
smConvolve (SmList (x1:xs1):SmList xs2:s) = smAdd $ head (smTimes $ x1:SmList xs2:s):smCons (smSwap $ SmInt 0:smConvolve (SmList xs1:SmList xs2:s))
smConvolve (x1:x2:s)
| isAtom x1 && isAtom x2 = smFromBase $ SmInt 2:(smMod $ SmInt 2:smConvolve (smToBase $ SmInt 2:x1:(smToBase $ SmInt 2:x2:s)))
| otherwise = smConvolve $ smToList (x1:smToList (x2:s))
smConvolve s = s
-- SmOperator 'ⁿ'
smConvPower (x1:x2:s)
| isAtom x1 && isAtom x2 = smNest $ (SmList [x2, SmOperator '∙']:x1:SmInt 1:s)
| isAtom x1 = smNest $ (SmList [x2, SmOperator '∙']:x1:SmList [SmInt 1]:s)
| otherwise = smPopd $ toListFunction smConvPower (x1:x2:s)
smConvPower s = s
-- SmOperator '╧'
smCycle (SmChar x:s) = SmString (repeat x):s
smCycle (SmString xs:s) = SmString (cycle xs):s
smCycle (SmList xs:s) = SmList (cycle xs):s
smCycle (x:s) = SmList (repeat x):s
smCycle s = s
-- SmOperator 'Δ'
smDiff (SmList []:s) = SmList []:s
smDiff (SmList xs:s) = smMinus (SmList xs:SmList (tail xs):s)
smDiff s = smDiff $ smToList s
-- SmOperator 'd'
smDip (q:x:s) = x:evalIfList q s
smDip s = s
-- SmOperator '÷'
smDiv (SmInt x1:SmInt x2:s) = SmInt (div x2 x1):s
smDiv (SmInt x1:SmFloat x2:s) = smFloor $ smDivide $SmInt x1:SmFloat x2:s
smDiv (SmFloat x1:SmInt x2:s) = smFloor $ smDivide $SmFloat x1:SmInt x2:s
smDiv (SmFloat x1:SmFloat x2:s) = smFloor $ smDivide $SmFloat x1:SmFloat x2:s
smDiv (SmChar x1:x2:s) = smDiv $ smToInt (SmChar x1:x2:s)
smDiv (x1:SmChar x2:s) = smDiv $ x1:smToInt (SmChar x2:s)
smDiv (x1:x2:s)
| isAtom x1 && isAtom x2 = smDiv $ smToInt $ x1:smToInt (x2:s)
| otherwise = toListFunction2 smDiv (x1:x2:s)
-- SmOperator '/'
smDivide (x1:x2:s)
| isAtom x1 && isAtom x2 = SmFloat (toFloat x2 / toFloat x1):s
| otherwise = toListFunction2 smDivide (x1:x2:s)
smDivide [] = [SmFloat 1]
smDivide s = smToFloat s
-- SmOperator '│'
smDivisible (x1:x2:s)
| isAtom x1 && isAtom x2 = smNot $ smMod (x1:x2:s)
| otherwise = toListFunction2 smDivisible (x1:x2:s)
smDivisible s = s
-- SmOperator 'σ'
smDivisorSigma (x1:x2:s)
| isAtom x1 && isAtom x2 = SmInt (sigma (fromInteger $ toInt x1) (toInt x2)):s
| otherwise = toListFunction2 smDivisorSigma (x1:x2:s)
smDivisorSigma s = s
-- SmOperator '·'
smDot (x1:x2:s)
| isAtom x1 && isAtom x2 = smTimes (x1:x2:s)
| otherwise = smSum $ smTimes (x1:x2:s)
smDot s = s
-- SmOperator 'e'
smDrop (x:SmList xs:s)
| isAtom x = SmList (genericDrop (toInt x) xs):s
| otherwise = SmList (dropWhile (isTruthy . evalIfList1 x . (:s)) xs):s
smDrop (x:SmString xs:s) = smDrop $ x:smToList (SmString xs:s)
smDrop s = s
-- SmOperator ';'
smDup (x:s) = x:x:s
smDup s = s
-- SmOperator 'E'
smE s = SmFloat (exp 1):s
-- SmOperator 'Ε'
smExp (x:s)
| isAtom x = SmFloat (exp $ toFloat x):s
| otherwise = toListFunction smExp (x:s)
smExp s = s
-- SmOperator '╢'
smElem (x:SmList xs:s) = fromBool (elem x xs):s
smElem (x:SmString xs:s) = fromBool (elem (toChar x) xs):s
smElem s = s
-- SmOperator '═'
smEq (x1:x2:s)
| isAtom x1 && isAtom x2 = fromBool (toFloat x2 == toFloat x1):s
| otherwise = toListFunction2 smEq (x1:x2:s)
smEq s = s
-- SmOperator 'φ'
smEulerPhi (x:s)
| isAtom x = SmInt (totient $ toInt x):s
| otherwise = toListFunction smEulerPhi (x:s)
smEulerPhi s = s
-- SmOperator 'Φ'
smFactor (x:s)
| isAtom x = SmList (map (\(p, n) -> SmList [SmInt p, SmInt $ fromIntegral n]) $ factorise $ toInt x):s
| otherwise = toListFunction smFactor (x:s)
smFactor s = s
-- SmOperator 's'
smFilter (q:SmList xs:s) = SmList (filter (isTruthy . evalIfList1 q . (:s)) xs):s
smFilter (q:SmString xs:s) = SmString (filter (isTruthy . evalIfList1 q . (:s) . SmChar) xs):s
smFilter s = s
-- SmOperator 'y'
smFixedPoint (q:s)
| evalIfList q s == s = s
| otherwise = smFixedPoint (q:evalIfList q s)
smFixedPoint s = s
-- SmOperator 'Y'
smFixedPointList (q:y:s)
| evalIfList1 q (y:s) == y = SmList [y]:s
| otherwise = let SmList z:u = smFixedPointList (q:evalIfList q (y:s)) in SmList (y:z):u
smFixedPointList s = s
-- SmOperator '└'
smFloor (SmInt x:s) = SmInt x:s
smFloor (SmFloat x:s) = SmInt (floor x):s
smFloor (SmChar x:s) = SmChar (toLower x):s
smFloor (SmList x:s) = toListFunction smFloor (SmList x:s)
smFloor (SmString x:s) = smToString $ toListFunction smFloor (SmString x:s)
smFloor s = s
-- SmOperator 'f'
smFold (q:SmList []:s) = s
smFold (q:SmList (x:xs):s) = smFold (q:SmList xs:evalIfList q (x:s))
smFold (q:SmString xs:s) = smFold (q:SmList (toList $ SmString xs):s)
smFold (q:x:s) = smFold (q:smRange0 (x:s))
smFold s = s
-- SmOperator 'g'
smFold1 (q:x:s)
| isAtom x = smFold1 (q:smRange0 (x:s))
| otherwise = smFold $ q:smUncons (x:s)
smFold1 s = s
-- SmOperator 'G'
smFold1List (q:x:s)
| isAtom x = smFold1List (q:smRange0 (x:s))
| otherwise = smFoldList $ q:smUncons (x:s)
smFold1List s = s
-- SmOperator 'F'
smFoldList (q:SmList []:y:s) = SmList [y]:s
smFoldList (q:SmList (x:xs):y:s) = let SmList z:u = smFoldList (q:SmList xs:evalIfList q (x:y:s)) in SmList (y:z):u
smFoldList (q:SmString xs:s) = smFoldList (q:SmList (toList $ SmString xs):s)
smFoldList (q:x:s) = smFoldList (q:smRange0 (x:s))
smFoldList s = s
-- SmOperator 'B'
smFromBase (x1:x2:s)
| isAtom x1 = smFold $ SmList [SmOperator '$', x1, SmOperator '*', SmOperator '+']:smReverse (x2:SmInt 0:s)
| otherwise = smPopd $ toListFunction smFromBase (x1:x2:s)
smFromBase s = s
-- SmOperator 'γ'
smGcd (x1:x2:s)
| isAtom x1 && isAtom x2 = SmInt (gcd (toInt x1) (toInt x2)):s
| otherwise = toListFunction2 smGcd (x1:x2:s)
smGcd s = s
-- SmOperator '>'
smGreater (x1:x2:s)
| isAtom x1 && isAtom x2 = fromBool (toFloat x2 > toFloat x1):s
| otherwise = toListFunction2 smGreater (x1:x2:s)
smGreater s = s
-- SmOperator '≥'
smGreaterEq (x1:x2:s)
| isAtom x1 && isAtom x2 = fromBool (toFloat x2 >= toFloat x1):s
| otherwise = toListFunction2 smGreaterEq (x1:x2:s)
smGreaterEq s = s
-- SmOperator '╕'
smHead (SmList (x:xs):s) = x:s
smHead (SmList []:s) = s
smHead (SmString (x:xs):s) = SmChar x:s
smHead (SmString []:s) = s
smHead s = s
-- SmOperator 'i'
smI (q:s) = evalIfList q s
smI s = s
-- SmOperator '?'
smIf (q1:q2:t:s) = case evalIfList t s of
u:_ | isTruthy u -> evalIfList q2 s
| otherwise -> evalIfList q1 s
_ -> evalIfList q1 s
smIf s = s
-- SmOperator '╡'
smIndex (x:SmList xs:s)
| isAtom x = genericIndex (cycle xs) (toInt x):s
| otherwise = toListFunction smIndex (x:SmList xs:s)
smIndex (x:SmString xs:s)
| isAtom x = SmChar (genericIndex (cycle xs) (toInt x)):s
| otherwise = smToString $ toListFunction smIndex (x:SmString xs:s)
smIndex s = s
-- SmOperator '╓'
smInit (SmList (x:xs):s) = SmList (init $ x:xs):s
smInit (SmList []:s) = s
smInit (SmString (x:xs):s) = SmString (init $ x:xs):s
smInit (SmString []:s) = s
smInit s = s
-- SmOperator '╔'
smInits (SmList xs:s) = SmList (map SmList $ inits xs):s
smInits (SmString xs:s) = SmList (map SmString $ inits xs):s
smInits s = s
-- SmOperator '╝'
smIntersperse (x:SmList xs:s) = SmList (intersperse x xs):s
smIntersperse (x:SmString xs:s) = SmString (intersperse (toChar x) xs):s
smIntersperse s = s
-- SmOperator 'τ'
smIsPrime (x:s)
| isAtom x = fromBool (isCertifiedPrime $ toInt x):s
| otherwise = toListFunction smIsPrime (x:s)
smIsPrime s = s
-- SmOperator '²'
smIsSquare (SmInt x:s) = fromBool (isSquare x):s
smIsSquare (SmFloat x:s)
| x == fromInteger (floor x) = fromBool (isSquare $ floor x):s
| otherwise = SmInt 0:s
smIsSquare (x:s)
| isAtom x = smIsSquare $ smToInt $ (x:s)
| otherwise = toListFunction smIsSquare (x:s)
smIsSquare s = s
-- SmOperator '.'
smJoin (SmList xs1:SmList xs2:s) = SmList (xs2 ++ xs1):s
smJoin (SmString xs1:SmString xs2:s) = SmString (xs2 ++ xs1):s
smJoin (x:SmString xs:s) = SmString (xs ++ toString x):s
smJoin (SmString xs:x:s) = SmString (toString x ++ xs):s
smJoin (x:SmList xs:s) = SmList (xs ++ toList x):s
smJoin (x1:x2:s) = smJoin (x1:SmList [x2]:s)
smJoin [SmList xs] = [SmList xs]
smJoin s = [SmList s]
-- SmOperator '╒'
smLast (SmList (x:xs):s) = (last $ x:xs):s
smLast (SmList []:s) = s
smLast (SmString (x:xs):s) = SmChar (last $ x:xs):s
smLast (SmString []:s) = s
smLast s = s
-- SmOperator 'λ'
smLcm (x1:x2:s)
| isAtom x1 && isAtom x2 = SmInt (lcm (toInt x1) (toInt x2)):s
| otherwise = toListFunction2 smLcm (x1:x2:s)
smLcm s = s
-- SmOperator '<'
smLess (x1:x2:s)
| isAtom x1 && isAtom x2 = fromBool (toFloat x2 < toFloat x1):s
| otherwise = toListFunction2 smLess (x1:x2:s)
smLess s = s
-- SmOperator '≤'
smLessEq (x1:x2:s)
| isAtom x1 && isAtom x2 = fromBool (toFloat x2 <= toFloat x1):s
| otherwise = toListFunction2 smLessEq (x1:x2:s)
smLessEq s = s
-- SmOperator 'Λ'
smLog (x:s)
| isAtom x = SmFloat (log $ toFloat x):s
| otherwise = toListFunction smLog (x:s)
smLog s = s
-- SmOperator 'm'
smMap (q:x:s)
| isAtom x = smMap (q:smRange0 (x:s))
| otherwise = toListFunction (evalIfList q) (x:s)
smMap s = s
-- SmOperator 'ο'
smMax (x1:x2:s)
| isAtom x1 && isAtom x2 = max x1 x2:s
| otherwise = toListFunction2 smMax (x1:x2:s)
smMax s = s
-- SmOperator 'ξ'
smMin (x1:x2:s)
| isAtom x1 && isAtom x2 = min x1 x2:s
| otherwise = toListFunction2 smMin (x1:x2:s)
smMin s = s
-- SmOperator '-'
smMinus s = smAdd $ smNegative s
-- SmOperator '%'
smMod (x1:x2:s) = smMinus (y1:x2:s) where
y1:_ = smTimes $ y2:x1:s
y2:_ = smDiv $ x1:x2:s
smMod s = s
-- SmOperator 'N'
smNaturals s = SmList (map SmInt [0..]):s
-- SmOperator '_'
smNegative (SmInt x:s) = SmInt (-x):s
smNegative (SmFloat x:s) = SmFloat (-x):s
smNegative (SmChar x:s) = SmInt (- (toInt $ SmChar x)):s
smNegative (x:s)
| isAtom x = x:s
| otherwise = toListFunction smNegative (x:s)
smNegative s = s
-- SmOperator 'w'
smNest (q:t:s)
| isAtom t = if toInt t <= 0 then s else smNest $ q:smPred (t:evalIfList q s)
| otherwise = case evalIfList t s of
u:_ | isTruthy u -> smNest (q:t:evalIfList q s)
| otherwise -> s
_ -> []
smNest s = s
-- SmOperator 'W'
smNestList (q:t:y:s)
| isAtom t = if toInt t <= 0 then SmList [y]:s else let SmList z:u = smNestList (q:smPred (t:evalIfList q (y:s))) in SmList (y:z):u
| otherwise = case evalIfList t s of
w:_ | isTruthy w -> let SmList z:u = smNestList (q:t:evalIfList q (y:s)) in SmList (y:z):u
| otherwise -> SmList [y]:s
smNestList s = s
-- SmOperator '~'
smNot (x:s) = fromBool (isFalsy x):s
smNot s = [SmInt 0]
-- SmOperator 'ν'
smNthPrime (x:s)
| isAtom x = SmInt (nthPrime $ toInt x):s
| otherwise = toListFunction smNthPrime (x:s)
smNthPrime s = s
-- SmOperator '╛'
smNub (SmList xs:s) = SmList (nub xs):s
smNub (SmString xs:s) = SmString (nub xs):s
smNub s = s
-- SmOperator '|'
smOr (x1:x2:s) = fromBool (isTruthy x1 || isTruthy x2):s
smOr [] = [SmInt 0]
smOr s = s
-- SmOperator 'o'
smOuter (q:x1:x2:s)
| isAtom x1 = smOuter (q:smRange0 (x1:x2:s))
| isAtom x2 = smOuter (q:x1:smRange0 (x2:s))
| otherwise = toListFunction (toListFunction (evalIfList q) . (x1:)) (x2:s)
smOuter s = s
-- SmOperator '╠'
smPermutations (SmList xs:s) = SmList (map SmList $permutations xs):s
smPermutations (SmString xs:s) = SmList (map SmString $ permutations xs):s
smPermutations (x:s) = smPermutations $ smRange0 (x:s)
smPermutations s = s
-- SmOperator 'O'
smPi s = SmFloat pi:s
-- SmOperator '!'
smPop (x:s) = s
smPop s = s
-- SmOperator '╟'
smPosition (x:SmList xs:s) = SmList (map (SmInt . fromIntegral) $ elemIndices x xs):s
smPosition (x:SmString xs:s) = SmList (map (SmInt . fromIntegral) $ elemIndices (toChar x) xs):s
smPosition s = s
-- SmOperator '^'
smPower (SmInt x1:SmInt x2:s) = SmInt (x2 ^ x1):s
smPower (SmInt x1:SmFloat x2:s) = SmFloat (x2 ^ x1):s
smPower (SmFloat x1:SmInt x2:s) = SmFloat (fromInteger x2 ** x1):s
smPower (SmFloat x1:SmFloat x2:s) = SmFloat (x2 ** x1):s
smPower (SmChar x1:x2:s) = smPower $ smToInt (SmChar x1:x2:s)
smPower (x1:SmChar x2:s) = smPower $ x1:smToInt (SmChar x2:s)
smPower (x1:x2:s)
| isAtom x1 && isAtom x2 = smPower $ smToInt $ x1:smToInt (x2:s)
| otherwise = toListFunction2 smPower (x1:x2:s)
smPower s = s
-- SmOperator '('
smPred s = smMinus (SmInt 1:s)
-- SmOperator 'π'
smPrimePi (x:s)
| isAtom x = SmInt (primeCount $ toInt x):s
| otherwise = toListFunction smPrimePi (x:s)
smPrimePi s = s
-- SmOperator 'P'
smPrimes s = SmList (map SmInt primes):s
-- SmOperator 'Π'
smProduct (x:s)
| isAtom x = smProduct $ smUncons $ x:s
| otherwise = smFold (SmOperator '*':x:SmInt 1:s)
smProduct s = [SmInt 1]
-- SmOperator ','
smRange0 (x:s)
| isAtom x = let y = toInt x in SmList (map SmInt $ if y>=0 then [0..y-1] else [-y-1,-y-2..0]):s
| otherwise = smRange0 $ smSize (x:s)
smRange0 s = [SmList []]
-- SmOperator '▐'
smReadNumbers (SmString x:s) = readNumbers x:s
smReadNumbers s = s
-- SmOperator '▌'
smReadOneNumber (SmString x:s) = readOneNumber x:s
smReadOneNumber s = s
-- SmOperator '║'
smReverse (SmList xs:s) = SmList (reverse xs):s
smReverse (SmString xs:s) = SmString (reverse xs):s
smReverse s = s
-- SmOperator '@'
smRoll (x1:x2:x3:s) = x3:x1:x2:s
smRoll s = s
-- SmOperator '─'
smRound (SmInt x:s) = SmInt x:s
smRound (SmFloat x:s) = SmInt (round x):s
smRound (SmList x:s) = toListFunction smRound (SmList x:s)
smRound s = s
-- SmOperator '╦'
smRotate (x:SmList y:s)
| isAtom x = if toInt x == 0 || y == []
then SmList y:s
else if toInt x > 0
then smRotate (SmInt (toInt x - 1):SmList (tail y ++ [head y]):s)
else smRotate (SmInt (toInt x + 1):SmList ([last y] ++ init y):s)
| otherwise = toListFunction smRotate (x:SmList y:s)
smRotate (x:SmString y:s)
| isAtom x = smToString $ smRotate (x:smToList (SmString y:s))
| otherwise = toListFunction smRotate (x:SmString y:s)
smRotate (x:y:s)
| isAtom x = SmInt (rotate (toInt y) (fromInteger $ toInt x)):s
| otherwise = toListFunction smRotate (x:y:s)
smRotate s = s
-- SmOperator '='
smSame (x1:x2:s)
| x1 == x2 = SmInt 1:s
| otherwise = SmInt 0:s
smSame s = [SmInt 0]
-- SmOperator '±'
smSign (x:s)
| isAtom x = case compare (toFloat x) 0 of
GT -> SmInt 1:s
EQ -> SmInt 0:s
LT -> SmInt (-1):s
| otherwise = toListFunction smSign (x:s)
smSign s = s
-- SmOperator '#'
smSize (SmList xs:s) = SmInt (genericLength xs):s
smSize (SmString xs:s) = SmInt (genericLength xs):s
smSize (x:s) = SmInt 1:s
smSize s = SmInt 0:s
-- SmOperator '╜'
smSort (SmList xs:s) = SmList (sort xs):s
smSort (SmString xs:s) = SmString (sort xs):s
smSort s = s
-- SmOperator '√'
smSqrt s = smPower (SmFloat 0.5:s)
-- SmOperator '}'
smStack s = SmList s:s
-- SmOperator ')'
smSucc s = smAdd (SmInt 1:s)
-- SmOperator '╣'
smSubsets (SmList xs:s) = SmList (map SmList $subsequences xs):s
smSubsets (SmString xs:s) = SmList (map SmString $ subsequences xs):s
smSubsets (x:s) = smSubsets $ smRange0 (x:s)
smSubsets s = s
-- SmOperator 'Σ'
smSum (x:s)
| isAtom x = smSum $ smUncons $ x:s
| otherwise = smFold (SmOperator '+':x:SmInt 0:s)
smSum s = [SmInt 0]
-- SmOperator '$'
smSwap (x1:x2:s) = x2:x1:s
smSwap s = s
-- SmOperator '╖'
smTail (SmList (x:xs):s) = SmList xs:s
smTail (SmList []:s) = s
smTail (SmString (x:xs):s) = SmString xs:s
smTail (SmString []:s) = s
smTail s = s
-- SmOperator '╗'
smTails (SmList xs:s) = SmList (map SmList $ tails xs):s
smTails (SmString xs:s) = SmList (map SmString $ tails xs):s
smTails s = s
-- SmOperator 'c'
smTake (x:SmList xs:s)
| isAtom x = SmList (genericTake (toInt x) xs):s
| otherwise = SmList (takeWhile (isTruthy . evalIfList1 x . (:s)) xs):s
smTake (x:SmString xs:s) = smTake $ x:smToList (SmString xs:s)
smTake s = s
-- SmOperator '*'
smTimes (SmInt x1:SmInt x2:s) = SmInt (x1 * x2):s
smTimes (SmInt x1:SmFloat x2:s) = SmFloat (fromInteger x1 * x2):s
smTimes (SmFloat x1:SmInt x2:s) = SmFloat (x1 * fromInteger x2):s
smTimes (SmFloat x1:SmFloat x2:s) = SmFloat (x1 * x2):s
smTimes (SmChar x1:x2:s) = smTimes $ smToInt (SmChar x1:x2:s)
smTimes (x1:SmChar x2:s) = smTimes (SmChar x2:x1:s)
smTimes (x1:x2:s)
| isAtom x1 && isAtom x2 = smTimes $ smToInt $ x1:smToInt (x2:s)
| otherwise = toListFunction2 smTimes (x1:x2:s)
smTimes [] = [SmInt 1]
smTimes s = s
-- SmOperator 'D'
smToBase (x1:x2:s)
| isAtom x1 && isAtom x2 = SmList (map SmInt $ toBase (abs $ toInt x1) (abs $ toInt x2)):s
| otherwise = toListFunction2 smToBase (x1:x2:s)
smToBase s = s
-- SmOperator '▓'
smToChar (x:s)
| isAtom x = SmChar (toChar x):s
| otherwise = smToString $ toListFunction smToChar (x:s)
smToChar s = s
-- SmOperator '▒'
smToFloat (x:s)
| isAtom x = SmFloat (toFloat x):s
| otherwise = toListFunction smToFloat (x:s)
smToFloat s = s
-- SmOperator '░'
smToInt (x:s)
| isAtom x = SmInt (toInt x):s
| otherwise = toListFunction smToInt (x:s)
smToInt s = s
-- SmOperator '▄'
smToList (x:s) = SmList (toList x):s
smToList s = [SmList []]
-- SmOperator '█'
smToString (x:s) = SmString (toString x):s
smToString s = [SmString ""]
-- SmOperator 't'
smTwice (q:x1:x2:s) = evalIfList1 q (x1:s):evalIfList1 q (x2:s):s
smTwice s = smI s
-- SmOperator '\\'
smUncons (SmList (x:xs):s) = SmList xs:x:s
smUncons (SmString (x:xs):s) = SmString xs:SmChar x:s
smUncons (x:s)
| isAtom x = SmList (map SmInt [1..toInt x]):s
| otherwise = x:s
smUncons s = s
-- SmOperator '{'
smUnstack (x:s) = toList x
smUnstack s = s
-- SmOperator 'x'
smX (q:s) = evalIfList q (q:s)
smX s = s
-- SmOperator 'z'
smZipWith (q:x1:x2:s)
| isAtom x1 = smZipWith (q:smRange0 (x1:x2:s))
| isAtom x2 = smZipWith (q:x1:smRange0 (x2:s))
| otherwise = toListFunction2 (evalIfList q) (x1:x2:s)
smZipWith s = s
| AlephAlpha/Samau | OldSamau/Eval.hs | gpl-2.0 | 30,173 | 0 | 19 | 9,677 | 14,228 | 7,141 | 7,087 | 652 | 3 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE UndecidableInstances #-}
{- |
Module : Yi.CompletionTree
License : GPL-2
Maintainer : yi-devel@googlegroups.com
Stability : experimental
Portability : portable
Little helper for completion interfaces.
Intended to be imported qualified:
>import qualified Yi.CompletionTree as CT
-}
module Yi.CompletionTree (
-- * CompletionTree type
CompletionTree (CompletionTree),
-- * Lists
fromList, toList,
-- * Modification
complete, update,
-- * Debugging
pretty,
-- ** Lens
unCompletionTree
) where
import Control.Arrow (first)
import Data.Function (on)
import Data.List (partition, maximumBy, intercalate)
import qualified Data.Map.Strict as M
import Data.Map.Strict (Map)
import Data.Maybe (isJust, fromJust, listToMaybe, catMaybes)
import qualified Data.ListLike as LL
import Data.ListLike (ListLike)
import Lens.Micro.Platform (over, Lens', _2, (.~), (&))
import Data.Binary (Binary)
import Data.Semigroup (Semigroup)
-- | A CompletionTree is a map of partial completions.
--
-- Example:
--
-- fromList ["put","putStr","putStrLn","print","abc"]
--
-- Gives the following tree:
--
--            / \
--         "p"   "abc"
--         / \
--      "ut"   "rint"
--      / \
--  "Str"    ""
--    / \
-- "Ln"    ""
--
-- (The empty strings are needed to denote the end of a word)
-- (A CompletionTree is not limited to a binary tree)
newtype CompletionTree a = CompletionTree {_unCompletionTree :: (Map a (CompletionTree a))}
deriving (Semigroup, Monoid, Eq, Binary)
unCompletionTree :: Lens' (CompletionTree a) (Map a (CompletionTree a))
unCompletionTree f ct = (\unCompletionTree' -> ct {_unCompletionTree = unCompletionTree'}) <$>
f (_unCompletionTree ct)
instance (Ord a, Show a, ListLike a i) => Show (CompletionTree a) where
show ct = "fromList " ++ show (toList ct)
-- | This function converts a list of completable elements to a CompletionTree
-- It finds elements that share a common prefix and groups them.
--
-- prop> fromList . toList = id
fromList :: (Ord a, ListLike a i, Eq i) => [a] -> CompletionTree a
fromList [] = mempty
fromList (x:xs)
| x == mempty = over unCompletionTree (M.insert mempty mempty) (fromList xs)
| otherwise = case maximumBy' (compare `on` childrenIn xs) (tail $ LL.inits x) of
Nothing -> over unCompletionTree (M.insert x mempty) (fromList xs)
Just parent -> case first (x:) $ partition (parent `LL.isPrefixOf`) xs of
([_],rest) -> over unCompletionTree (M.insert parent mempty) $ fromList rest
(hasParent, rest) -> over unCompletionTree (M.insert parent (fromList $
map (fromJust . LL.stripPrefix parent) hasParent)) $ fromList rest
-- A parent is the prefix and the children are the items with the parent as prefix
where
childrenIn :: (ListLike a i, Eq i) => [a] -> a -> Int
childrenIn list parent = length $ filter (parent `LL.isPrefixOf`) list
-- | The largest element of a non-empty structure with respect to the
-- given comparison function, Nothing if there are multiple 'largest' elements.
maximumBy' :: Eq a => (a -> a -> Ordering) -> [a] -> Maybe a
maximumBy' cmp l | atleast 2 (== max') l = Nothing
| otherwise = Just max'
where max' = maximumBy cmp l
-- This short-circuits if the condition is met n times before the end of the list.
atleast :: Int -> (a -> Bool) -> [a] -> Bool
atleast 0 _ _ = True
atleast _ _ [] = False
atleast n cmp' (x:xs) | cmp' x = atleast (n - 1) cmp' xs
| otherwise = atleast n cmp' xs
-- | Complete as much as possible without guessing.
--
-- Examples:
--
-- >>> complete $ fromList ["put","putStrLn","putStr"]
-- ("put", fromList ["","Str","StrLn"])
--
-- >>> complete $ fromList ["put","putStr","putStrLn","abc"]
-- ("", fromList ["put","putStr","putStrLn","abc"])
complete :: (Eq i, Ord a, ListLike a i) => CompletionTree a -> (a, CompletionTree a)
complete (CompletionTree ct)
| M.size ct == 1 = if snd (M.elemAt 0 ct) == mempty
then M.elemAt 0 ct & _2 .~ fromList [mempty]
else M.elemAt 0 ct
| otherwise = (mempty,CompletionTree ct)
-- | Update the CompletionTree with new information.
-- An empty list means that there is no completion left.
-- A [mempty] means that the end of a word is reached.
--
-- Examples:
--
-- >>> update (fromList ["put","putStr"]) "p"
-- fromList ["ut","utStr"]
--
-- >>> update (fromList ["put","putStr"]) "put"
-- fromList ["","Str"]
--
-- >>> update (fromList ["put","putStr"]) "putS"
-- fromList ["tr"]
--
-- >>> update (fromList ["put"]) "find"
-- fromList []
--
-- >>> update (fromList ["put"]) "put"
-- fromList [""]
update :: (Ord a, ListLike a i, Eq i) => CompletionTree a -> a -> CompletionTree a
update (CompletionTree ct) p
-- p is empty, this case just doesn't make sense:
| mempty == p = error "Can't update a CompletionTree with a mempty"
-- p is a key in the map ct that doesn't have children:
-- (This means the end of a word is reached)
| isJust one && mempty == fromJust one = CompletionTree $ M.singleton mempty mempty
-- p is a key in the map ct with children:
| isJust one = fromJust one
-- a substring of p is a key in ct:
| isJust remaining = uncurry update $ fromJust remaining
-- p is a substring of a key in ct:
| otherwise = CompletionTree $ M.mapKeys fromJust
$ M.filterWithKey (const . isJust)
$ M.mapKeys (LL.stripPrefix p) ct
where
one = M.lookup p ct
remaining = listToMaybe . catMaybes $
map (\p' -> (,fromJust $ LL.stripPrefix p' p) <$> M.lookup p' ct) (tail $ LL.inits p)
-- | Converts a CompletionTree to a list of completions.
--
-- prop> toList . fromList = sort . nub
--
-- Examples:
--
-- >>> toList mempty
-- []
--
-- >>> toList (fromList ["a"])
-- ["a"]
--
-- >>> toList (fromList ["a","a","a"])
-- ["a"]
--
-- >>> toList (fromList ["z","x","y"])
-- ["x","y","z"]
toList :: (Ord a, ListLike a i) => CompletionTree a -> [a]
toList ct
| mempty == ct = []
| otherwise = toList' ct
where
toList' :: (Ord a, ListLike a i) => CompletionTree a -> [a]
toList' (CompletionTree ct')
| M.null ct' = [mempty]
| otherwise = concat $ M.elems $ M.mapWithKey (\k v -> map (k `LL.append`) $ toList' v) ct'
-- TODO: make this function display a tree and rename to showTree
-- | For debugging purposes.
--
-- Example:
--
-- >>> putStrLn $ pretty $ fromList ["put", "putStr", "putStrLn"]
-- ["put"[""|"Str"[""|"Ln"]]]
pretty :: Show a => CompletionTree a -> String
pretty (CompletionTree ct)
| M.null ct = ""
| otherwise = "[" ++ intercalate "|" (M.elems (M.mapWithKey (\k v -> shows k (pretty v)) ct)) ++ "]"
|
yi-editor/yi
|
yi-core/src/Yi/CompletionTree.hs
|
gpl-2.0
| 7,035
| 0
| 22
| 1,704
| 1,705
| 924
| 781
| 82
| 3
|
module Http where
import Data.Char
import Data.List
import Network.HTTP
import Network.Stream
import System.IO
import Numeric
import Content
import File
import Util
-- http types
type HttpKey = String
type HttpValue = String
data HttpPair = HttpPair {
httpKey :: HttpKey,
httpVal :: HttpValue
}
-- implement serialisable
instance Show HttpPair where
show (HttpPair k v) = (k ++ ": " ++ v)
data HttpVerb =
HttpGet
| HttpPost
| HttpPut
| HttpDelete
data HttpRequest = HttpRequest {
reqVerb :: HttpVerb,
urlString :: String,
query :: String
}
data HttpResponse = HttpResponse {
httpCode :: Integer,
header :: [HttpPair],
content :: StreamTransfer
}
data HttpResponseHandler = HttpResponseHandler {
responseData :: HttpResponse,
responseSource :: StreamSet
}
-- either a content producer (leaf), or a branch keyed by the next path segment
data RouteItem =
RouteLeaf (HttpRequest -> IO HttpResponseHandler)
| RouteNode (String -> Maybe RouteItem)
-- types which can appear as routes
class RouteType r where
routeName :: r -> String
routeKey :: r -> [String]
routeMap :: r -> RouteItem
showRequest :: HttpRequest -> String
showRequest request = ((show (urlSplit request)) ++ "\n" ++ (query request) ++ "\n")
-- matches path to content producer
routeMatch :: RouteItem -> [String] -> Maybe (HttpRequest -> IO HttpResponseHandler)
routeMatch (RouteLeaf c) _ = Just $ c
routeMatch (RouteNode n) [] = case (n "") of
Nothing -> Nothing
Just r -> routeMatch r []
routeMatch (RouteNode n) path = case (n (head path)) of
Nothing -> Nothing
Just r -> routeMatch r (tail path)
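-- A hedged illustration of how a routing table built from these constructors
-- resolves a path; 'statusProducer' and 'fileProducer' are placeholder names
-- for values of type HttpRequest -> IO HttpResponseHandler and are not
-- defined in this project:
--
--   exampleRoutes :: RouteItem
--   exampleRoutes = RouteNode $ \seg -> case seg of
--     "status" -> Just (RouteLeaf statusProducer)
--     "files"  -> Just (RouteNode (const (Just (RouteLeaf fileProducer))))
--     _        -> Nothing
--
-- routeMatch exampleRoutes ["status"] yields Just statusProducer, while
-- routeMatch exampleRoutes ["missing"] yields Nothing.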
-- similar to routeMatch, but takes the path from the request itself
requestMatch :: RouteItem -> HttpRequest -> Maybe (IO HttpResponseHandler)
requestMatch routes req =
case (routeMatch routes (urlSplit req)) of
Nothing -> Nothing
Just fn -> Just $ fn req
verbMatch :: String -> Maybe HttpVerb
verbMatch str
| str == "GET" = Just HttpGet
| str == "POST" = Just HttpPost
| str == "PUT" = Just HttpPut
| str == "DELETE" = Just HttpDelete
| otherwise = Nothing
firstHeaderLine :: [String] -> Maybe HttpRequest
firstHeaderLine (verb:path:version:[]) =
case (verbMatch verb) of
Just httpverb -> let (file, query) = break (=='?') path in
Just $ HttpRequest httpverb file query
Nothing -> Nothing
firstHeaderLine _ = Nothing
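-- A small sketch of the expected token shape; the name 'exampleRequestLine'
-- is illustrative only and not part of the original module.
exampleRequestLine :: Maybe HttpRequest
exampleRequestLine = firstHeaderLine ["GET", "/index.html?q=1", "HTTP/1.1"]
-- urlString <$> exampleRequestLine == Just "/index.html"
-- query     <$> exampleRequestLine == Just "?q=1"
-- (the leading '?' stays in the query because 'break' keeps the matched
-- character in the second component)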
urlChar :: String -> String
urlChar ('%':xs) = [(hexDigitToChar xs)]
urlChar _ = ""
-- decode percent-encoded characters in the url
urlReplace :: String -> String
urlReplace "" = ""
urlReplace url = let (a, b) = break (=='%') url in
a ++ (urlChar (take 3 b)) ++ (urlReplace (drop 3 b))
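-- Illustration, assuming Util.hexDigitToChar decodes the two hex digits that
-- follow a '%': urlReplace "hello%20world" evaluates to "hello world", and
-- strings without any '%' pass through unchanged.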
urlSplitString :: String -> [String]
urlSplitString str = filterEmpty (wordDelim (=='/') str)
urlSplit :: HttpRequest -> [String]
urlSplit req = urlSplitString (urlReplace (urlString req))
subUrl :: Int -> HttpRequest -> String
subUrl index request = intercalate "/" $ drop index (urlSplit request)
-- split path into 2 parts
breakRequest :: HttpRequest -> Int -> (String, String)
breakRequest req index = ((absolutePath (take index str)), (absolutePath (drop index str))) where
str = (urlSplit req)
codeName :: Integer -> String
codeName code =
    case code of
        200 -> "OK"
        400 -> "Bad Request"
        404 -> "Not Found"
        _   -> "Unknown"
-- create the first header response line
makeResponseLine :: Integer -> String
makeResponseLine code = ("HTTP/1.1 " ++ (show code) ++ " " ++ (codeName code))
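-- For example, makeResponseLine 404 == "HTTP/1.1 404 Not Found", and any
-- code not listed in codeName falls back to "Unknown", e.g.
-- makeResponseLine 418 == "HTTP/1.1 418 Unknown".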
-- creates a general response from a string of content
generalResponse :: String -> HttpResponse
generalResponse content = streamResponse (createStringTransfer content)
maybeHeaderElement :: (Show t) => String -> Maybe t -> Maybe HttpPair
maybeHeaderElement label mb =
case mb of
Nothing -> Nothing
Just obj -> Just $ HttpPair label (show obj)
streamResponseLength :: StreamTransfer -> Maybe HttpPair
streamResponseLength content =
maybeHeaderElement "Content-Length" (transferLength content)
streamResponseType :: StreamTransfer -> Maybe HttpPair
streamResponseType content =
maybeHeaderElement "Content-Type" (transferMimeType content)
streamResponse :: StreamTransfer -> HttpResponse
streamResponse c = (HttpResponse 200 h c) where
h = mapFnMaybe [streamResponseLength, streamResponseType] c
headerFromPairs :: [(StreamTransfer -> HttpPair)] -> StreamTransfer -> [HttpPair]
headerFromPairs fns str = mapFn fns str
responseHeadString :: HttpResponse -> String
responseHeadString r = ((makeResponseLine (httpCode r)) ++ (intercalate "\n" (map show (header r))) ++ "\n\n")
sendAllResponse :: Handle -> HttpResponse -> IO ()
sendAllResponse hdl response = do
hPutStr hdl (responseHeadString response)
sendAllContent hdl (content response) (16 * 1024)
-- unused
type RequestHandler = Request_String -> IO Response_String
-- unused
req_handler :: Request String -> (String, Bool)
req_handler request = ("test", True)
-- unused
req_handler2 :: Request a -> (String, Bool)
req_handler2 request = ("test", True)
-- unused
readRequest :: HStream a => HandleStream a -> IO String
readRequest stream = do
http_data <- receiveHTTP stream
case http_data of
Left e -> do
return $ show e
Right m -> do
return $ show m
|
Jon0/status
|
src/Http.hs
|
gpl-3.0
| 5,323
| 0
| 13
| 1,124
| 1,710
| 889
| 821
| 131
| 4
|
module SugarScape.Model
( SugAgentState (..)
, SugAgentObservable (..)
, SugEnvCell (..)
, SugAgentMonad
, SugAgentMonadT
, SugEnvironment
, SugAgent
, SugAgentDef
, SugAgentOut
, AgentAgeSpan (..)
, SugarScapeParams (..)
, AgentDistribution (..)
, SugarRegrow (..)
, PolutionFormation (..)
, mkSugarScapeParams
, mkParamsAnimationII_1
, mkParamsAnimationII_2
, mkParamsAnimationII_3
, mkParamsAnimationII_4
, mkParamsAnimationII_6
, mkParamsAnimationII_7
, mkParamsAnimationII_8
, mkParamsTerracing
, mkParamsCarryingCapacity
, mkParamsWealthDistr
, maxSugarCapacityCell
, sugarscapeDimensions
, sugarEnvSpec
) where
import Control.Monad.Random
import SugarScape.AgentMonad
import SugarScape.Discrete
------------------------------------------------------------------------------------------------------------------------
-- AGENT-DEFINITIONS
------------------------------------------------------------------------------------------------------------------------
data SugAgentState = SugAgentState
{ sugAgCoord :: Discrete2dCoord
, sugAgSugarMetab :: Int -- integer because discrete, otherwise no exact replication possible
, sugAgVision :: Int
, sugAgSugarLevel :: Double -- floating point because regrow-rate can be set to floating point values
, sugAgAge :: Int
, sugAgMaxAge :: Maybe Int
} deriving (Show, Eq)
data SugAgentObservable = SugAgentObservable
{ sugObsCoord :: Discrete2dCoord
, sugObsVision :: Int
, sugObsAge :: Int
, sugObsSugLvl :: Double
, sugObsSugMetab :: Int
} deriving (Show, Eq)
data SugEnvCell = SugEnvCell
{ sugEnvCellSugarCapacity :: Double
, sugEnvCellSugarLevel :: Double
, sugEnvCellPolutionLevel :: Double
, sugEnvCellOccupier :: Maybe AgentId
} deriving (Show, Eq)
type SugEnvironment = Discrete2d SugEnvCell
type SugAgentMonad g = Rand g
type SugAgentMonadT g = AgentT (Rand g)
type SugAgent g = Agent (SugAgentMonad g) SugAgentObservable SugEnvironment
type SugAgentDef g = AgentDef (SugAgentMonad g) SugAgentObservable SugEnvironment
type SugAgentOut g = AgentOut (SugAgentMonad g) SugAgentObservable SugEnvironment
------------------------------------------------------------------------------------------------------------------------
------------------------------------------------------------------------------------------------------------------------
-- SUGARSCAPE PARAMETERS
------------------------------------------------------------------------------------------------------------------------
maxSugarCapacityCell :: Int
maxSugarCapacityCell = 4
-- the sugarscape is 51x51 in our implementation
sugarscapeDimensions :: Discrete2dCoord
sugarscapeDimensions = (51, 51)
-- taken from Iain Weaver Sugarscape implementation
-- https://www2.le.ac.uk/departments/interdisciplinary-science/research/replicating-sugarscape
-- http://ccl.northwestern.edu/netlogo/models/community/
sugarEnvSpec :: [String]
sugarEnvSpec =
[ "111111111111111111111111111112222222222111111111111"
, "111111111111111111111111111222222222222222111111111"
, "111111111111111111111111112222222222222222221111111"
, "111111111111111111111111122222222222222222222211111"
, "111111111111111111111111222222222222222222222221111"
, "111110000000111111111111222222222223332222222222111"
, "111110000000001111111111222222223333333332222222111"
, "111110000000000111111112222222333333333333222222211"
, "111110000000000111111112222223333333333333322222211"
, "111110000000000011111112222223333333333333332222221"
, "111110000000000011111122222233333344444333333222221"
, "111110000000000111111122222233333444444433333222221"
, "111111000000000111111122222333334444444443333222222"
, "111111000000001111111222222333334444444443333322222"
, "111111100000011111111222222333334444444443333322222"
, "111111111001111111111222222333334444444443333322222"
, "111111111111111111111222222333334444444443333222222"
, "111111111111111111112222222333333444444433333222222"
, "111111111111111111112222222233333344444333333222222"
, "111111111111111111122222222233333333333333332222222"
, "111111111111111112222222222223333333333333332222222"
, "111111111111122222222222222223333333333333322222222"
, "111111111122222222222222222222233333333332222222221"
, "111111122222222222222222222222222333333222222222221"
, "111122222222222222222222222222222222222222222222211"
, "111222222222222222222222222222222222222222222222111"
, "112222222222222222222222222222222222222222222221111"
, "122222222222333333222222222222222222222222221111111"
, "122222222233333333332222222222222222222221111111111"
, "222222223333333333333322222222222222221111111111111"
, "222222233333333333333322222222222211111111111111111"
, "222222233333333333333332222222221111111111111111111"
, "222222333333444443333332222222211111111111111111111"
, "222222333334444444333333222222211111111111111111111"
, "222222333344444444433333222222111111111111111111111"
, "222223333344444444433333222222111111111100111111111"
, "222223333344444444433333222222111111110000001111111"
, "222223333344444444433333222222111111100000000111111"
, "222222333344444444433333222221111111000000000111111"
, "122222333334444444333332222221111111000000000011111"
, "122222333333444443333332222221111110000000000011111"
, "122222233333333333333322222211111110000000000011111"
, "112222223333333333333322222211111111000000000011111"
, "112222222333333333333222222211111111000000000011111"
, "111222222233333333322222222111111111100000000011111"
, "111222222222233322222222222111111111111000000011111"
, "111122222222222222222222222111111111111111111111111"
, "111112222222222222222222221111111111111111111111111"
, "111111122222222222222222211111111111111111111111111"
, "111111111222222222222222111111111111111111111111111"
, "111111111111222222222211111111111111111111111111111"
]
data AgentAgeSpan = Forever
| Range Int Int deriving (Show, Eq)
data AgentDistribution = Scatter
| Corner Discrete2dCoord deriving (Show, Eq)
data SugarRegrow = Immediate
| Rate Double
| Season Double Double Int deriving (Show, Eq)
data PolutionFormation = NoPolution
| Polute Double Double deriving (Show, Eq)
data SugarScapeParams = SugarScapeParams
{ sgAgentCount :: Int
, sgAgentDistribution :: AgentDistribution
  , spSugarRegrow :: SugarRegrow           -- Immediate: regrow to max in the next step; Rate: grow back by the given (possibly fractional) amount per step; Season: seasonal regrowth (see Animation II-7)
, spSugarEndowmentRange :: (Int, Int)
, spSugarMetabolismRange :: (Int, Int)
, spVisionRange :: (Int, Int)
, spReplaceAgents :: Bool -- replacement rule R_[a, b] on/off
, spAgeSpan :: AgentAgeSpan
, spPolutionFormation :: PolutionFormation
, spPolutionDiffusion :: Maybe Int
}
mkSugarScapeParams :: SugarScapeParams
mkSugarScapeParams = SugarScapeParams {
sgAgentCount = 0
, spSugarRegrow = Immediate
, sgAgentDistribution = Scatter
, spSugarEndowmentRange = (0, 0)
, spSugarMetabolismRange = (0, 0)
, spVisionRange = (0, 0)
, spReplaceAgents = False
, spAgeSpan = Forever
, spPolutionFormation = NoPolution
, spPolutionDiffusion = Nothing
}
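-- A hedged sketch of deriving a variant parameter set via record-update
-- syntax instead of spelling out every field; the name
-- 'mkParamsCustomExample' is illustrative only and not used elsewhere.
mkParamsCustomExample :: SugarScapeParams
mkParamsCustomExample = mkSugarScapeParams {
    sgAgentCount          = 100
  , spSugarRegrow         = Rate 1
  , spSugarEndowmentRange = (5, 25)
  , spVisionRange         = (1, 6)
  }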
------------------------------------------------------------------------------------------------------------------------
-- CHAPTER II: Life And Death On The Sugarscape
------------------------------------------------------------------------------------------------------------------------
-- Social Evolution with immediate regrow, page 27
mkParamsAnimationII_1 :: SugarScapeParams
mkParamsAnimationII_1 = SugarScapeParams {
sgAgentCount = 400 -- page 28
, sgAgentDistribution = Scatter
, spSugarRegrow = Immediate -- regrow to max immediately
, spSugarEndowmentRange = (5, 25) -- NOTE: this is specified in book page 33 where the initial endowments are set to 5-25
, spSugarMetabolismRange = (1, 4) -- NOTE: specified where? 1 - 4
, spVisionRange = (1, 6) -- NOTE: set to 1-6 on page 24
, spReplaceAgents = False -- no replacing of died agents
  , spAgeSpan = Forever                    -- agents don't die of age in this case
, spPolutionFormation = NoPolution
, spPolutionDiffusion = Nothing
}
-- terracing phenomenon as described on page 28
mkParamsTerracing :: SugarScapeParams
mkParamsTerracing = mkParamsAnimationII_1
-- Social Evolution with regrow rate of 1, page 29
mkParamsAnimationII_2 :: SugarScapeParams
mkParamsAnimationII_2 = SugarScapeParams {
sgAgentCount = 400 -- page 28
, sgAgentDistribution = Scatter
, spSugarRegrow = Rate 1 -- regrow by 1 unit per step
, spSugarEndowmentRange = (5, 25) -- NOTE: this is specified in book page 33 where the initial endowments are set to 5-25
, spSugarMetabolismRange = (1, 4)
, spVisionRange = (1, 6)
, spReplaceAgents = False -- no replacing of died agents
  , spAgeSpan = Forever                    -- agents don't die of age in this case
, spPolutionFormation = NoPolution
, spPolutionDiffusion = Nothing
}
-- carrying capacity property as described on page 30
mkParamsCarryingCapacity :: SugarScapeParams
mkParamsCarryingCapacity = mkParamsAnimationII_2
-- Wealth Distribution page 34
mkParamsAnimationII_3 :: SugarScapeParams
mkParamsAnimationII_3 = SugarScapeParams {
sgAgentCount = 250 -- page 33
, sgAgentDistribution = Scatter
, spSugarRegrow = Rate 1 -- page 33
, spSugarEndowmentRange = (5, 25) -- page 33
, spSugarMetabolismRange = (1, 4)
, spVisionRange = (1, 6)
, spReplaceAgents = True -- page 33
, spAgeSpan = Range 60 100 -- page 33
, spPolutionFormation = NoPolution
, spPolutionDiffusion = Nothing
}
-- wealth distribution as described on page 32-37
mkParamsAnimationII_4 :: SugarScapeParams
mkParamsAnimationII_4 = mkParamsAnimationII_3 -- same as G_1, M, R_60,100 => same as Animation II-3
-- wealth distribution as described on page 32-37
mkParamsWealthDistr :: SugarScapeParams
mkParamsWealthDistr = mkParamsAnimationII_3 -- same as G_1, M, R_60,100 => same as Animation II-3
-- Migration as described on page 42 and 43 in Animation II-6
mkParamsAnimationII_6 :: SugarScapeParams
mkParamsAnimationII_6 = SugarScapeParams {
sgAgentCount = 300 -- 300 otherwise no waves, see https://www2.le.ac.uk/departments/interdisciplinary-science/research/replicating-sugarscape
, sgAgentDistribution = Corner (20, 20)
, spSugarRegrow = Rate 0.5 -- 0.5 otherwise no waves, see https://www2.le.ac.uk/departments/interdisciplinary-science/research/replicating-sugarscape
, spSugarEndowmentRange = (5, 25)
, spSugarMetabolismRange = (1, 4)
  , spVisionRange = (1, 10)                -- increase vision to 10, see page 42; we suggest setting it to 15 to make the waves really prominent
, spReplaceAgents = False -- agents in migration experiment are not replaced
, spAgeSpan = Forever -- agents in Migration experiment do not die of age
, spPolutionFormation = NoPolution
, spPolutionDiffusion = Nothing
}
-- Seasonal Migration as described on page 44 and 45 in Animation II-7
mkParamsAnimationII_7 :: SugarScapeParams
mkParamsAnimationII_7 = SugarScapeParams {
sgAgentCount = 400
, sgAgentDistribution = Scatter
, spSugarRegrow = Season 1 8 50
, spSugarEndowmentRange = (5, 25)
, spSugarMetabolismRange = (1, 4)
, spVisionRange = (1, 6)
, spReplaceAgents = False
, spAgeSpan = Forever
, spPolutionFormation = NoPolution
, spPolutionDiffusion = Nothing
}
-- Polution as described on page 45 to 50 in Animation II-8
mkParamsAnimationII_8 :: SugarScapeParams
mkParamsAnimationII_8 = SugarScapeParams {
sgAgentCount = 400
, sgAgentDistribution = Scatter
, spSugarRegrow = Rate 1
, spSugarEndowmentRange = (5, 25)
, spSugarMetabolismRange = (1, 4)
, spVisionRange = (1, 6)
, spReplaceAgents = False
, spAgeSpan = Forever
, spPolutionFormation = Polute 1 1
, spPolutionDiffusion = Just 1
}
------------------------------------------------------------------------------------------------------------------------
|
thalerjonathan/phd
|
public/towards/SugarScape/experimental/chapter2_environment/src/SugarScape/Model.hs
|
gpl-3.0
| 12,822
| 0
| 9
| 2,480
| 1,542
| 993
| 549
| 228
| 1
|
{-# LANGUAGE UnicodeSyntax #-}
--
-- Utility functions to process XmlNodes directly, i.e. outside of XmlArrow.
--
module XmlNodeUtil where
import Prelude.Unicode
import Control.Arrow
import Data.AssocList
import Data.Maybe
import qualified Text.XML.HXT.DOM.XmlNode as XN
type LocalName = String
localPart ∷ XN.XmlNode ξ ⇒ ξ → LocalName
localPart = fromMaybe "" ∘ XN.getLocalPart
text ∷ XN.XmlNode ξ ⇒ ξ → String
text = fromMaybe "" ∘ XN.getText
attributes ∷ XN.XmlNode ξ ⇒ ξ → AssocList LocalName String
attributes = map (localPart &&& value) ∘ fromMaybe [] ∘ XN.getAttrl
where
value = headDef "" ∘ map text ∘ XN.getChildren
headDef x [] = x -- TODO use Safe package instead
headDef x xs = head xs
attribute ∷ XN.XmlNode ξ ⇒ LocalName → ξ → String
attribute name = lookup1 name ∘ attributes
hasName ∷ XN.XmlNode ξ ⇒ LocalName → ξ → Bool
hasName name = (≡name) ∘ localPart
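-- Rough usage sketch (node construction via HXT is not shown here): for an
-- element like <rect id="r1" width="10"/>, 'attributes' yields roughly
-- [("id","r1"),("width","10")], 'attribute "id"' picks out "r1", and
-- hasName "rect" is True.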
|
c0c0n3/hAppYard
|
inkscape-util/src/shared/XmlNodeUtil.hs
|
gpl-3.0
| 1,004
| 0
| 9
| 215
| 286
| 152
| 134
| 21
| 2
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SWF.DescribeDomain
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns information about the specified domain, including description and
-- status.
--
-- Access Control
--
-- You can use IAM policies to control this action's access to Amazon SWF
-- resources as follows:
--
-- Use a 'Resource' element with the domain name to limit the action to only
-- specified domains. Use an 'Action' element to allow or deny permission to call
-- this action. You cannot use an IAM policy to constrain this action's
-- parameters. If the caller does not have sufficient permissions to invoke the
-- action, or the parameter values fall outside the specified constraints, the
-- action fails. The associated event attribute's cause parameter will be set to
-- OPERATION_NOT_PERMITTED. For details and example IAM policies, see <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAMto Manage Access to Amazon SWF Workflows>.
--
-- <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_DescribeDomain.html>
module Network.AWS.SWF.DescribeDomain
(
-- * Request
DescribeDomain
-- ** Request constructor
, describeDomain
-- ** Request lenses
, ddName
-- * Response
, DescribeDomainResponse
-- ** Response constructor
, describeDomainResponse
-- ** Response lenses
, ddrConfiguration
, ddrDomainInfo
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.SWF.Types
import qualified GHC.Exts
newtype DescribeDomain = DescribeDomain
{ _ddName :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DescribeDomain' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ddName' @::@ 'Text'
--
describeDomain :: Text -- ^ 'ddName'
-> DescribeDomain
describeDomain p1 = DescribeDomain
{ _ddName = p1
}
-- | The name of the domain to describe.
ddName :: Lens' DescribeDomain Text
ddName = lens _ddName (\s a -> s { _ddName = a })
data DescribeDomainResponse = DescribeDomainResponse
{ _ddrConfiguration :: DomainConfiguration
, _ddrDomainInfo :: DomainInfo
} deriving (Eq, Read, Show)
-- | 'DescribeDomainResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ddrConfiguration' @::@ 'DomainConfiguration'
--
-- * 'ddrDomainInfo' @::@ 'DomainInfo'
--
describeDomainResponse :: DomainInfo -- ^ 'ddrDomainInfo'
-> DomainConfiguration -- ^ 'ddrConfiguration'
-> DescribeDomainResponse
describeDomainResponse p1 p2 = DescribeDomainResponse
{ _ddrDomainInfo = p1
, _ddrConfiguration = p2
}
ddrConfiguration :: Lens' DescribeDomainResponse DomainConfiguration
ddrConfiguration = lens _ddrConfiguration (\s a -> s { _ddrConfiguration = a })
ddrDomainInfo :: Lens' DescribeDomainResponse DomainInfo
ddrDomainInfo = lens _ddrDomainInfo (\s a -> s { _ddrDomainInfo = a })
instance ToPath DescribeDomain where
toPath = const "/"
instance ToQuery DescribeDomain where
toQuery = const mempty
instance ToHeaders DescribeDomain
instance ToJSON DescribeDomain where
toJSON DescribeDomain{..} = object
[ "name" .= _ddName
]
instance AWSRequest DescribeDomain where
type Sv DescribeDomain = SWF
type Rs DescribeDomain = DescribeDomainResponse
request = post "DescribeDomain"
response = jsonResponse
instance FromJSON DescribeDomainResponse where
parseJSON = withObject "DescribeDomainResponse" $ \o -> DescribeDomainResponse
<$> o .: "configuration"
<*> o .: "domainInfo"
|
dysinger/amazonka
|
amazonka-swf/gen/Network/AWS/SWF/DescribeDomain.hs
|
mpl-2.0
| 4,604
| 0
| 11
| 985
| 522
| 320
| 202
| 63
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdSenseHost.CustomChannels.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Update a custom channel in the host AdSense account. This method
-- supports patch semantics.
--
-- /See:/ <https://developers.google.com/adsense/host/ AdSense Host API Reference> for @adsensehost.customchannels.patch@.
module Network.Google.Resource.AdSenseHost.CustomChannels.Patch
(
-- * REST Resource
CustomChannelsPatchResource
-- * Creating a Request
, customChannelsPatch
, CustomChannelsPatch
-- * Request Lenses
, ccpCustomChannelId
, ccpPayload
, ccpAdClientId
) where
import Network.Google.AdSenseHost.Types
import Network.Google.Prelude
-- | A resource alias for @adsensehost.customchannels.patch@ method which the
-- 'CustomChannelsPatch' request conforms to.
type CustomChannelsPatchResource =
"adsensehost" :>
"v4.1" :>
"adclients" :>
Capture "adClientId" Text :>
"customchannels" :>
QueryParam "customChannelId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] CustomChannel :>
Patch '[JSON] CustomChannel
-- | Update a custom channel in the host AdSense account. This method
-- supports patch semantics.
--
-- /See:/ 'customChannelsPatch' smart constructor.
data CustomChannelsPatch =
CustomChannelsPatch'
{ _ccpCustomChannelId :: !Text
, _ccpPayload :: !CustomChannel
, _ccpAdClientId :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CustomChannelsPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccpCustomChannelId'
--
-- * 'ccpPayload'
--
-- * 'ccpAdClientId'
customChannelsPatch
:: Text -- ^ 'ccpCustomChannelId'
-> CustomChannel -- ^ 'ccpPayload'
-> Text -- ^ 'ccpAdClientId'
-> CustomChannelsPatch
customChannelsPatch pCcpCustomChannelId_ pCcpPayload_ pCcpAdClientId_ =
CustomChannelsPatch'
{ _ccpCustomChannelId = pCcpCustomChannelId_
, _ccpPayload = pCcpPayload_
, _ccpAdClientId = pCcpAdClientId_
}
-- | Custom channel to get.
ccpCustomChannelId :: Lens' CustomChannelsPatch Text
ccpCustomChannelId
= lens _ccpCustomChannelId
(\ s a -> s{_ccpCustomChannelId = a})
-- | Multipart request metadata.
ccpPayload :: Lens' CustomChannelsPatch CustomChannel
ccpPayload
= lens _ccpPayload (\ s a -> s{_ccpPayload = a})
-- | Ad client in which the custom channel will be updated.
ccpAdClientId :: Lens' CustomChannelsPatch Text
ccpAdClientId
= lens _ccpAdClientId
(\ s a -> s{_ccpAdClientId = a})
instance GoogleRequest CustomChannelsPatch where
type Rs CustomChannelsPatch = CustomChannel
type Scopes CustomChannelsPatch =
'["https://www.googleapis.com/auth/adsensehost"]
requestClient CustomChannelsPatch'{..}
= go _ccpAdClientId (Just _ccpCustomChannelId)
(Just AltJSON)
_ccpPayload
adSenseHostService
where go
= buildClient
(Proxy :: Proxy CustomChannelsPatchResource)
mempty
|
brendanhay/gogol
|
gogol-adsense-host/gen/Network/Google/Resource/AdSenseHost/CustomChannels/Patch.hs
|
mpl-2.0
| 3,913
| 0
| 15
| 883
| 469
| 280
| 189
| 78
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeBuyer2.Bidders.FilterSets.NonBillableWinningBids.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List all reasons for which winning bids were not billable, with the
-- number of bids not billed for each reason.
--
-- /See:/ <https://developers.google.com/authorized-buyers/apis/reference/rest/ Ad Exchange Buyer API II Reference> for @adexchangebuyer2.bidders.filterSets.nonBillableWinningBids.list@.
module Network.Google.Resource.AdExchangeBuyer2.Bidders.FilterSets.NonBillableWinningBids.List
(
-- * REST Resource
BiddersFilterSetsNonBillableWinningBidsListResource
-- * Creating a Request
, biddersFilterSetsNonBillableWinningBidsList
, BiddersFilterSetsNonBillableWinningBidsList
-- * Request Lenses
, bfsnbwblXgafv
, bfsnbwblUploadProtocol
, bfsnbwblFilterSetName
, bfsnbwblAccessToken
, bfsnbwblUploadType
, bfsnbwblPageToken
, bfsnbwblPageSize
, bfsnbwblCallback
) where
import Network.Google.AdExchangeBuyer2.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangebuyer2.bidders.filterSets.nonBillableWinningBids.list@ method which the
-- 'BiddersFilterSetsNonBillableWinningBidsList' request conforms to.
type BiddersFilterSetsNonBillableWinningBidsListResource
=
"v2beta1" :>
Capture "filterSetName" Text :>
"nonBillableWinningBids" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListNonBillableWinningBidsResponse
-- | List all reasons for which winning bids were not billable, with the
-- number of bids not billed for each reason.
--
-- /See:/ 'biddersFilterSetsNonBillableWinningBidsList' smart constructor.
data BiddersFilterSetsNonBillableWinningBidsList =
BiddersFilterSetsNonBillableWinningBidsList'
{ _bfsnbwblXgafv :: !(Maybe Xgafv)
, _bfsnbwblUploadProtocol :: !(Maybe Text)
, _bfsnbwblFilterSetName :: !Text
, _bfsnbwblAccessToken :: !(Maybe Text)
, _bfsnbwblUploadType :: !(Maybe Text)
, _bfsnbwblPageToken :: !(Maybe Text)
, _bfsnbwblPageSize :: !(Maybe (Textual Int32))
, _bfsnbwblCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'BiddersFilterSetsNonBillableWinningBidsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bfsnbwblXgafv'
--
-- * 'bfsnbwblUploadProtocol'
--
-- * 'bfsnbwblFilterSetName'
--
-- * 'bfsnbwblAccessToken'
--
-- * 'bfsnbwblUploadType'
--
-- * 'bfsnbwblPageToken'
--
-- * 'bfsnbwblPageSize'
--
-- * 'bfsnbwblCallback'
biddersFilterSetsNonBillableWinningBidsList
:: Text -- ^ 'bfsnbwblFilterSetName'
-> BiddersFilterSetsNonBillableWinningBidsList
biddersFilterSetsNonBillableWinningBidsList pBfsnbwblFilterSetName_ =
BiddersFilterSetsNonBillableWinningBidsList'
{ _bfsnbwblXgafv = Nothing
, _bfsnbwblUploadProtocol = Nothing
, _bfsnbwblFilterSetName = pBfsnbwblFilterSetName_
, _bfsnbwblAccessToken = Nothing
, _bfsnbwblUploadType = Nothing
, _bfsnbwblPageToken = Nothing
, _bfsnbwblPageSize = Nothing
, _bfsnbwblCallback = Nothing
}
-- | V1 error format.
bfsnbwblXgafv :: Lens' BiddersFilterSetsNonBillableWinningBidsList (Maybe Xgafv)
bfsnbwblXgafv
= lens _bfsnbwblXgafv
(\ s a -> s{_bfsnbwblXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
bfsnbwblUploadProtocol :: Lens' BiddersFilterSetsNonBillableWinningBidsList (Maybe Text)
bfsnbwblUploadProtocol
= lens _bfsnbwblUploadProtocol
(\ s a -> s{_bfsnbwblUploadProtocol = a})
-- | Name of the filter set that should be applied to the requested metrics.
-- For example: - For a bidder-level filter set for bidder 123:
-- \`bidders\/123\/filterSets\/abc\` - For an account-level filter set for
-- the buyer account representing bidder 123:
-- \`bidders\/123\/accounts\/123\/filterSets\/abc\` - For an account-level
-- filter set for the child seat buyer account 456 whose bidder is 123:
-- \`bidders\/123\/accounts\/456\/filterSets\/abc\`
bfsnbwblFilterSetName :: Lens' BiddersFilterSetsNonBillableWinningBidsList Text
bfsnbwblFilterSetName
= lens _bfsnbwblFilterSetName
(\ s a -> s{_bfsnbwblFilterSetName = a})
-- | OAuth access token.
bfsnbwblAccessToken :: Lens' BiddersFilterSetsNonBillableWinningBidsList (Maybe Text)
bfsnbwblAccessToken
= lens _bfsnbwblAccessToken
(\ s a -> s{_bfsnbwblAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
bfsnbwblUploadType :: Lens' BiddersFilterSetsNonBillableWinningBidsList (Maybe Text)
bfsnbwblUploadType
= lens _bfsnbwblUploadType
(\ s a -> s{_bfsnbwblUploadType = a})
-- | A token identifying a page of results the server should return.
-- Typically, this is the value of
-- ListNonBillableWinningBidsResponse.nextPageToken returned from the
-- previous call to the nonBillableWinningBids.list method.
bfsnbwblPageToken :: Lens' BiddersFilterSetsNonBillableWinningBidsList (Maybe Text)
bfsnbwblPageToken
= lens _bfsnbwblPageToken
(\ s a -> s{_bfsnbwblPageToken = a})
-- | Requested page size. The server may return fewer results than requested.
-- If unspecified, the server will pick an appropriate default.
bfsnbwblPageSize :: Lens' BiddersFilterSetsNonBillableWinningBidsList (Maybe Int32)
bfsnbwblPageSize
= lens _bfsnbwblPageSize
(\ s a -> s{_bfsnbwblPageSize = a})
. mapping _Coerce
-- | JSONP
bfsnbwblCallback :: Lens' BiddersFilterSetsNonBillableWinningBidsList (Maybe Text)
bfsnbwblCallback
= lens _bfsnbwblCallback
(\ s a -> s{_bfsnbwblCallback = a})
instance GoogleRequest
BiddersFilterSetsNonBillableWinningBidsList
where
type Rs BiddersFilterSetsNonBillableWinningBidsList =
ListNonBillableWinningBidsResponse
type Scopes
BiddersFilterSetsNonBillableWinningBidsList
=
'["https://www.googleapis.com/auth/adexchange.buyer"]
requestClient
BiddersFilterSetsNonBillableWinningBidsList'{..}
= go _bfsnbwblFilterSetName _bfsnbwblXgafv
_bfsnbwblUploadProtocol
_bfsnbwblAccessToken
_bfsnbwblUploadType
_bfsnbwblPageToken
_bfsnbwblPageSize
_bfsnbwblCallback
(Just AltJSON)
adExchangeBuyer2Service
where go
= buildClient
(Proxy ::
Proxy
BiddersFilterSetsNonBillableWinningBidsListResource)
mempty
|
brendanhay/gogol
|
gogol-adexchangebuyer2/gen/Network/Google/Resource/AdExchangeBuyer2/Bidders/FilterSets/NonBillableWinningBids/List.hs
|
mpl-2.0
| 7,696
| 0
| 18
| 1,585
| 891
| 520
| 371
| 137
| 1
|
module Forms
( newTaskForm
, editTaskForm
, reorderTaskForm
, newNoteForm
) where
import Import
import Util (fieldListOptions)
import Control.Monad (unless)
newTaskForm :: Bool -> Form NewTask
newTaskForm includeNonDaily = renderBootstrap $ NewTask
<$> areq textField titleSettings Nothing
<*> areq (radioFieldList' scheduleListOptions) scheduleSettings (pure Once)
<*> pure Nothing
where
titleSettings = fieldSettingsWithAttrs "Title" [("placeholder", "Add a task")]
scheduleListOptions | includeNonDaily = fieldListOptions
| otherwise = filter (not . nonDaily . snd) fieldListOptions
scheduleSettings = fieldSettingsWithAttrs "Schedule" [("class", "inline")]
editTaskForm :: Form TaskEdit
editTaskForm = renderDivs $ TaskTitleEdit <$> areq textField "Title" Nothing
reorderTaskForm :: Form TaskEdit
reorderTaskForm = renderDivs $ TaskOrderEdit <$> areq intField "Delta" Nothing
newNoteForm :: Form NewNote
newNoteForm = renderDivs $ NewNote <$> unTextarea <$> areq textareaField bodySettings Nothing
where
bodySettings = fieldSettingsWithAttrs "Body" [("placeholder", "Add a note")]
fieldSettingsWithAttrs :: SomeMessage master -> [(Text, Text)] -> FieldSettings master
fieldSettingsWithAttrs s = FieldSettings s Nothing Nothing Nothing
-- backported from yesod 28e0bf8 (yesod-form 1.1.0.1)
radioFieldList' :: (Eq a, RenderMessage master FormMessage, RenderMessage master msg) => [(msg, a)] -> Field sub master a
radioFieldList' = radioField' . optionsPairs
radioField' :: (Eq a, RenderMessage master FormMessage) => GHandler sub master (OptionList a) -> Field sub master a
radioField' = selectFieldHelper
(\theId _name inside -> [whamlet|
<div ##{theId}>^{inside}
|])
(\theId name isSel -> [whamlet|
<label .radio for=#{theId}-none>
<div>
<input id=#{theId}-none type=radio name=#{name} value=none :isSel:checked>
_{MsgSelectNone}
|])
(\theId name attrs value isSel text -> [whamlet|
<label .radio for=#{theId}-#{value}>
<div>
<input id=#{theId}-#{value} type=radio name=#{name} value=#{value} :isSel:checked *{attrs}>
\#{text}
|])
selectFieldHelper
:: (Eq a, RenderMessage master FormMessage)
=> (Text -> Text -> GWidget sub master () -> GWidget sub master ())
-> (Text -> Text -> Bool -> GWidget sub master ())
-> (Text -> Text -> [(Text, Text)] -> Text -> Bool -> Text -> GWidget sub master ())
-> GHandler sub master (OptionList a) -> Field sub master a
selectFieldHelper outside onOpt inside opts' = Field
{ fieldParse = \x -> do
opts <- opts'
return $ selectParser opts x
, fieldView = \theId name attrs val isReq -> do
opts <- fmap olOptions $ lift opts'
outside theId name $ do
unless isReq $ onOpt theId name $ not $ render opts val `elem` map optionExternalValue opts
flip mapM_ opts $ \opt -> inside
theId
name
attrs
(optionExternalValue opt)
((render opts val) == optionExternalValue opt)
(optionDisplay opt)
}
where
render _ (Left _) = ""
render opts (Right a) = maybe "" optionExternalValue $ listToMaybe $ filter ((== a) . optionInternalValue) opts
selectParser _ [] = Right Nothing
selectParser opts (s:_) = case s of
"" -> Right Nothing
"none" -> Right Nothing
x -> case olReadExternal opts x of
Nothing -> Left $ SomeMessage $ MsgInvalidEntry x
Just y -> Right $ Just y
|
samstokes/yesodoro-reboot
|
Forms.hs
|
bsd-2-clause
| 3,625
| 0
| 19
| 887
| 1,024
| 527
| 497
| -1
| -1
|
{-# LANGUAGE RecordWildCards #-}
module NLP.EarleyFacile where
import Prelude hiding (init)
import Control.Monad (void, forM_, when, msum)
import qualified Control.Monad.RWS.Strict as RWS
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Class (lift)
import Options.Applicative hiding (some)
import System.IO (hFlush, stdout)
-- import System.Environment (getProgName)
import qualified Data.Tree as R
import Data.Maybe (maybeToList)
import qualified Data.Set as S
import qualified Data.Map.Strict as M
import qualified Pipes as P
import qualified NLP.EarleyFacile.GrammarParser as G
--------------------------------------------------
-- Item
--------------------------------------------------
-- | Position in the input sentence.
type Pos = Int
-- | Item's ID type.
type ID = Int
-- | Chart item based on some CFG rule.
data Item n t = Item
{ ihead :: n
-- ^ Head of the underlying rule.
, left :: [Either n t]
-- ^ The part of the body of the rule on the left of the dot.
, right :: [Either n t]
-- ^ The part on the right of the dot.
, beg :: Pos
-- ^ Where the span begins.
, end :: Pos
-- ^ Where the span ends.
} deriving (Show, Eq, Ord)
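-- A hedged illustration (using String for both non-terminals and terminals,
-- which is an assumption of this example only): the item for a rule
-- S -> NP VP whose NP part has been recognised over input positions 0..2.
-- 'printItem' renders it as ("S" -> "NP" * "VP", 0, 2).  The name
-- 'exampleItem' is illustrative and not used by the parser itself.
exampleItem :: Item String String
exampleItem = Item
  { ihead = "S"
  , left  = [Left "NP"]  -- already-recognised symbols, most recent first
  , right = [Left "VP"]  -- symbols still expected, i.e. after the dot
  , beg   = 0
  , end   = 2
  }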
-- | Deconstruct the right part of the item (i.e. the symbols,
-- terminal or non-terminal, still to process).
expects
:: Item n t
-> Maybe (Either n t, [Either n t])
expects = decoList . right
-- | Is it a passive (with the dot at the end) item?
passive :: Item n t -> Bool
passive = null . right
-- | Is it an active item?
active :: Item n t -> Bool
active = not . passive
-- | Print the item to stdout.
printItem
:: (Ord n, Ord t, Show n, Show t)
=> Item n t -> IO ()
printItem Item{..} = do
putStr "("
putStr (show ihead)
putStr " -> "
putStr (unwords . map showLab $ reverse left)
putStr " * "
putStr (unwords . map showLab $ right)
putStr ", "
putStr (show beg)
putStr ", "
putStr (show end)
putStr ")"
--------------------------------------------------
-- Traversal
--------------------------------------------------
-- | Traversal represents an action of inferring a new item on the
-- basis of zero, one or two other chart items.
-- It can be seen as an application of one of the inference rules
-- specifying the parsing algorithm.
data Trav n t
= Scan
{ _scanFrom :: Item n t
-- ^ The input active state
}
| Comp
{ _actArg :: Item n t
-- ^ The active argument of the action
, _pasArg :: Item n t
-- ^ The passive argument of the action
}
| Pred
-- ^ Predicted the item (we don't really care how).
deriving (Show, Eq, Ord)
-- | Print the item to stdout.
printTrav
:: (Ord n, Ord t, Show n, Show t)
=> Trav n t -> IO ()
printTrav (Scan q) = do
putStr "[S] "
printItem q
putStr ""
printTrav (Comp q p) = do
putStr "[C] "
printItem q
putStr " + "
printItem p
printTrav Pred = do
putStr "[P]"
--------------------------------------------------
-- Traversal set
--------------------------------------------------
-- | Traversal set preserves information about the traversals
-- leading to the given chart item.
type TravSet n t = S.Set (Trav n t)
-- -- | Join two traversal sets.
-- joinTravSet
-- :: (Ord n, Ord t)
-- => (ID, TravSet n t)
-- -> (ID, TravSet n t)
-- -> (ID, TravSet n t)
-- joinTravSet (x1, x2) (y1, y2) = (max x1 y1, S.union x2 y2)
--------------------------------------------------
-- Earley monad
--------------------------------------------------
-- | A hypergraph dynamically constructed during parsing.
-- Plus some static information.
data Hype n t = Hype
{ gram :: M.Map n (S.Set [Either n t])
-- ^ The set of grammar rules
, done :: M.Map (Item n t) (TravSet n t)
-- ^ The set of *processed* chart items.
, queue :: M.Map (Item n t) (TravSet n t)
-- ^ The set of *waiting* chart items.
, idMap :: M.Map ID (Item n t)
-- ^ The map from IDs to items.
}
-- -- | Print the chart/hypergraph.
-- printHype
-- :: (Ord n, Ord t, Show n, Show t)
-- => Hype n t
-- -> IO ()
-- printHype Hype{..} = do
-- forM_ (M.toList idMap) $ \(i, q) -> do
-- putStr "[" >> putStr (show i) >> putStr "] "
-- printItem q
-- putStrLn ""
-- | Print single column of the chart/hypergraph.
printColumn
:: (Ord n, Ord t, Show n, Show t)
=> Pos -- ^ Which column?
-> Bool -- ^ Verbose?
-> Hype n t
-> IO ()
printColumn k verbose hype@Hype{..} = do
forM_ (M.toList idMap) $ \(i, q) -> when (end q == k) $ do
if M.member q done
then do
putStr "#"
putStr (show i)
putStr "# "
else do
putStr "<"
putStr (show i)
putStr "> "
printItem q
putStrLn ""
when verbose $ do
forM_ (travList hype q) $ \t -> when (t /= Pred) $ do
putStr " * " >> printTrav t >> putStrLn ""
-- | Earley parser monad. Contains the input sentence (reader)
-- and the state of the computation `Hype'.
type Earley n t = RWS.RWST [S.Set t] () (Hype n t) IO
-- | Read word from the given position of the input.
readInput :: Pos -> P.ListT (Earley n t) (S.Set t)
readInput i = do
-- ask for the input
xs <- RWS.ask
-- just a safe way to retrieve the i-th element
each . take 1 . drop i $ xs
-- | List all rules with the given head non-terminal.
withHead :: (Ord n) => n -> P.ListT (Earley n t) [Either n t]
withHead x = do
g <- RWS.gets gram
each . maybe [] S.toList $ M.lookup x g
-- | Processed items which expect the given symbol and end on the
-- given position.
expectEnd :: (Ord n, Ord t) => n -> Pos -> P.ListT (Earley n t) (Item n t)
expectEnd x i = do
m <- RWS.gets done
q <- each (M.keys m)
(Left nonTerm, _) <- some (expects q)
RWS.guard (nonTerm == x && end q == i)
return q
-- | Processed, passive items which provide the given symbol and
-- begin on the given position.
doneBeg :: (Ord n, Ord t) => n -> Pos -> P.ListT (Earley n t) (Item n t)
doneBeg x i = do
m <- RWS.gets done
p <- each (M.keys m)
RWS.guard (passive p)
RWS.guard (ihead p == x && beg p == i)
return p
-- | Check if the given item is "done" (processed).
isDone :: (Ord n, Ord t) => Item n t -> Earley n t Bool
isDone q = M.member q <$> RWS.gets done
-- | Check if the given item is waiting.
isWait :: (Ord n, Ord t) => Item n t -> Earley n t Bool
isWait q = M.member q <$> RWS.gets queue
-- | Put an axiom item to the hypergraph's queue.
push0 :: (Ord n, Ord t) => Item n t -> Earley n t ()
push0 q = do
-- i <- M.size <$> RWS.gets idMap
-- RWS.modify' $ \h -> h
-- { queue = M.insert q (S.singleton Pred) (queue h)
-- , idMap = M.insert i q (idMap h) }
wt <- isWait q
RWS.modify' $ \h -> h
{ queue = M.insert q (S.singleton Pred) (queue h) }
i <- M.size <$> RWS.gets idMap
RWS.unless wt . RWS.modify' $ \h ->
h {idMap = M.insert i q (idMap h)}
-- | Put an item to the hypergraph, together with the corresponding
-- traversal.
push :: (Ord n, Ord t) => Item n t -> Trav n t -> Earley n t ()
push q trav = do
dn <- isDone q
if dn then RWS.modify' $ \h -> h
{ done = M.insertWith
S.union q
(S.singleton trav)
(done h) }
else do
wt <- isWait q
RWS.modify' $ \h -> h
{ queue = M.insertWith
S.union q
(S.singleton trav)
(queue h) }
i <- M.size <$> RWS.gets idMap
RWS.unless wt . RWS.modify' $ \h ->
h {idMap = M.insert i q (idMap h)}
-- | Move the item from the queue to the set of processed (done)
-- items.
shift :: (Ord n, Ord t) => Item n t -> Earley n t ()
shift q = do
mayTrav <- M.lookup q <$> RWS.gets queue
case mayTrav of
Nothing -> return ()
Just tr -> RWS.modify' $ \h -> h
{ done = M.insert q tr (done h)
, queue = M.delete q (queue h) }
--------------------------------------------------
-- Parsed trees
--------------------------------------------------
-- | Get the forest of parsed trees for a given item.
parsedTrees
:: (Ord n, Ord t)
=> Hype n t
-> Item n t
-> [R.Forest (Either n t)]
parsedTrees h@Hype{..} =
fromActive
where
fromActive q = concatMap
(fromActiveTrav q)
(travList h q)
fromActiveTrav _ (Scan q) =
[ ts ++ [mkLeaf t]
| t <- take 1 (right q)
, ts <- fromActive q ]
fromActiveTrav _ (Comp q p) =
[ ts ++ [t]
| ts <- fromActive q
, t <- fromPassive p ]
fromActiveTrav _ Pred = [[]]
fromPassive q =
[ R.Node (Left $ ihead q) ts
| ts <- fromActive q ]
mkLeaf x = R.Node x []
-- | Get the list of traversals for the given item.
travList
:: (Ord n, Ord t)
=> Hype n t
-> Item n t
-> [Trav n t]
travList Hype{..} q = maybe [] S.toList $
msum [M.lookup q done, M.lookup q queue]
--------------------------------------------------
-- Inference rules
--------------------------------------------------
-- | Apply the axiom rule given the non-terminal we wish to
-- recognize.
axiom :: (Ord n) => n -> P.ListT (Earley n t) (Item n t)
axiom nonTerm = do
body <- withHead nonTerm
return Item
{ ihead = nonTerm
, left = []
, right = body
, beg = 0
, end = 0 }
-- | Try to predict new items from the given item.
predict :: (Ord n) => Item n t -> P.ListT (Earley n t) (Item n t)
predict q = do
(Left nonTerm, _) <- some (expects q)
body <- withHead nonTerm
return Item
{ ihead = nonTerm
, left = []
, right = body
, beg = end q
, end = end q }
-- | Try to scan for the given item.
scan :: (Ord t) => Item n t -> P.ListT (Earley n t) (Item n t)
scan q = do
(Right term, rest) <- some (expects q)
termSet <- readInput (end q)
RWS.guard $ S.member term termSet
return q
{ left = Right term : left q
, right = rest
, end = end q + 1 }
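-- For instance, scanning an item for S -> NP * "the" N spanning positions
-- 0..1 against an input whose position-1 token set contains "the" moves the
-- dot over the terminal and yields S -> NP "the" * N spanning 0..2; if the
-- terminal is absent, the ListT computation produces no item.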
-- | Try to complete for the given item. Return the (active) items
-- matching on the left.
matchLeft
:: (Ord n, Ord t)
=> Item n t
-> P.ListT (Earley n t) (Item n t)
matchLeft p = do
RWS.guard (passive p)
expectEnd (ihead p) (beg p)
-- | Try to complete for the given item. Return the (passive) items
-- matching on the right.
matchRight
:: (Ord n, Ord t)
=> Item n t
-> P.ListT (Earley n t) (Item n t)
matchRight q = do
(Left nonTerm, _) <- some (expects q)
doneBeg nonTerm (end q)
-- | Complete one item with another one.
complete
:: (Ord n, Ord t)
=> Item n t -- ^ Active item
-> Item n t -- ^ Passive item
-> P.ListT (Earley n t) (Item n t)
complete q p = do
(Left nonTerm, rest) <- some (expects q)
RWS.guard
( passive p
&& nonTerm == ihead p
&& end q == beg p )
return q
{ left = Left nonTerm : left q
, right = rest
, end = end p }
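-- For instance, completing the active item S -> NP * VP spanning 0..2 with
-- the passive item VP -> V NP * spanning 2..5 moves the dot over VP and
-- yields S -> NP VP * spanning 0..5; the guard rejects pairs whose spans or
-- non-terminals do not line up.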
-- | Process the item under the given ID.
proc
:: (Ord n, Ord t, Show n, Show t)
=> ID -> Earley n t ()
proc i = do
mayQ <- M.lookup i <$> RWS.gets idMap
case mayQ of
Nothing -> liftIO $ putStrLn "<<no such item>>"
Just q -> do
b <- isDone q
if b then do
liftIO $ putStrLn "<<done>>"
else do
shift q
P.runListT $ do
p <- predict q
lift (push p Pred)
liftIO $ do
putStr "[P] "
printItem p
putStrLn ""
P.runListT $ do
p <- scan q
liftIO $ do
putStr "[S] "
printItem p
putStrLn ""
lift (push p $ Scan q)
P.runListT $ do
p <- matchLeft q
liftIO $ do
putStr "[C] "
printItem p
putStr " => "
q' <- complete p q
liftIO $ do
printItem q'
putStrLn ""
lift (push q' $ Comp p q)
P.runListT $ do
p <- matchRight q
liftIO $ do
putStr "COMPLETE "
printItem p
putStr ": "
q' <- complete q p
liftIO $ do
printItem q'
putStrLn ""
lift (push q' $ Comp q p)
--------------------------------------------------
-- Interactive
--------------------------------------------------
-- | Command for the interactive mode.
data Command n
= Print Pos Bool
-- ^ Print a specific chart column
| Axiom n
-- ^ Apply the axiom rule for the given start non-terminal
| Quick Pos
-- ^ Process the entire column
| Proc ID
-- ^ Process a specific item
| Forest ID
-- ^ Parse forest for a specific item
optColumn :: Parser Pos
optColumn = argument auto
( metavar "COLUMN"
<> help "Chart column" )
optVerbose :: Parser Bool
optVerbose = switch
( short 'v'
<> long "verbose"
<> help "Verbose" )
optID :: Parser ID
optID = argument auto
( metavar "ID"
<> help "Chart item ID" )
optNonTerm :: Read n => Parser n
optNonTerm = argument auto
( metavar "NON-TERM"
<> help "Start non-terminal" )
opts :: Read n => Parser (Command n)
opts = subparser
( command "print"
(info (helper <*> (Print <$> optColumn <*> optVerbose))
(progDesc "Print the chart column")
)
<> command "process"
(info (helper <*> (Proc <$> optID))
(progDesc "Process the specified item")
)
<> command "forest"
(info (helper <*> (Forest <$> optID))
(progDesc "Print parsed forest for the specified item")
)
<> command "quick"
(info (helper <*> (Quick <$> optColumn))
(progDesc "Process the entire column")
)
<> command "axiom"
(info (helper <*> (Axiom <$> optNonTerm))
(progDesc "Axiom with the given non-terminal")
)
)
-- | Run the given command.
run :: (Ord n, Ord t, Show n, Show t)
=> Command n -> Earley n t ()
run (Print k v) = do
h <- RWS.get
liftIO $ printColumn k v h
run (Axiom start) = P.runListT $ do
q <- axiom start
lift (push0 q)
run (Proc i) = proc i
run (Forest i) = do
mayQ <- M.lookup i <$> RWS.gets idMap
case mayQ of
Nothing -> liftIO $ putStrLn "<<no such item>>"
Just q -> do
h <- RWS.get
let tss = parsedTrees h q
forM_ tss $ \ts -> liftIO $ do
putStr . R.drawForest . map (fmap show) $ ts
putStrLn "----------------------------------"
run (Quick k) = do
m <- RWS.gets idMap
forM_ (M.toList m) $ \(i, q) -> do
b <- isWait q
when (b && end q == k) $ do
liftIO $ do
putStr "> "
printItem q
putStrLn ""
proc i
-- | Main loop.
loop
:: (Ord n, Ord t, Show n, Show t, Read n)
=> Earley n t ()
loop = do
    liftIO $ do
        putStr "> "
        hFlush stdout
    line <- liftIO getLine
    let res = execParserPure defaultPrefs
                optsExt (words line)
    -- case getParseResult res of
    --   -- Nothing -> liftIO $ putStrLn "<<unknown command>>"
    --   -- Nothing -> void . liftIO $ handleParseResult res
    --   Just cmd -> run cmd
    case res of
        Success cmd -> run cmd
        Failure failure -> liftIO $ do
            putStrLn . fst $ renderFailure failure ""
            putStrLn ""
            putStrLn "Available commands: axiom, print, process, forest, quick"
        _ -> return ()
    loop
  where
    optsExt = info (helper <*> opts) fullDesc
-- ( fullDesc
-- <> progDesc "Earley facile"
-- <> header "earley-facile" )
-- | Run the parser on the given grammar and the given input.
runEarley
:: (Ord n, Ord t, Show n, Show t, Read n)
=> [(n, [Either n t])] -- ^ The grammar
-> [[t]] -- ^ The input
-> IO ()
runEarley rules input = void $
RWS.execRWST loop (map S.fromList input) $ Hype
{ gram = M.fromListWith S.union
[ (hd, S.singleton bd)
| (hd, bd) <- rules ]
, done = M.empty
, queue = M.empty
, idMap = M.empty }
-- | Run the parser on the given grammar and the given input.
runEarley'
:: (Ord n, Ord t, Ord p, Show n, Show t, Show p, Read n)
=> G.CFG n p t
-> [t] -- ^ The input
-> IO ()
runEarley' cfg sent = void $
RWS.execRWST loop input $ Hype
{ gram = M.fromListWith S.union
[ (hd, S.singleton bd)
| (hd, bd) <- S.toList (G.rules cfg) ]
, done = M.empty
, queue = M.empty
, idMap = M.empty }
where
input =
let getPOS t = maybe S.empty id . M.lookup t $ G.lexicon cfg
in map getPOS sent
--------------------------------------------------
-- Utilities
--------------------------------------------------
-- | Deconstruct list. Utility function. Similar to `unCons`.
decoList :: [a] -> Maybe (a, [a])
decoList [] = Nothing
decoList (y:ys) = Just (y, ys)
-- -- | MaybeT transformer.
-- maybeT :: Monad m => Maybe a -> MaybeT m a
-- maybeT = MaybeT . return
-- | ListT from a list.
each :: Monad m => [a] -> P.ListT m a
each = P.Select . P.each
-- | ListT from a maybe.
some :: Monad m => Maybe a -> P.ListT m a
some = each . maybeToList
-- | Showing labels.
showLab :: (Show n, Show t) => Either n t -> String
showLab (Left x) = show x
showLab (Right x) = show x
|
kawu/earley-facile
|
src/NLP/EarleyFacile.hs
|
bsd-2-clause
| 18,171
| 1
| 22
| 6,132
| 5,571
| 2,798
| 2,773
| 421
| 3
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QLineEdit.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:36
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QLineEdit (
EchoMode, eNoEcho, ePassword, ePasswordEchoOnEdit
)
where
import Foreign.C.Types
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
data CEchoMode a = CEchoMode a
type EchoMode = QEnum(CEchoMode Int)
ieEchoMode :: Int -> EchoMode
ieEchoMode x = QEnum (CEchoMode x)
instance QEnumC (CEchoMode Int) where
qEnum_toInt (QEnum (CEchoMode x)) = x
qEnum_fromInt x = QEnum (CEchoMode x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> EchoMode -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
instance QeNormal EchoMode where
eNormal
= ieEchoMode $ 0
eNoEcho :: EchoMode
eNoEcho
= ieEchoMode $ 1
ePassword :: EchoMode
ePassword
= ieEchoMode $ 2
ePasswordEchoOnEdit :: EchoMode
ePasswordEchoOnEdit
= ieEchoMode $ 3
|
keera-studios/hsQt
|
Qtc/Enums/Gui/QLineEdit.hs
|
bsd-2-clause
| 2,472
| 0
| 18
| 544
| 629
| 322
| 307
| 58
| 1
|
{-# LANGUAGE Haskell2010 #-}
module Bug458 where
-- | See the defn of @'⊆'@.
(⊆) :: () -> () -> ()
_ ⊆ _ = ()
|
haskell/haddock
|
html-test/src/Bug458.hs
|
bsd-2-clause
| 118
| 0
| 7
| 28
| 40
| 23
| 17
| 4
| 1
|
{-# LANGUAGE TemplateHaskell, TupleSections, FlexibleContexts #-}
module Syscall ( Syscall(..)
, scName
, scArgs
, scRet
, Argument(..)
, parseSyscall
) where
import Control.Monad
import Control.Applicative ((<*), (<$>), (<*>))
import Data.List
import Data.Char
import qualified Data.ByteString.Lazy.Char8 as BL
import Text.Parsec.Prim
import Text.Parsec.Char
import Text.Parsec.Combinator
import Text.Parsec.ByteString.Lazy
import qualified Text.Parsec.Token as T
import qualified Text.Parsec.Language as T
import Data.Accessor.Template
data Argument = NumLiteral Int
| StrLiteral String
| Mask [String]
| Labelled String Argument
| Object [Argument]
deriving (Show)
data Syscall = Syscall { scName_ :: String
, scArgs_ :: [Argument]
, scRet_ :: Int
, scErrno_ :: Maybe (String, String)
} deriving (Show)
$(deriveAccessors ''Syscall)
def :: Stream s m Char => T.GenTokenParser s u m
def = T.makeTokenParser $ T.emptyDef { T.identStart = letter
, T.identLetter = alphaNum <|> char '_'
, T.opStart = oneOf "=.|"
, T.opLetter = oneOf "=.|"
, T.reservedOpNames = ["=", "...", "|"]
}
tOp :: String -> Parser ()
tOp = T.reservedOp def
tString :: Parser String
tString = T.stringLiteral def <* optional (tOp "...")
tId :: Parser String
tId = T.identifier def <|> (T.lexeme def $ string "@")
tNum :: Parser Int
tNum = fromIntegral <$> T.integer def
labelled :: Parser (String, Argument)
labelled = do
l <- tId
tOp "="
v <- argument
return (l, v)
mask :: Parser [String]
mask = tId `sepBy1` tOp "|"
object :: Parser [Argument]
object = T.braces def cont <|> T.brackets def cont
where
cont = T.commaSep def argument
argument :: Parser Argument
argument =
(try tNum >>= return . NumLiteral) <|>
(try labelled >>= return . uncurry Labelled) <|>
(try mask >>= return . Mask) <|>
(try tString >>= return . StrLiteral) <|>
(object >>= return . Object)
errnoStr :: Parser String
errnoStr = T.lexeme def $ do
char '('
content <- many1 $ noneOf ")"
char ')'
return content
syscall :: Parser Syscall
syscall = do
spaces
scName <- tId
args <- T.parens def $ T.commaSep def argument
tOp "="
retc <- tNum
errno <- optionMaybe $ (,) <$> tId <*> errnoStr
eof
return $ Syscall scName args retc errno
parseSyscall :: BL.ByteString -> Either String Syscall
parseSyscall s = case parse syscall "" s of
Left e -> Left $ show e
Right s -> Right s
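-- Illustrative sketch (untested, not part of the original module): a
-- strace-style line such as
--
--   open("/etc/passwd", O_RDONLY|O_CLOEXEC) = 3
--
-- fed to 'parseSyscall' as a lazy ByteString should come back roughly as
--
--   Right (Syscall "open"
--                  [StrLiteral "/etc/passwd", Mask ["O_RDONLY","O_CLOEXEC"]]
--                  3 Nothing)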
|
ratatosk/traceblade
|
Syscall.hs
|
bsd-3-clause
| 2,838
| 0
| 12
| 894
| 893
| 476
| 417
| 83
| 2
|
{-# LANGUAGE CPP #-}
import Control.Shell
import Data.Bits
import System.Info (os)
import Control.Monad
import System.Environment (getArgs)
import System.Exit
inBuildDir :: [String] -> Shell a -> Shell a
inBuildDir args act = do
srcdir <- pwd
isdir <- isDirectory "_build"
when (isdir && not ("no-rebuild" `elem` args)) $ rmdir "_build"
mkdir True "_build"
inDirectory "_build" $ do
unless ("no-rebuild" `elem` args) $ run_ "git" ["clone", srcdir] ""
inDirectory "haste-compiler" act
-- Packages will end up in ghc-$GHC_MAJOR.$GHC_MINOR. If the directory does
-- not exist, it is created. If the package already exists in that directory,
-- it is overwritten.
main = do
args <- fixAllArg `fmap` getArgs
when (null args) $ do
putStrLn $ "Usage: runghc build-release.hs [no-rebuild|in-place] formats\n"
putStrLn $ "Supported formats: deb, tarball, 7z, all\n"
putStrLn $ "no-rebuild\n Repackage whatever is already in the " ++
"_build directory\n instead of rebuilding from scratch."
putStrLn $ "in-place\n Build package in current directory.\n" ++
" Packages end up in ghc-$GHC_MAJOR.$GHC_MINOR."
exitFailure
when ("--debghcdeps" `elem` args) $ do
putStr "ghc"
exitSuccess
let inplace = "in-place" `elem` args
chdir = if inplace then id else inBuildDir args
res <- shell $ do
chdir $ do
(ver, ghcver) <- if ("no-rebuild" `elem` args)
then do
getVersions
else do
vers <- buildPortable
bootPortable
return vers
let (major, '.':rest) = break (== '.') ghcver
(minor, _) = break (== '.') rest
outdir
| inplace = "ghc-" ++ major ++ "." ++ minor
| otherwise = ".." </> ".." </> ("ghc-" ++ major ++ "." ++ minor)
mkdir True outdir
when ("tarball" `elem` args) $ do
tar <- buildBinaryTarball ver ghcver
mv tar (outdir </> tar)
when ("7z" `elem` args) $ do
f <- buildBinary7z ver ghcver
mv f (outdir </> f)
when ("deb" `elem` args) $ do
deb <- buildDebianPackage ver ghcver
mv (".." </> deb) (outdir </> deb)
case res of
Left err -> error $ "FAILED: " ++ err
_ -> return ()
where
fixAllArg args | "all" `elem` args = "deb" : "tarball" : "7z" : args
| otherwise = args
buildPortable = do
-- Build compiler
run_ "cabal" ["configure", "-f", "portable", "-f", "static"] ""
run_ "cabal" ["haddock"] ""
run_ "dist/setup/setup" ["build"] ""
-- Copy docs
cpDir "dist/doc/html/haste-compiler" "haste-compiler/docs"
-- Strip symbols
case os of
"mingw32" -> do
-- windows
run_ "strip" ["-s", "haste-compiler\\bin\\haste-pkg.exe"] ""
run_ "strip" ["-s", "haste-compiler\\bin\\hastec.exe"] ""
run_ "strip" ["-s", "haste-compiler\\bin\\haste-cat.exe"] ""
"linux" -> do
-- linux
run_ "strip" ["-s", "haste-compiler/bin/haste-pkg"] ""
run_ "strip" ["-s", "haste-compiler/bin/hastec"] ""
run_ "strip" ["-s", "haste-compiler/bin/haste-cat"] ""
_ -> do
-- darwin
run_ "strip" ["haste-compiler/bin/haste-pkg"] ""
run_ "strip" ["haste-compiler/bin/hastec"] ""
run_ "strip" ["haste-compiler/bin/haste-cat"] ""
-- Get versions
getVersions
getVersions = do
ver <- fmap init $ run "haste-compiler/bin/hastec" ["--version"] ""
ghcver <- fmap init $ run "ghc" ["--numeric-version"] ""
return (ver, ghcver)
bootPortable = do
-- Build libs
run_ "haste-compiler/bin/haste-boot" ["--force", "--initial"] ""
-- Remove unnecessary binaries
case os of
"mingw32" -> do
-- windows
rm "haste-compiler\\bin\\haste-boot.exe"
rm "haste-compiler\\bin\\haste-copy-pkg.exe"
rm "haste-compiler\\bin\\haste-install-his.exe"
_ -> do
-- linux/darwin
rm "haste-compiler/bin/haste-boot"
rm "haste-compiler/bin/haste-copy-pkg"
rm "haste-compiler/bin/haste-install-his"
forEachFile "haste-compiler" $ \f -> do
when ((f `hasExt` ".o") || (f `hasExt` ".a")) $ rm f
where
f `hasExt` e = takeExtension f == e
buildBinaryTarball ver ghcver = do
-- Get versions and create binary tarball
run_ "tar" ["-cjf", tarball, "haste-compiler"] ""
return tarball
where
tarball =
concat ["haste-compiler-",ver,"_ghc-",ghcver,"-",os,".tar.bz2"]
buildBinary7z ver ghcver = do
-- Get versions and create binary tarball
run_ "7z" ["a", "-i!haste-compiler", name] ""
return $ name
where
name =
concat ["haste-compiler-",ver,"_ghc-",ghcver,"-",os,".7z"]
arch :: String
arch = "amd64" -- only amd64 supported
-- Debian packaging based on https://wiki.debian.org/IntroDebianPackaging.
-- Requires build-essential, devscripts and debhelper.
buildDebianPackage ver ghcver = do
run_ "debuild" ["-e", "LD_LIBRARY_PATH=haste-compiler/haste-cabal",
"-us", "-uc", "-b"] ""
return $ "haste-compiler_" ++ ver ++ "_" ++ arch ++ ".deb"
|
jtojnar/haste-compiler
|
build-release.hs
|
bsd-3-clause
| 5,322
| 0
| 22
| 1,505
| 1,379
| 693
| 686
| 113
| 4
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
module Distribution.Client.GlobalFlags (
GlobalFlags(..)
, defaultGlobalFlags
, RepoContext(..)
, withRepoContext
) where
import Distribution.Client.Types
( Repo(..), RemoteRepo(..) )
import Distribution.Simple.Setup
( Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe )
import Distribution.Utils.NubList
( NubList, fromNubList )
import Distribution.Client.HttpUtils
( HttpTransport, configureTransport )
import Distribution.Verbosity
( Verbosity )
import Distribution.Simple.Utils
( info )
import Control.Concurrent
( MVar, newMVar, modifyMVar )
import Control.Exception
( throwIO )
import Control.Monad
( when )
import System.FilePath
( (</>) )
import Network.URI
( uriScheme, uriPath )
import Data.Map
( Map )
import qualified Data.Map as Map
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
( Monoid(..) )
#endif
import qualified Hackage.Security.Client as Sec
import qualified Hackage.Security.Util.Path as Sec
import qualified Hackage.Security.Util.Pretty as Sec
import qualified Hackage.Security.Client.Repository.Cache as Sec
import qualified Hackage.Security.Client.Repository.Local as Sec.Local
import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote
import qualified Distribution.Client.Security.HTTP as Sec.HTTP
-- ------------------------------------------------------------
-- * Global flags
-- ------------------------------------------------------------
-- | Flags that apply at the top level, not to any sub-command.
data GlobalFlags = GlobalFlags {
globalVersion :: Flag Bool,
globalNumericVersion :: Flag Bool,
globalConfigFile :: Flag FilePath,
globalSandboxConfigFile :: Flag FilePath,
globalConstraintsFile :: Flag FilePath,
globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers.
globalCacheDir :: Flag FilePath,
globalLocalRepos :: NubList FilePath,
globalLogsDir :: Flag FilePath,
globalWorldFile :: Flag FilePath,
globalRequireSandbox :: Flag Bool,
globalIgnoreSandbox :: Flag Bool,
globalIgnoreExpiry :: Flag Bool, -- ^ Ignore security expiry dates
globalHttpTransport :: Flag String
}
defaultGlobalFlags :: GlobalFlags
defaultGlobalFlags = GlobalFlags {
globalVersion = Flag False,
globalNumericVersion = Flag False,
globalConfigFile = mempty,
globalSandboxConfigFile = mempty,
globalConstraintsFile = mempty,
globalRemoteRepos = mempty,
globalCacheDir = mempty,
globalLocalRepos = mempty,
globalLogsDir = mempty,
globalWorldFile = mempty,
globalRequireSandbox = Flag False,
globalIgnoreSandbox = Flag False,
globalIgnoreExpiry = Flag False,
globalHttpTransport = mempty
}
instance Monoid GlobalFlags where
mempty = GlobalFlags {
globalVersion = mempty,
globalNumericVersion = mempty,
globalConfigFile = mempty,
globalSandboxConfigFile = mempty,
globalConstraintsFile = mempty,
globalRemoteRepos = mempty,
globalCacheDir = mempty,
globalLocalRepos = mempty,
globalLogsDir = mempty,
globalWorldFile = mempty,
globalRequireSandbox = mempty,
globalIgnoreSandbox = mempty,
globalIgnoreExpiry = mempty,
globalHttpTransport = mempty
}
mappend a b = GlobalFlags {
globalVersion = combine globalVersion,
globalNumericVersion = combine globalNumericVersion,
globalConfigFile = combine globalConfigFile,
    globalSandboxConfigFile = combine globalSandboxConfigFile,
globalConstraintsFile = combine globalConstraintsFile,
globalRemoteRepos = combine globalRemoteRepos,
globalCacheDir = combine globalCacheDir,
globalLocalRepos = combine globalLocalRepos,
globalLogsDir = combine globalLogsDir,
globalWorldFile = combine globalWorldFile,
globalRequireSandbox = combine globalRequireSandbox,
globalIgnoreSandbox = combine globalIgnoreSandbox,
globalIgnoreExpiry = combine globalIgnoreExpiry,
globalHttpTransport = combine globalHttpTransport
}
where combine field = field a `mappend` field b
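-- Illustrative sketch (added; assumes the usual right-biased 'Flag' Monoid
-- from Distribution.Simple.Setup): the field-wise 'mappend' above lets the
-- second argument override any flag it actually sets, e.g.
--
--   globalCacheDir (a `mappend` b) == Flag "/b/cache"
--     where a = mempty { globalCacheDir = Flag "/a/cache" }
--           b = mempty { globalCacheDir = Flag "/b/cache" }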
-- ------------------------------------------------------------
-- * Repo context
-- ------------------------------------------------------------
-- | Access to repositories
data RepoContext = RepoContext {
-- | All user-specified repositories
repoContextRepos :: [Repo]
-- | Get the HTTP transport
--
-- The transport will be initialized on the first call to this function.
--
-- NOTE: It is important that we don't eagerly initialize the transport.
-- Initializing the transport is not free, and especially in contexts where
-- we don't know a-priori whether or not we need the transport (for instance
-- when using cabal in "nix mode") incurring the overhead of transport
-- initialization on _every_ invocation (eg @cabal build@) is undesirable.
, repoContextGetTransport :: IO HttpTransport
-- | Get the (initialized) secure repo
--
-- (the 'Repo' type itself is stateless and must remain so, because it
-- must be serializable)
, repoContextWithSecureRepo :: forall a.
Repo
-> (forall down. Sec.Repository down -> IO a)
-> IO a
-- | Should we ignore expiry times (when checking security)?
, repoContextIgnoreExpiry :: Bool
}
-- | Wrapper around 'Repository', hiding the type argument
data SecureRepo = forall down. SecureRepo (Sec.Repository down)
withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a
withRepoContext verbosity globalFlags = \callback -> do
transportRef <- newMVar Nothing
let httpLib = Sec.HTTP.transportAdapter
verbosity
(getTransport transportRef)
initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' ->
callback RepoContext {
repoContextRepos = allRemoteRepos ++ localRepos
, repoContextGetTransport = getTransport transportRef
, repoContextWithSecureRepo = withSecureRepo secureRepos'
, repoContextIgnoreExpiry = fromFlagOrDefault False
(globalIgnoreExpiry globalFlags)
}
where
secureRemoteRepos =
[ (remote, cacheDir)
| RepoSecure remote cacheDir <- allRemoteRepos ]
allRemoteRepos =
[ case remoteRepoSecure remote of
Just True -> RepoSecure remote cacheDir
_otherwise -> RepoRemote remote cacheDir
| remote <- fromNubList $ globalRemoteRepos globalFlags
, let cacheDir = fromFlag (globalCacheDir globalFlags)
</> remoteRepoName remote ]
localRepos =
[ RepoLocal local
| local <- fromNubList $ globalLocalRepos globalFlags ]
getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport
getTransport transportRef =
modifyMVar transportRef $ \mTransport -> do
transport <- case mTransport of
Just tr -> return tr
Nothing -> configureTransport
verbosity
(flagToMaybe (globalHttpTransport globalFlags))
return (Just transport, transport)
withSecureRepo :: Map Repo SecureRepo
-> Repo
-> (forall down. Sec.Repository down -> IO a)
-> IO a
withSecureRepo secureRepos repo callback =
case Map.lookup repo secureRepos of
Just (SecureRepo secureRepo) -> callback secureRepo
Nothing -> throwIO $ userError "repoContextWithSecureRepo: unknown repo"
-- | Initialize the provided secure repositories
--
-- Assumed invariant: `remoteRepoSecure` should be set for all these repos.
initSecureRepos :: forall a. Verbosity
-> Sec.HTTP.HttpLib
-> [(RemoteRepo, FilePath)]
-> (Map Repo SecureRepo -> IO a)
-> IO a
initSecureRepos verbosity httpLib repos callback = go Map.empty repos
where
go :: Map Repo SecureRepo -> [(RemoteRepo, FilePath)] -> IO a
go !acc [] = callback acc
go !acc ((r,cacheDir):rs) = do
cachePath <- Sec.makeAbsolute $ Sec.fromFilePath cacheDir
initSecureRepo verbosity httpLib r cachePath $ \r' ->
go (Map.insert (RepoSecure r cacheDir) r' acc) rs
-- | Initialize the given secure repo
--
-- The security library has its own concept of a "local" repository, distinct
-- from @cabal-install@'s; these are secure repositories, but live in the local
-- file system. We use the convention that these repositories are identified by
-- URLs of the form @file:/path/to/local/repo@.
initSecureRepo :: Verbosity
-> Sec.HTTP.HttpLib
-> RemoteRepo -- ^ Secure repo ('remoteRepoSecure' assumed)
-> Sec.Path Sec.Absolute -- ^ Cache dir
-> (SecureRepo -> IO a) -- ^ Callback
-> IO a
initSecureRepo verbosity httpLib RemoteRepo{..} cachePath = \callback -> do
withRepo $ \r -> do
requiresBootstrap <- Sec.requiresBootstrap r
when requiresBootstrap $ Sec.uncheckClientErrors $
Sec.bootstrap r
(map Sec.KeyId remoteRepoRootKeys)
(Sec.KeyThreshold (fromIntegral remoteRepoKeyThreshold))
callback $ SecureRepo r
where
-- Initialize local or remote repo depending on the URI
withRepo :: (forall down. Sec.Repository down -> IO a) -> IO a
withRepo callback | uriScheme remoteRepoURI == "file:" = do
dir <- Sec.makeAbsolute $ Sec.fromFilePath (uriPath remoteRepoURI)
Sec.Local.withRepository dir
cache
Sec.hackageRepoLayout
Sec.hackageIndexLayout
logTUF
callback
withRepo callback =
Sec.Remote.withRepository httpLib
[remoteRepoURI]
Sec.Remote.defaultRepoOpts
cache
Sec.hackageRepoLayout
Sec.hackageIndexLayout
logTUF
callback
cache :: Sec.Cache
cache = Sec.Cache {
cacheRoot = cachePath
, cacheLayout = Sec.cabalCacheLayout
}
-- We display any TUF progress only in verbose mode, including any transient
-- verification errors. If verification fails, then the final exception that
-- is thrown will of course be shown.
logTUF :: Sec.LogMessage -> IO ()
logTUF = info verbosity . Sec.pretty
|
edsko/cabal
|
cabal-install/Distribution/Client/GlobalFlags.hs
|
bsd-3-clause
| 11,226
| 0
| 19
| 3,208
| 2,000
| 1,119
| 881
| 210
| 4
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PatternGuards #-}
module Data.Grid where
import Data.Points
import Util
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Lens
import Control.Monad
import Data.List (maximumBy, minimumBy, nub)
import qualified Data.Map as M
import qualified Data.Foldable as F
import qualified Data.Traversable as T
newtype Grid c a = Grid
{ grid :: M.Map (Coord c) a
} deriving (Eq,Show,Functor,F.Foldable,T.Traversable)
type Coords c = Grid c ()
csFromList :: (Ord c) => [Coord c] -> Coords c
csFromList cs = gridFromList $ zip cs $ repeat ()
-- Grid {{{
gridFromMap :: M.Map (Coord c) a -> Grid c a
gridFromMap = Grid
emptyGrid :: Grid c a
emptyGrid = gridFromMap M.empty
mkRepeatGrid :: (DiscreteCoord c) => Size c -> a -> Grid c a
mkRepeatGrid sz a = gridFromList $ zip cs $ repeat a
where
cs = allCoords sz
mkIotaGrid :: (DiscreteCoord c) => Size c -> Grid c TileIndex
mkIotaGrid sz = gridFromList $ zip cs [0..]
where
cs = allCoords sz
gridInsert :: (Ord c) => Coord c -> a -> Grid c a -> Grid c a
gridInsert c = gridOnMap . M.insert c
gridUnion :: (Ord c) => Grid c a -> Grid c a -> Grid c a
gridUnion g1 = gridFromMap . M.union (grid g1) . grid
gridUnions :: (Ord c) => [Grid c a] -> Grid c a
gridUnions = gridFromMap . M.unions . map grid
gridSize :: (CoordType c) => Grid c a -> Size c
gridSize =
(+) 1
. uncurry (-)
. over both (view coordSize)
. (safeMaximumV2 0 &&& safeMinimumV2 0)
. gridCoords
gridRows, gridCols :: (CoordType c) => Grid c a -> c
gridRows = view height . gridSize
gridCols = view width . gridSize
gridOnMap :: (M.Map (Coord c) a -> M.Map (Coord c) b) -> Grid c a -> Grid c b
gridOnMap f g = g { grid = f $ grid g }
gridOnMapA :: (Applicative f) => (M.Map (Coord c) a -> f (M.Map (Coord c) b))
-> Grid c a -> f (Grid c b)
gridOnMapA f = fmap gridFromMap . f . grid
gridOnMapM :: (Monad m) => (M.Map (Coord c) a -> m (M.Map (Coord c) b))
-> Grid c a -> m (Grid c b)
gridOnMapM f = return . gridFromMap <=< (f . grid)
gridLookup :: (CoordType c) => Grid c a -> Coord c -> Maybe a
gridLookup g c = M.lookup c $ grid g
gridIndex :: (Ord c) => Grid c a -> Coord c -> a
gridIndex g c = grid g M.! c
gridFilter :: (a -> Bool) -> Grid c a -> Grid c a
gridFilter pr = gridOnMap $ M.filter pr
gridFromList :: (Ord c) => [(Coord c,a)] -> Grid c a
gridFromList = gridFromMap . M.fromList
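-- Illustrative property (not part of the original module): a freshly built
-- grid behaves like the underlying 'Data.Map', so for any coordinate c
--
--   gridLookup (gridFromList [(c, 'x')]) c == Just 'x'
--   gridLookup emptyGrid c == Nothing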
gridCoords :: Grid c a -> [Coord c]
gridCoords = M.keys . grid
gridContents :: Grid c a -> [(Coord c,a)]
gridContents = M.assocs . grid
gridList :: (CoordType c) => Grid c a -> [[Maybe a]]
gridList g = map (map $ gridLookup g)
$ coordGrid
$ gridSize g
gridDifference :: (Ord c) => Grid c a -> Grid c b -> Grid c a
gridDifference = gridOnMap . M.difference . grid
gridMinimum, gridMaximum :: (Ord c) => Grid c a -> Maybe (Coord c, a)
gridMinimum = fmap fst . M.minViewWithKey . grid
gridMaximum = fmap fst . M.maxViewWithKey . grid
gridMinimumValue, gridMaximumValue :: (Ord a, Ord c)
=> Grid c a -> Maybe (Coord c, a)
gridMinimumValue g
| cs <- gridContents g
= if null cs
then Nothing
else Just $ minimumBy (compare `on` snd) cs
gridMaximumValue g
| cs <- gridContents g
= if null cs
then Nothing
else Just $ maximumBy (compare `on` snd) cs
-- }}}
-- Key Maps / Traversals {{{
gridKeys :: Grid c a -> [Coord c]
gridKeys = M.keys . grid
gridValues :: Eq a => Grid c a -> [a]
gridValues = nub . M.elems . grid
gridTraverseWithKey :: (Applicative f) => (Coord c -> a -> f b)
-> Grid c a -> f (Grid c b)
gridTraverseWithKey f = gridOnMapA $ M.traverseWithKey f
gridMapKeysTo :: (Coord c -> a) -> Grid c b -> Grid c a
gridMapKeysTo = gridOnMap . mapKeysTo
gridTraverseKeys :: (Applicative f, Ord c)
=> (Coord c -> f a) -> Grid c b -> f (Grid c a)
gridTraverseKeys = gridOnMapA . traverseKeys
filterKeys :: (k -> Bool) -> M.Map k a -> M.Map k a
filterKeys = M.filterWithKey . onlyIndex
mapKeysTo :: (k -> a) -> M.Map k b -> M.Map k a
mapKeysTo = M.mapWithKey . onlyIndex
foldrKeys :: (k -> b -> b) -> b -> M.Map k a -> b
foldrKeys = M.foldrWithKey . onlyIndex
traverseKeys :: (Applicative f, Ord k)
=> (k -> f a) -> M.Map k b -> f (M.Map k a)
traverseKeys = M.traverseWithKey . onlyIndex
gridFoldrWithKey :: (Coord c -> b -> a -> a)
-> a -> Grid c b -> a
gridFoldrWithKey f a = M.foldrWithKey f a . grid
gridFoldrKeys :: (Coord c -> a -> a) -> a -> Grid c b -> a
gridFoldrKeys f a = M.foldrWithKey (const . f) a . grid
-- }}}
-- Selective Update {{{
gridUpdateAt :: (Ord c) => [Coord c] -> (a -> a) -> Grid c a -> Grid c a
gridUpdateAt cs = gridOnMap . updateAt cs
gridUpdateWithKeyAt :: (Ord c) => [Coord c] -> (Coord c -> a -> a)
-> Grid c a -> Grid c a
gridUpdateWithKeyAt cs = gridOnMap . updateWithKeyAt cs
gridUpdateAtM :: (Monad m, Ord c) => [Coord c] -> (a -> m a)
-> Grid c a -> m (Grid c a)
gridUpdateAtM cs = gridOnMapM . updateAtM cs
gridUpdateWithKeyAtM :: (Monad m, Ord c) => [Coord c]
-> (Coord c -> a -> m a) -> Grid c a -> m (Grid c a)
gridUpdateWithKeyAtM cs = gridOnMapM . updateWithKeyAtM cs
updateAt :: (Ord k) => [k] -> (a -> a) -> M.Map k a -> M.Map k a
updateAt ks = updateWithKeyAt ks . const
updateWithKeyAt :: (Ord k) => [k] -> (k -> a -> a)
-> M.Map k a -> M.Map k a
updateWithKeyAt ks f mp = F.foldl fn mp ks
where
fn m k = M.insert k (f k $ m M.! k) m
updateAtM :: (Monad m, Ord k) => [k] -> (a -> m a)
-> M.Map k a -> m (M.Map k a)
updateAtM ks = updateWithKeyAtM ks . const
updateWithKeyAtM :: (Monad m, Ord k) => [k] -> (k -> a -> m a)
-> M.Map k a -> m (M.Map k a)
updateWithKeyAtM ks f mp = F.foldlM fn mp ks
where
fn m k = do
a <- maybe err return $ M.lookup k m
b <- f k a
return $ M.insert k b m
err = fail "key not in map"
-- }}}
-- SubMap {{{
gridSubMap :: (Ord c) => Grid c b -> Grid c a -> Grid c a
gridSubMap cs = gridOnMap $ mSubMap (grid cs)
gridSubMapByValue :: (Eq a, Ord c) => a -> Grid c a -> Grid c a
gridSubMapByValue a = gridOnMap $ subMapByValue a
mSubMap :: (Ord k) => M.Map k b -> M.Map k a -> M.Map k a
mSubMap = filterKeys . flip M.member
subMapByValue :: (Eq a, Ord k) => a -> M.Map k a -> M.Map k a
subMapByValue = M.filter . (==)
-- }}}
-- Pretty Printing {{{
ppGrid :: (CoordType c, Ord a, Show a, Enum a) => Grid c a -> String
ppGrid = ppSparseRows . gridList
-- }}}
|
kylcarte/wangtiles
|
src/Data/Grid.hs
|
bsd-3-clause
| 6,400
| 0
| 13
| 1,460
| 3,130
| 1,589
| 1,541
| -1
| -1
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.Selection
-- Copyright : (c) Sven Panne 2003
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer : sven_panne@yahoo.com
-- Stability : provisional
-- Portability : portable
--
-- This module corresponds to section 5.2 (Selection) of the OpenGL 1.4 specs.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.Selection (
HitRecord(..), getHitRecords,
Name(..), withName, loadName, maxNameStackDepth, nameStackDepth,
RenderMode(..), renderMode
) where
import Control.Monad ( liftM )
import Foreign.Marshal.Array ( allocaArray )
import Foreign.Ptr ( Ptr )
import Graphics.Rendering.OpenGL.GL.BasicTypes (
GLint, GLsizei, GLuint, GLfloat )
import Graphics.Rendering.OpenGL.GL.Exception ( finally )
import Graphics.Rendering.OpenGL.GL.IOState (
IOState, peekIOState, evalIOState, nTimes )
import Graphics.Rendering.OpenGL.GL.QueryUtils (
GetPName(GetMaxNameStackDepth,GetNameStackDepth), getSizei1 )
import Graphics.Rendering.OpenGL.GL.RenderMode (
RenderMode(..), withRenderMode, renderMode )
import Graphics.Rendering.OpenGL.GL.StateVar (
GettableStateVar, makeGettableStateVar )
--------------------------------------------------------------------------------
data HitRecord = HitRecord GLfloat GLfloat [Name]
deriving ( Eq, Ord, Show )
--------------------------------------------------------------------------------
getHitRecords :: GLsizei -> IO a -> IO (a, Maybe [HitRecord])
getHitRecords bufSize action =
allocaArray (fromIntegral bufSize) $ \buf -> do
glSelectBuffer bufSize buf
(value, numHits) <- withRenderMode Select $ do
glInitNames
action
hits <- parseSelectionBuffer numHits buf
return (value, hits)
foreign import stdcall unsafe "HsOpenGL.h glInitNames" glInitNames :: IO ()
foreign import stdcall unsafe "HsOpenGL.h glSelectBuffer" glSelectBuffer ::
GLsizei -> Ptr GLuint -> IO ()
--------------------------------------------------------------------------------
parseSelectionBuffer :: GLint -> Ptr GLuint -> IO (Maybe [HitRecord])
parseSelectionBuffer numHits buf
| numHits < 0 = return Nothing
| otherwise = liftM Just $ evalIOState (nTimes numHits parseSelectionHit) buf
type Parser a = IOState GLuint a
parseSelectionHit :: Parser HitRecord
parseSelectionHit = do
numNames <- parseGLuint
minZ <- parseGLfloat
maxZ <- parseGLfloat
nameStack <- nTimes numNames parseName
return $ HitRecord minZ maxZ nameStack
parseGLuint :: Parser GLuint
parseGLuint = peekIOState
parseGLfloat :: Parser GLfloat
parseGLfloat = liftM (\x -> fromIntegral x / 0xffffffff) parseGLuint
parseName :: Parser Name
parseName = liftM Name parseGLuint
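-- Illustrative only (added): a raw hit record [2, 0, 0xffffffff, 7, 9]
-- decodes to HitRecord 0.0 1.0 [Name 7, Name 9]; the two depth words are
-- rescaled from the full GLuint range back into [0,1] by 'parseGLfloat'.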
--------------------------------------------------------------------------------
newtype Name = Name GLuint
deriving ( Eq, Ord, Show )
withName :: Name -> IO a -> IO a
withName name action = (do glPushName name ; action) `finally` glPopName
foreign import stdcall unsafe "HsOpenGL.h glPopName" glPopName :: IO ()
foreign import stdcall unsafe "HsOpenGL.h glPushName" glPushName :: Name -> IO ()
foreign import stdcall unsafe "HsOpenGL.h glLoadName" loadName :: Name -> IO ()
maxNameStackDepth :: GettableStateVar GLsizei
maxNameStackDepth = makeGettableStateVar (getSizei1 id GetMaxNameStackDepth)
nameStackDepth :: GettableStateVar GLsizei
nameStackDepth = makeGettableStateVar (getSizei1 id GetNameStackDepth)
|
OS2World/DEV-UTIL-HUGS
|
libraries/Graphics/Rendering/OpenGL/GL/Selection.hs
|
bsd-3-clause
| 3,618
| 0
| 12
| 527
| 813
| 450
| 363
| 61
| 1
|
{-|
Module : Values
Description : common functions for MPBall benchmarks
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
-}
module Values where
import MixedTypesNumPrelude
-- import Prelude
import Test.QuickCheck
import Control.DeepSeq
import AERN2.Utils.Bench
import AERN2.MP.UseMPFR.Float
import AERN2.MP.UseMPFR.Ball
import AERN2.MP.Ball.Tests () -- instance Arbitrary MPBall
ballsExactPositive :: [MPBall]
ballsExactPositive = filter (!>! 0) ballsExact
ballsSmallExact :: [MPBall]
ballsSmallExact = map centreAsBall ballsSmall
ballsExact :: [MPBall]
ballsExact = map centreAsBall balls
ballsSmall :: [MPBall]
ballsSmall = map makeSmall balls
where
makeSmall :: MPBall -> MPBall
makeSmall b
| (abs b) !<! 1000000 = b
| otherwise = 100000 * (b/(1000000+(abs $ centreAsBall b)))
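-- Illustrative only (rough arithmetic, not part of the original module):
-- a ball centred near 1e9 is rescaled to about
-- 100000 * (1e9 / (1000000 + 1e9)) ~ 9.99e4, i.e. well inside the "small"
-- range, while balls already below 1e6 in absolute value pass through
-- unchanged.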
ballsPositive :: [MPBall]
ballsPositive = filter (!>! 0) balls
balls :: [MPBall]
balls = listFromGen arbitrary
instance NFData MPFloat where rnf x = rnf $ x > 0
instance NFData ErrorBound where rnf = rnf . mpFloat
instance NFData MPBall
|
michalkonecny/aern2
|
aern2-mp/bench/old/Values.hs
|
bsd-3-clause
| 1,173
| 0
| 15
| 227
| 284
| 160
| 124
| 27
| 1
|
{-|
Module : Idris.IBC
Description : Core representations and code to generate IBC files.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.IBC (loadIBC, loadPkgIndex,
writeIBC, writePkgIndex,
hasValidIBCVersion, IBCPhase(..)) where
import Idris.AbsSyntax
import Idris.Core.Binary
import Idris.Core.CaseTree
import Idris.Core.Evaluate
import Idris.Core.TT
import Idris.DeepSeq
import Idris.Delaborate
import Idris.Docstrings (Docstring)
import qualified Idris.Docstrings as D
import Idris.Error
import Idris.Imports
import Idris.Output
import IRTS.System (getIdrisLibDir)
import Paths_idris
import qualified Cheapskate.Types as CT
import Codec.Archive.Zip
import Control.DeepSeq
import Control.Monad
import Control.Monad.State.Strict hiding (get, put)
import qualified Control.Monad.State.Strict as ST
import Data.Binary
import Data.ByteString.Lazy as B hiding (elem, length, map)
import Data.Functor
import Data.List as L
import Data.Maybe (catMaybes)
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Vector.Binary
import Debug.Trace
import System.Directory
import System.FilePath
ibcVersion :: Word16
ibcVersion = 160
-- | When an IBC is being loaded, we load different things (and omit
-- different structures/definitions) depending on which phase we're in.
data IBCPhase = IBC_Building -- ^ when building the module tree
              | IBC_REPL Bool -- ^ when loading modules for the REPL; the Bool is True for the top-level module
deriving (Show, Eq)
data IBCFile = IBCFile {
ver :: Word16
, sourcefile :: FilePath
, ibc_reachablenames :: ![Name]
, ibc_imports :: ![(Bool, FilePath)]
, ibc_importdirs :: ![FilePath]
, ibc_sourcedirs :: ![FilePath]
, ibc_implicits :: ![(Name, [PArg])]
, ibc_fixes :: ![FixDecl]
, ibc_statics :: ![(Name, [Bool])]
, ibc_interfaces :: ![(Name, InterfaceInfo)]
, ibc_records :: ![(Name, RecordInfo)]
, ibc_implementations :: ![(Bool, Bool, Name, Name)]
, ibc_dsls :: ![(Name, DSL)]
, ibc_datatypes :: ![(Name, TypeInfo)]
, ibc_optimise :: ![(Name, OptInfo)]
, ibc_syntax :: ![Syntax]
, ibc_keywords :: ![String]
, ibc_objs :: ![(Codegen, FilePath)]
, ibc_libs :: ![(Codegen, String)]
, ibc_cgflags :: ![(Codegen, String)]
, ibc_dynamic_libs :: ![String]
, ibc_hdrs :: ![(Codegen, String)]
, ibc_totcheckfail :: ![(FC, String)]
, ibc_flags :: ![(Name, [FnOpt])]
, ibc_fninfo :: ![(Name, FnInfo)]
, ibc_cg :: ![(Name, CGInfo)]
, ibc_docstrings :: ![(Name, (Docstring D.DocTerm, [(Name, Docstring D.DocTerm)]))]
, ibc_moduledocs :: ![(Name, Docstring D.DocTerm)]
, ibc_transforms :: ![(Name, (Term, Term))]
, ibc_errRev :: ![(Term, Term)]
, ibc_errReduce :: ![Name]
, ibc_coercions :: ![Name]
, ibc_lineapps :: ![(FilePath, Int, PTerm)]
, ibc_namehints :: ![(Name, Name)]
, ibc_metainformation :: ![(Name, MetaInformation)]
, ibc_errorhandlers :: ![Name]
, ibc_function_errorhandlers :: ![(Name, Name, Name)] -- fn, arg, handler
, ibc_metavars :: ![(Name, (Maybe Name, Int, [Name], Bool, Bool))]
, ibc_patdefs :: ![(Name, ([([(Name, Term)], Term, Term)], [PTerm]))]
, ibc_postulates :: ![Name]
, ibc_externs :: ![(Name, Int)]
, ibc_parsedSpan :: !(Maybe FC)
, ibc_usage :: ![(Name, Int)]
, ibc_exports :: ![Name]
, ibc_autohints :: ![(Name, Name)]
, ibc_deprecated :: ![(Name, String)]
, ibc_defs :: ![(Name, Def)]
, ibc_total :: ![(Name, Totality)]
, ibc_injective :: ![(Name, Injectivity)]
, ibc_access :: ![(Name, Accessibility)]
, ibc_fragile :: ![(Name, String)]
, ibc_constraints :: ![(FC, UConstraint)]
}
deriving Show
{-!
deriving instance Binary IBCFile
!-}
initIBC :: IBCFile
initIBC = IBCFile ibcVersion "" [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] Nothing [] [] [] [] [] [] [] [] [] []
hasValidIBCVersion :: FilePath -> Idris Bool
hasValidIBCVersion fp = do
archiveFile <- runIO $ B.readFile fp
case toArchiveOrFail archiveFile of
Left _ -> return False
Right archive -> do ver <- getEntry 0 "ver" archive
return (ver == ibcVersion)
loadIBC :: Bool -- ^ True = reexport, False = make everything private
-> IBCPhase
-> FilePath -> Idris ()
loadIBC reexport phase fp
= do imps <- getImported
case lookup fp imps of
Nothing -> load True
Just p -> if (not p && reexport) then load False else return ()
where
load fullLoad = do
logIBC 1 $ "Loading ibc " ++ fp ++ " " ++ show reexport
archiveFile <- runIO $ B.readFile fp
case toArchiveOrFail archiveFile of
Left _ -> do
ifail $ fp ++ " isn't loadable, it may have an old ibc format.\n"
++ "Please clean and rebuild it."
Right archive -> do
if fullLoad
then process reexport phase archive fp
else unhide phase archive
addImported reexport fp
-- | Load an entire package from its index file
loadPkgIndex :: String -> Idris ()
loadPkgIndex pkg = do ddir <- runIO getIdrisLibDir
addImportDir (ddir </> pkg)
fp <- findPkgIndex pkg
loadIBC True IBC_Building fp
makeEntry :: (Binary b) => String -> [b] -> Maybe Entry
makeEntry name val = if L.null val
then Nothing
else Just $ toEntry name 0 (encode val)
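-- Illustrative only (not part of the original source): empty payloads are
-- skipped, non-empty ones are serialised, e.g.
--
--   makeEntry "ibc_imports" ([] :: [(Bool, FilePath)])  ~>  Nothing
--   makeEntry "ibc_keywords" ["syntax"]  ~>  Just (toEntry "ibc_keywords" 0 (encode ["syntax"]))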
entries :: IBCFile -> [Entry]
entries i = catMaybes [Just $ toEntry "ver" 0 (encode $ ver i),
makeEntry "sourcefile" (sourcefile i),
makeEntry "ibc_imports" (ibc_imports i),
makeEntry "ibc_importdirs" (ibc_importdirs i),
makeEntry "ibc_sourcedirs" (ibc_sourcedirs i),
makeEntry "ibc_implicits" (ibc_implicits i),
makeEntry "ibc_fixes" (ibc_fixes i),
makeEntry "ibc_statics" (ibc_statics i),
makeEntry "ibc_interfaces" (ibc_interfaces i),
makeEntry "ibc_records" (ibc_records i),
makeEntry "ibc_implementations" (ibc_implementations i),
makeEntry "ibc_dsls" (ibc_dsls i),
makeEntry "ibc_datatypes" (ibc_datatypes i),
makeEntry "ibc_optimise" (ibc_optimise i),
makeEntry "ibc_syntax" (ibc_syntax i),
makeEntry "ibc_keywords" (ibc_keywords i),
makeEntry "ibc_objs" (ibc_objs i),
makeEntry "ibc_libs" (ibc_libs i),
makeEntry "ibc_cgflags" (ibc_cgflags i),
makeEntry "ibc_dynamic_libs" (ibc_dynamic_libs i),
makeEntry "ibc_hdrs" (ibc_hdrs i),
makeEntry "ibc_totcheckfail" (ibc_totcheckfail i),
makeEntry "ibc_flags" (ibc_flags i),
makeEntry "ibc_fninfo" (ibc_fninfo i),
makeEntry "ibc_cg" (ibc_cg i),
makeEntry "ibc_docstrings" (ibc_docstrings i),
makeEntry "ibc_moduledocs" (ibc_moduledocs i),
makeEntry "ibc_transforms" (ibc_transforms i),
makeEntry "ibc_errRev" (ibc_errRev i),
makeEntry "ibc_errReduce" (ibc_errReduce i),
makeEntry "ibc_coercions" (ibc_coercions i),
makeEntry "ibc_lineapps" (ibc_lineapps i),
makeEntry "ibc_namehints" (ibc_namehints i),
makeEntry "ibc_metainformation" (ibc_metainformation i),
makeEntry "ibc_errorhandlers" (ibc_errorhandlers i),
makeEntry "ibc_function_errorhandlers" (ibc_function_errorhandlers i),
makeEntry "ibc_metavars" (ibc_metavars i),
makeEntry "ibc_patdefs" (ibc_patdefs i),
makeEntry "ibc_postulates" (ibc_postulates i),
makeEntry "ibc_externs" (ibc_externs i),
toEntry "ibc_parsedSpan" 0 . encode <$> ibc_parsedSpan i,
makeEntry "ibc_usage" (ibc_usage i),
makeEntry "ibc_exports" (ibc_exports i),
makeEntry "ibc_autohints" (ibc_autohints i),
makeEntry "ibc_deprecated" (ibc_deprecated i),
makeEntry "ibc_defs" (ibc_defs i),
makeEntry "ibc_total" (ibc_total i),
makeEntry "ibc_injective" (ibc_injective i),
makeEntry "ibc_access" (ibc_access i),
makeEntry "ibc_fragile" (ibc_fragile i)]
-- TODO: Put this back in shortly after minimising/pruning constraints
-- makeEntry "ibc_constraints" (ibc_constraints i)]
writeArchive :: FilePath -> IBCFile -> Idris ()
writeArchive fp i = do let a = L.foldl (\x y -> addEntryToArchive y x) emptyArchive (entries i)
runIO $ B.writeFile fp (fromArchive a)
writeIBC :: FilePath -> FilePath -> Idris ()
writeIBC src f
= do
logIBC 1 $ "Writing IBC for: " ++ show f
iReport 2 $ "Writing IBC for: " ++ show f
i <- getIState
-- case (Data.List.map fst (idris_metavars i)) \\ primDefs of
-- (_:_) -> ifail "Can't write ibc when there are unsolved metavariables"
-- [] -> return ()
resetNameIdx
ibcf <- mkIBC (ibc_write i) (initIBC { sourcefile = src })
idrisCatch (do runIO $ createDirectoryIfMissing True (dropFileName f)
writeArchive f ibcf
logIBC 1 "Written")
(\c -> do logIBC 1 $ "Failed " ++ pshow i c)
return ()
-- | Write a package index containing all the imports in the current
-- IState. Used for ':search' of an entire package, to ensure
-- everything is loaded.
writePkgIndex :: FilePath -> Idris ()
writePkgIndex f
= do i <- getIState
let imps = map (\ (x, y) -> (True, x)) $ idris_imported i
logIBC 1 $ "Writing package index " ++ show f ++ " including\n" ++
show (map snd imps)
resetNameIdx
let ibcf = initIBC { ibc_imports = imps }
idrisCatch (do runIO $ createDirectoryIfMissing True (dropFileName f)
writeArchive f ibcf
logIBC 1 "Written")
(\c -> do logIBC 1 $ "Failed " ++ pshow i c)
return ()
mkIBC :: [IBCWrite] -> IBCFile -> Idris IBCFile
mkIBC [] f = return f
mkIBC (i:is) f = do ist <- getIState
logIBC 5 $ show i ++ " " ++ show (L.length is)
f' <- ibc ist i f
mkIBC is f'
ibc :: IState -> IBCWrite -> IBCFile -> Idris IBCFile
ibc i (IBCFix d) f = return f { ibc_fixes = d : ibc_fixes f }
ibc i (IBCImp n) f = case lookupCtxtExact n (idris_implicits i) of
Just v -> return f { ibc_implicits = (n,v): ibc_implicits f }
_ -> ifail "IBC write failed"
ibc i (IBCStatic n) f
= case lookupCtxtExact n (idris_statics i) of
Just v -> return f { ibc_statics = (n,v): ibc_statics f }
_ -> ifail "IBC write failed"
ibc i (IBCInterface n) f
= case lookupCtxtExact n (idris_interfaces i) of
Just v -> return f { ibc_interfaces = (n,v): ibc_interfaces f }
_ -> ifail "IBC write failed"
ibc i (IBCRecord n) f
= case lookupCtxtExact n (idris_records i) of
Just v -> return f { ibc_records = (n,v): ibc_records f }
_ -> ifail "IBC write failed"
ibc i (IBCImplementation int res n ins) f
= return f { ibc_implementations = (int, res, n, ins) : ibc_implementations f }
ibc i (IBCDSL n) f
= case lookupCtxtExact n (idris_dsls i) of
Just v -> return f { ibc_dsls = (n,v): ibc_dsls f }
_ -> ifail "IBC write failed"
ibc i (IBCData n) f
= case lookupCtxtExact n (idris_datatypes i) of
Just v -> return f { ibc_datatypes = (n,v): ibc_datatypes f }
_ -> ifail "IBC write failed"
ibc i (IBCOpt n) f = case lookupCtxtExact n (idris_optimisation i) of
Just v -> return f { ibc_optimise = (n,v): ibc_optimise f }
_ -> ifail "IBC write failed"
ibc i (IBCSyntax n) f = return f { ibc_syntax = n : ibc_syntax f }
ibc i (IBCKeyword n) f = return f { ibc_keywords = n : ibc_keywords f }
ibc i (IBCImport n) f = return f { ibc_imports = n : ibc_imports f }
ibc i (IBCImportDir n) f = return f { ibc_importdirs = n : ibc_importdirs f }
ibc i (IBCSourceDir n) f = return f { ibc_sourcedirs = n : ibc_sourcedirs f }
ibc i (IBCObj tgt n) f = return f { ibc_objs = (tgt, n) : ibc_objs f }
ibc i (IBCLib tgt n) f = return f { ibc_libs = (tgt, n) : ibc_libs f }
ibc i (IBCCGFlag tgt n) f = return f { ibc_cgflags = (tgt, n) : ibc_cgflags f }
ibc i (IBCDyLib n) f = return f {ibc_dynamic_libs = n : ibc_dynamic_libs f }
ibc i (IBCHeader tgt n) f = return f { ibc_hdrs = (tgt, n) : ibc_hdrs f }
ibc i (IBCDef n) f
= do f' <- case lookupDefExact n (tt_ctxt i) of
Just v -> return f { ibc_defs = (n,v) : ibc_defs f }
_ -> ifail "IBC write failed"
case lookupCtxtExact n (idris_patdefs i) of
Just v -> return f' { ibc_patdefs = (n,v) : ibc_patdefs f }
_ -> return f' -- Not a pattern definition
ibc i (IBCDoc n) f = case lookupCtxtExact n (idris_docstrings i) of
Just v -> return f { ibc_docstrings = (n,v) : ibc_docstrings f }
_ -> ifail "IBC write failed"
ibc i (IBCCG n) f = case lookupCtxtExact n (idris_callgraph i) of
Just v -> return f { ibc_cg = (n,v) : ibc_cg f }
_ -> ifail "IBC write failed"
ibc i (IBCCoercion n) f = return f { ibc_coercions = n : ibc_coercions f }
ibc i (IBCAccess n a) f = return f { ibc_access = (n,a) : ibc_access f }
ibc i (IBCFlags n) f
= case lookupCtxtExact n (idris_flags i) of
Just a -> return f { ibc_flags = (n,a): ibc_flags f }
_ -> ifail "IBC write failed"
ibc i (IBCFnInfo n a) f = return f { ibc_fninfo = (n,a) : ibc_fninfo f }
ibc i (IBCTotal n a) f = return f { ibc_total = (n,a) : ibc_total f }
ibc i (IBCInjective n a) f = return f { ibc_injective = (n,a) : ibc_injective f }
ibc i (IBCTrans n t) f = return f { ibc_transforms = (n, t) : ibc_transforms f }
ibc i (IBCErrRev t) f = return f { ibc_errRev = t : ibc_errRev f }
ibc i (IBCErrReduce t) f = return f { ibc_errReduce = t : ibc_errReduce f }
ibc i (IBCLineApp fp l t) f
= return f { ibc_lineapps = (fp,l,t) : ibc_lineapps f }
ibc i (IBCNameHint (n, ty)) f
= return f { ibc_namehints = (n, ty) : ibc_namehints f }
ibc i (IBCMetaInformation n m) f = return f { ibc_metainformation = (n,m) : ibc_metainformation f }
ibc i (IBCErrorHandler n) f = return f { ibc_errorhandlers = n : ibc_errorhandlers f }
ibc i (IBCFunctionErrorHandler fn a n) f =
return f { ibc_function_errorhandlers = (fn, a, n) : ibc_function_errorhandlers f }
ibc i (IBCMetavar n) f =
case lookup n (idris_metavars i) of
Nothing -> return f
Just t -> return f { ibc_metavars = (n, t) : ibc_metavars f }
ibc i (IBCPostulate n) f = return f { ibc_postulates = n : ibc_postulates f }
ibc i (IBCExtern n) f = return f { ibc_externs = n : ibc_externs f }
ibc i (IBCTotCheckErr fc err) f = return f { ibc_totcheckfail = (fc, err) : ibc_totcheckfail f }
ibc i (IBCParsedRegion fc) f = return f { ibc_parsedSpan = Just fc }
ibc i (IBCModDocs n) f = case lookupCtxtExact n (idris_moduledocs i) of
Just v -> return f { ibc_moduledocs = (n,v) : ibc_moduledocs f }
_ -> ifail "IBC write failed"
ibc i (IBCUsage n) f = return f { ibc_usage = n : ibc_usage f }
ibc i (IBCExport n) f = return f { ibc_exports = n : ibc_exports f }
ibc i (IBCAutoHint n h) f = return f { ibc_autohints = (n, h) : ibc_autohints f }
ibc i (IBCDeprecate n r) f = return f { ibc_deprecated = (n, r) : ibc_deprecated f }
ibc i (IBCFragile n r) f = return f { ibc_fragile = (n,r) : ibc_fragile f }
ibc i (IBCConstraint fc u) f = return f { ibc_constraints = (fc, u) : ibc_constraints f }
getEntry :: (Binary b, NFData b) => b -> FilePath -> Archive -> Idris b
getEntry alt f a = case findEntryByPath f a of
Nothing -> return alt
Just e -> return $! (force . decode . fromEntry) e
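-- Illustrative only (added): 'getEntry' falls back to its first argument when
-- the named entry is absent, so e.g. getEntry [] "ibc_postulates" ar yields []
-- for an archive without that path, and the decoded, fully forced list
-- otherwise.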
unhide :: IBCPhase -> Archive -> Idris ()
unhide phase ar = do
processImports True phase ar
processAccess True phase ar
process :: Bool -- ^ Reexporting
-> IBCPhase
-> Archive -> FilePath -> Idris ()
process reexp phase archive fn = do
ver <- getEntry 0 "ver" archive
when (ver /= ibcVersion) $ do
logIBC 1 "ibc out of date"
let e = if ver < ibcVersion
then "an earlier" else "a later"
ldir <- runIO $ getIdrisLibDir
let start = if ldir `L.isPrefixOf` fn
then "This external module"
else "This module"
let end = case L.stripPrefix ldir fn of
Nothing -> "Please clean and rebuild."
Just ploc -> unwords ["Please reinstall:", L.head $ splitDirectories ploc]
ifail $ unlines [ unwords ["Incompatible ibc version for:", show fn]
, unwords [start
, "was built with"
, e
, "version of Idris."]
, end
]
source <- getEntry "" "sourcefile" archive
srcok <- runIO $ doesFileExist source
when srcok $ timestampOlder source fn
processImportDirs archive
processSourceDirs archive
processImports reexp phase archive
processImplicits archive
processInfix archive
processStatics archive
processInterfaces archive
processRecords archive
processImplementations archive
processDSLs archive
processDatatypes archive
processOptimise archive
processSyntax archive
processKeywords archive
processObjectFiles archive
processLibs archive
processCodegenFlags archive
processDynamicLibs archive
processHeaders archive
processPatternDefs archive
processFlags archive
processFnInfo archive
processTotalityCheckError archive
processCallgraph archive
processDocs archive
processModuleDocs archive
processCoercions archive
processTransforms archive
processErrRev archive
processErrReduce archive
processLineApps archive
processNameHints archive
processMetaInformation archive
processErrorHandlers archive
processFunctionErrorHandlers archive
processMetaVars archive
processPostulates archive
processExterns archive
processParsedSpan archive
processUsage archive
processExports archive
processAutoHints archive
processDeprecate archive
processDefs archive
processTotal archive
processInjective archive
processAccess reexp phase archive
processFragile archive
processConstraints archive
timestampOlder :: FilePath -> FilePath -> Idris ()
timestampOlder src ibc = do
srct <- runIO $ getModificationTime src
ibct <- runIO $ getModificationTime ibc
if (srct > ibct)
then ifail $ unlines [ "Module needs reloading:"
, unwords ["\tSRC :", show src]
, unwords ["\tModified at:", show srct]
, unwords ["\tIBC :", show ibc]
, unwords ["\tModified at:", show ibct]
]
else return ()
processPostulates :: Archive -> Idris ()
processPostulates ar = do
ns <- getEntry [] "ibc_postulates" ar
updateIState (\i -> i { idris_postulates = idris_postulates i `S.union` S.fromList ns })
processExterns :: Archive -> Idris ()
processExterns ar = do
ns <- getEntry [] "ibc_externs" ar
updateIState (\i -> i{ idris_externs = idris_externs i `S.union` S.fromList ns })
processParsedSpan :: Archive -> Idris ()
processParsedSpan ar = do
fc <- getEntry Nothing "ibc_parsedSpan" ar
updateIState (\i -> i { idris_parsedSpan = fc })
processUsage :: Archive -> Idris ()
processUsage ar = do
ns <- getEntry [] "ibc_usage" ar
updateIState (\i -> i { idris_erasureUsed = ns ++ idris_erasureUsed i })
processExports :: Archive -> Idris ()
processExports ar = do
ns <- getEntry [] "ibc_exports" ar
updateIState (\i -> i { idris_exports = ns ++ idris_exports i })
processAutoHints :: Archive -> Idris ()
processAutoHints ar = do
ns <- getEntry [] "ibc_autohints" ar
mapM_ (\(n,h) -> addAutoHint n h) ns
processDeprecate :: Archive -> Idris ()
processDeprecate ar = do
ns <- getEntry [] "ibc_deprecated" ar
mapM_ (\(n,reason) -> addDeprecated n reason) ns
processFragile :: Archive -> Idris ()
processFragile ar = do
ns <- getEntry [] "ibc_fragile" ar
mapM_ (\(n,reason) -> addFragile n reason) ns
processConstraints :: Archive -> Idris ()
processConstraints ar = do
cs <- getEntry [] "ibc_constraints" ar
mapM_ (\ (fc, c) -> addConstraints fc (0, [c])) cs
processImportDirs :: Archive -> Idris ()
processImportDirs ar = do
fs <- getEntry [] "ibc_importdirs" ar
mapM_ addImportDir fs
processSourceDirs :: Archive -> Idris ()
processSourceDirs ar = do
fs <- getEntry [] "ibc_sourcedirs" ar
mapM_ addSourceDir fs
processImports :: Bool -> IBCPhase -> Archive -> Idris ()
processImports reexp phase ar = do
fs <- getEntry [] "ibc_imports" ar
mapM_ (\(re, f) -> do
i <- getIState
ibcsd <- valIBCSubDir i
ids <- allImportDirs
fp <- findImport ids ibcsd f
-- if (f `elem` imported i)
-- then logLvl 1 $ "Already read " ++ f
putIState (i { imported = f : imported i })
let phase' = case phase of
IBC_REPL _ -> IBC_REPL False
p -> p
case fp of
LIDR fn -> do
logIBC 1 $ "Failed at " ++ fn
ifail "Must be an ibc"
IDR fn -> do
logIBC 1 $ "Failed at " ++ fn
ifail "Must be an ibc"
IBC fn src -> loadIBC (reexp && re) phase' fn) fs
processImplicits :: Archive -> Idris ()
processImplicits ar = do
imps <- getEntry [] "ibc_implicits" ar
mapM_ (\ (n, imp) -> do
i <- getIState
case lookupDefAccExact n False (tt_ctxt i) of
Just (n, Hidden) -> return ()
Just (n, Private) -> return ()
_ -> putIState (i { idris_implicits = addDef n imp (idris_implicits i) })) imps
processInfix :: Archive -> Idris ()
processInfix ar = do
f <- getEntry [] "ibc_fixes" ar
updateIState (\i -> i { idris_infixes = sort $ f ++ idris_infixes i })
processStatics :: Archive -> Idris ()
processStatics ar = do
ss <- getEntry [] "ibc_statics" ar
mapM_ (\ (n, s) ->
updateIState (\i -> i { idris_statics = addDef n s (idris_statics i) })) ss
processInterfaces :: Archive -> Idris ()
processInterfaces ar = do
cs <- getEntry [] "ibc_interfaces" ar
mapM_ (\ (n, c) -> do
i <- getIState
-- Don't lose implementations from previous IBCs, which
-- could have loaded in any order
let is = case lookupCtxtExact n (idris_interfaces i) of
Just ci -> interface_implementations ci
_ -> []
let c' = c { interface_implementations = interface_implementations c ++ is }
putIState (i { idris_interfaces = addDef n c' (idris_interfaces i) })) cs
processRecords :: Archive -> Idris ()
processRecords ar = do
rs <- getEntry [] "ibc_records" ar
mapM_ (\ (n, r) ->
updateIState (\i -> i { idris_records = addDef n r (idris_records i) })) rs
processImplementations :: Archive -> Idris ()
processImplementations ar = do
cs <- getEntry [] "ibc_implementations" ar
mapM_ (\ (i, res, n, ins) -> addImplementation i res n ins) cs
processDSLs :: Archive -> Idris ()
processDSLs ar = do
cs <- getEntry [] "ibc_dsls" ar
mapM_ (\ (n, c) -> updateIState (\i ->
i { idris_dsls = addDef n c (idris_dsls i) })) cs
processDatatypes :: Archive -> Idris ()
processDatatypes ar = do
cs <- getEntry [] "ibc_datatypes" ar
mapM_ (\ (n, c) -> updateIState (\i ->
i { idris_datatypes = addDef n c (idris_datatypes i) })) cs
processOptimise :: Archive -> Idris ()
processOptimise ar = do
cs <- getEntry [] "ibc_optimise" ar
mapM_ (\ (n, c) -> updateIState (\i ->
i { idris_optimisation = addDef n c (idris_optimisation i) })) cs
processSyntax :: Archive -> Idris ()
processSyntax ar = do
s <- getEntry [] "ibc_syntax" ar
updateIState (\i -> i { syntax_rules = updateSyntaxRules s (syntax_rules i) })
processKeywords :: Archive -> Idris ()
processKeywords ar = do
k <- getEntry [] "ibc_keywords" ar
updateIState (\i -> i { syntax_keywords = k ++ syntax_keywords i })
processObjectFiles :: Archive -> Idris ()
processObjectFiles ar = do
os <- getEntry [] "ibc_objs" ar
mapM_ (\ (cg, obj) -> do
dirs <- allImportDirs
o <- runIO $ findInPath dirs obj
addObjectFile cg o) os
processLibs :: Archive -> Idris ()
processLibs ar = do
ls <- getEntry [] "ibc_libs" ar
mapM_ (uncurry addLib) ls
processCodegenFlags :: Archive -> Idris ()
processCodegenFlags ar = do
ls <- getEntry [] "ibc_cgflags" ar
mapM_ (uncurry addFlag) ls
processDynamicLibs :: Archive -> Idris ()
processDynamicLibs ar = do
ls <- getEntry [] "ibc_dynamic_libs" ar
res <- mapM (addDyLib . return) ls
mapM_ checkLoad res
where
checkLoad (Left _) = return ()
checkLoad (Right err) = ifail err
processHeaders :: Archive -> Idris ()
processHeaders ar = do
hs <- getEntry [] "ibc_hdrs" ar
mapM_ (uncurry addHdr) hs
processPatternDefs :: Archive -> Idris ()
processPatternDefs ar = do
ds <- getEntry [] "ibc_patdefs" ar
mapM_ (\ (n, d) -> updateIState (\i ->
i { idris_patdefs = addDef n (force d) (idris_patdefs i) })) ds
processDefs :: Archive -> Idris ()
processDefs ar = do
ds <- getEntry [] "ibc_defs" ar
mapM_ (\ (n, d) -> do
d' <- updateDef d
case d' of
TyDecl _ _ -> return ()
_ -> do
logIBC 1 $ "SOLVING " ++ show n
solveDeferred emptyFC n
updateIState (\i -> i { tt_ctxt = addCtxtDef n d' (tt_ctxt i) })) ds
where
updateDef (CaseOp c t args o s cd) = do
o' <- mapM updateOrig o
cd' <- updateCD cd
return $ CaseOp c t args o' s cd'
updateDef t = return t
updateOrig (Left t) = liftM Left (update t)
updateOrig (Right (l, r)) = do
l' <- update l
r' <- update r
return $ Right (l', r')
updateCD (CaseDefs (cs, c) (rs, r)) = do
c' <- updateSC c
r' <- updateSC r
return $ CaseDefs (cs, c') (rs, r')
updateSC (Case t n alts) = do
alts' <- mapM updateAlt alts
return (Case t n alts')
updateSC (ProjCase t alts) = do
alts' <- mapM updateAlt alts
return (ProjCase t alts')
updateSC (STerm t) = do
t' <- update t
return (STerm t')
updateSC c = return c
updateAlt (ConCase n i ns t) = do
t' <- updateSC t
return (ConCase n i ns t')
updateAlt (FnCase n ns t) = do
t' <- updateSC t
return (FnCase n ns t')
updateAlt (ConstCase c t) = do
t' <- updateSC t
return (ConstCase c t')
updateAlt (SucCase n t) = do
t' <- updateSC t
return (SucCase n t')
updateAlt (DefaultCase t) = do
t' <- updateSC t
return (DefaultCase t')
-- We get a lot of repetition in sub terms and can save a fair chunk
-- of memory if we make sure they're shared. addTT looks for a term
-- and returns it if it exists already, while also keeping stats of
-- how many times a subterm is repeated.
update t = do
tm <- addTT t
case tm of
Nothing -> update' t
Just t' -> return t'
update' (P t n ty) = do
n' <- getSymbol n
return $ P t n' ty
update' (App s f a) = liftM2 (App s) (update' f) (update' a)
update' (Bind n b sc) = do
b' <- updateB b
sc' <- update sc
return $ Bind n b' sc'
where
updateB (Let t v) = liftM2 Let (update' t) (update' v)
updateB b = do
ty' <- update' (binderTy b)
return (b { binderTy = ty' })
update' (Proj t i) = do
t' <- update' t
return $ Proj t' i
update' t = return t
processDocs :: Archive -> Idris ()
processDocs ar = do
ds <- getEntry [] "ibc_docstrings" ar
mapM_ (\(n, a) -> addDocStr n (fst a) (snd a)) ds
processModuleDocs :: Archive -> Idris ()
processModuleDocs ar = do
ds <- getEntry [] "ibc_moduledocs" ar
mapM_ (\ (n, d) -> updateIState (\i ->
i { idris_moduledocs = addDef n d (idris_moduledocs i) })) ds
processAccess :: Bool -- ^ Reexporting?
-> IBCPhase
-> Archive -> Idris ()
processAccess reexp phase ar = do
ds <- getEntry [] "ibc_access" ar
mapM_ (\ (n, a_in) -> do
let a = if reexp then a_in else Hidden
logIBC 3 $ "Setting " ++ show (a, n) ++ " to " ++ show a
updateIState (\i -> i { tt_ctxt = setAccess n a (tt_ctxt i) })
if (not reexp)
then do
logIBC 1 $ "Not exporting " ++ show n
setAccessibility n Hidden
else logIBC 1 $ "Exporting " ++ show n
-- Everything should be available at the REPL from
-- things imported publicly
when (phase == IBC_REPL True) $ setAccessibility n Public) ds
processFlags :: Archive -> Idris ()
processFlags ar = do
ds <- getEntry [] "ibc_flags" ar
mapM_ (\ (n, a) -> setFlags n a) ds
processFnInfo :: Archive -> Idris ()
processFnInfo ar = do
ds <- getEntry [] "ibc_fninfo" ar
mapM_ (\ (n, a) -> setFnInfo n a) ds
processTotal :: Archive -> Idris ()
processTotal ar = do
ds <- getEntry [] "ibc_total" ar
mapM_ (\ (n, a) -> updateIState (\i -> i { tt_ctxt = setTotal n a (tt_ctxt i) })) ds
processInjective :: Archive -> Idris ()
processInjective ar = do
ds <- getEntry [] "ibc_injective" ar
mapM_ (\ (n, a) -> updateIState (\i -> i { tt_ctxt = setInjective n a (tt_ctxt i) })) ds
processTotalityCheckError :: Archive -> Idris ()
processTotalityCheckError ar = do
es <- getEntry [] "ibc_totcheckfail" ar
updateIState (\i -> i { idris_totcheckfail = idris_totcheckfail i ++ es })
processCallgraph :: Archive -> Idris ()
processCallgraph ar = do
ds <- getEntry [] "ibc_cg" ar
mapM_ (\ (n, a) -> addToCG n a) ds
processCoercions :: Archive -> Idris ()
processCoercions ar = do
ns <- getEntry [] "ibc_coercions" ar
mapM_ (\ n -> addCoercion n) ns
processTransforms :: Archive -> Idris ()
processTransforms ar = do
ts <- getEntry [] "ibc_transforms" ar
mapM_ (\ (n, t) -> addTrans n t) ts
processErrRev :: Archive -> Idris ()
processErrRev ar = do
ts <- getEntry [] "ibc_errRev" ar
mapM_ addErrRev ts
processErrReduce :: Archive -> Idris ()
processErrReduce ar = do
ts <- getEntry [] "ibc_errReduce" ar
mapM_ addErrReduce ts
processLineApps :: Archive -> Idris ()
processLineApps ar = do
ls <- getEntry [] "ibc_lineapps" ar
mapM_ (\ (f, i, t) -> addInternalApp f i t) ls
processNameHints :: Archive -> Idris ()
processNameHints ar = do
ns <- getEntry [] "ibc_namehints" ar
mapM_ (\ (n, ty) -> addNameHint n ty) ns
processMetaInformation :: Archive -> Idris ()
processMetaInformation ar = do
ds <- getEntry [] "ibc_metainformation" ar
mapM_ (\ (n, m) -> updateIState (\i ->
i { tt_ctxt = setMetaInformation n m (tt_ctxt i) })) ds
processErrorHandlers :: Archive -> Idris ()
processErrorHandlers ar = do
ns <- getEntry [] "ibc_errorhandlers" ar
updateIState (\i -> i { idris_errorhandlers = idris_errorhandlers i ++ ns })
processFunctionErrorHandlers :: Archive -> Idris ()
processFunctionErrorHandlers ar = do
ns <- getEntry [] "ibc_function_errorhandlers" ar
mapM_ (\ (fn,arg,handler) -> addFunctionErrorHandlers fn arg [handler]) ns
processMetaVars :: Archive -> Idris ()
processMetaVars ar = do
ns <- getEntry [] "ibc_metavars" ar
updateIState (\i -> i { idris_metavars = L.reverse ns ++ idris_metavars i })
----- For Cheapskate and docstrings
instance Binary a => Binary (D.Docstring a) where
put (D.DocString opts lines) = do put opts ; put lines
get = do opts <- get
lines <- get
return (D.DocString opts lines)
instance Binary CT.Options where
put (CT.Options x1 x2 x3 x4) = do put x1 ; put x2 ; put x3 ; put x4
get = do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (CT.Options x1 x2 x3 x4)
instance Binary D.DocTerm where
put D.Unchecked = putWord8 0
put (D.Checked t) = putWord8 1 >> put t
put (D.Example t) = putWord8 2 >> put t
put (D.Failing e) = putWord8 3 >> put e
get = do i <- getWord8
case i of
0 -> return D.Unchecked
1 -> fmap D.Checked get
2 -> fmap D.Example get
3 -> fmap D.Failing get
_ -> error "Corrupted binary data for DocTerm"
instance Binary a => Binary (D.Block a) where
put (D.Para lines) = do putWord8 0 ; put lines
put (D.Header i lines) = do putWord8 1 ; put i ; put lines
put (D.Blockquote bs) = do putWord8 2 ; put bs
put (D.List b t xs) = do putWord8 3 ; put b ; put t ; put xs
put (D.CodeBlock attr txt src) = do putWord8 4 ; put attr ; put txt ; put src
put (D.HtmlBlock txt) = do putWord8 5 ; put txt
put D.HRule = putWord8 6
get = do i <- getWord8
case i of
0 -> fmap D.Para get
1 -> liftM2 D.Header get get
2 -> fmap D.Blockquote get
3 -> liftM3 D.List get get get
4 -> liftM3 D.CodeBlock get get get
5 -> liftM D.HtmlBlock get
6 -> return D.HRule
_ -> error "Corrupted binary data for Block"
instance Binary a => Binary (D.Inline a) where
put (D.Str txt) = do putWord8 0 ; put txt
put D.Space = putWord8 1
put D.SoftBreak = putWord8 2
put D.LineBreak = putWord8 3
put (D.Emph xs) = putWord8 4 >> put xs
put (D.Strong xs) = putWord8 5 >> put xs
put (D.Code xs tm) = putWord8 6 >> put xs >> put tm
put (D.Link a b c) = putWord8 7 >> put a >> put b >> put c
put (D.Image a b c) = putWord8 8 >> put a >> put b >> put c
put (D.Entity a) = putWord8 9 >> put a
put (D.RawHtml x) = putWord8 10 >> put x
get = do i <- getWord8
case i of
0 -> liftM D.Str get
1 -> return D.Space
2 -> return D.SoftBreak
3 -> return D.LineBreak
4 -> liftM D.Emph get
5 -> liftM D.Strong get
6 -> liftM2 D.Code get get
7 -> liftM3 D.Link get get get
8 -> liftM3 D.Image get get get
9 -> liftM D.Entity get
10 -> liftM D.RawHtml get
_ -> error "Corrupted binary data for Inline"
instance Binary CT.ListType where
put (CT.Bullet c) = putWord8 0 >> put c
put (CT.Numbered nw i) = putWord8 1 >> put nw >> put i
get = do i <- getWord8
case i of
0 -> liftM CT.Bullet get
1 -> liftM2 CT.Numbered get get
_ -> error "Corrupted binary data for ListType"
instance Binary CT.CodeAttr where
put (CT.CodeAttr a b) = put a >> put b
get = liftM2 CT.CodeAttr get get
instance Binary CT.NumWrapper where
put (CT.PeriodFollowing) = putWord8 0
put (CT.ParenFollowing) = putWord8 1
get = do i <- getWord8
case i of
0 -> return CT.PeriodFollowing
1 -> return CT.ParenFollowing
_ -> error "Corrupted binary data for NumWrapper"
----- Generated by 'derive'
instance Binary SizeChange where
put x
= case x of
Smaller -> putWord8 0
Same -> putWord8 1
Bigger -> putWord8 2
Unknown -> putWord8 3
get
= do i <- getWord8
case i of
0 -> return Smaller
1 -> return Same
2 -> return Bigger
3 -> return Unknown
_ -> error "Corrupted binary data for SizeChange"
instance Binary CGInfo where
put (CGInfo x1 x2 x3 x4)
= do put x1
-- put x3 -- Already used SCG info for totality check
put x2
put x4
get
= do x1 <- get
x2 <- get
x3 <- get
return (CGInfo x1 x2 [] x3)
instance Binary CaseType where
put x = case x of
Updatable -> putWord8 0
Shared -> putWord8 1
get = do i <- getWord8
case i of
0 -> return Updatable
1 -> return Shared
_ -> error "Corrupted binary data for CaseType"
instance Binary SC where
put x
= case x of
Case x1 x2 x3 -> do putWord8 0
put x1
put x2
put x3
ProjCase x1 x2 -> do putWord8 1
put x1
put x2
STerm x1 -> do putWord8 2
put x1
UnmatchedCase x1 -> do putWord8 3
put x1
ImpossibleCase -> do putWord8 4
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
return (Case x1 x2 x3)
1 -> do x1 <- get
x2 <- get
return (ProjCase x1 x2)
2 -> do x1 <- get
return (STerm x1)
3 -> do x1 <- get
return (UnmatchedCase x1)
4 -> return ImpossibleCase
_ -> error "Corrupted binary data for SC"
instance Binary CaseAlt where
put x
= {-# SCC "putCaseAlt" #-}
case x of
ConCase x1 x2 x3 x4 -> do putWord8 0
put x1
put x2
put x3
put x4
ConstCase x1 x2 -> do putWord8 1
put x1
put x2
DefaultCase x1 -> do putWord8 2
put x1
FnCase x1 x2 x3 -> do putWord8 3
put x1
put x2
put x3
SucCase x1 x2 -> do putWord8 4
put x1
put x2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (ConCase x1 x2 x3 x4)
1 -> do x1 <- get
x2 <- get
return (ConstCase x1 x2)
2 -> do x1 <- get
return (DefaultCase x1)
3 -> do x1 <- get
x2 <- get
x3 <- get
return (FnCase x1 x2 x3)
4 -> do x1 <- get
x2 <- get
return (SucCase x1 x2)
_ -> error "Corrupted binary data for CaseAlt"
instance Binary CaseDefs where
put (CaseDefs x1 x2)
= do put x1
put x2
get
= do x1 <- get
x2 <- get
return (CaseDefs x1 x2)
instance Binary CaseInfo where
put x@(CaseInfo x1 x2 x3) = do put x1
put x2
put x3
get = do x1 <- get
x2 <- get
x3 <- get
return (CaseInfo x1 x2 x3)
instance Binary Def where
put x
= {-# SCC "putDef" #-}
case x of
Function x1 x2 -> do putWord8 0
put x1
put x2
TyDecl x1 x2 -> do putWord8 1
put x1
put x2
-- all primitives just get added at the start, don't write
Operator x1 x2 x3 -> do return ()
-- no need to add/load original patterns, because they're not
-- used again after totality checking
CaseOp x1 x2 x3 _ _ x4 -> do putWord8 3
put x1
put x2
put x3
put x4
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
return (Function x1 x2)
1 -> do x1 <- get
x2 <- get
return (TyDecl x1 x2)
-- Operator isn't written, don't read
3 -> do x1 <- get
x2 <- get
x3 <- get
-- x4 <- get
-- x3 <- get always []
x5 <- get
return (CaseOp x1 x2 x3 [] [] x5)
_ -> error "Corrupted binary data for Def"
instance Binary Accessibility where
put x
= case x of
Public -> putWord8 0
Frozen -> putWord8 1
Private -> putWord8 2
Hidden -> putWord8 3
get
= do i <- getWord8
case i of
0 -> return Public
1 -> return Frozen
2 -> return Private
3 -> return Hidden
_ -> error "Corrupted binary data for Accessibility"
safeToEnum :: (Enum a, Bounded a, Integral int) => String -> int -> a
safeToEnum label x' = result
where
x = fromIntegral x'
result
| x < fromEnum (minBound `asTypeOf` result)
|| x > fromEnum (maxBound `asTypeOf` result)
= error $ label ++ ": corrupted binary representation in IBC"
| otherwise = toEnum x
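-- As an illustrative sketch (hypothetical type, not used elsewhere in this
-- module), 'safeToEnum' lets a Binary instance avoid a hand-written tag table
-- for any type deriving 'Enum' and 'Bounded':
--
-- > data Direction = North | East | South | West
-- >   deriving (Show, Eq, Enum, Bounded)
-- >
-- > instance Binary Direction where
-- >   put = putWord8 . fromIntegral . fromEnum
-- >   get = safeToEnum "Direction" <$> getWord8
--
-- A corrupted tag then reports "Direction: corrupted binary representation in
-- IBC" instead of crashing inside 'toEnum'.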
instance Binary PReason where
put x
= case x of
Other x1 -> do putWord8 0
put x1
Itself -> putWord8 1
NotCovering -> putWord8 2
NotPositive -> putWord8 3
Mutual x1 -> do putWord8 4
put x1
NotProductive -> putWord8 5
BelieveMe -> putWord8 6
UseUndef x1 -> do putWord8 7
put x1
ExternalIO -> putWord8 8
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Other x1)
1 -> return Itself
2 -> return NotCovering
3 -> return NotPositive
4 -> do x1 <- get
return (Mutual x1)
5 -> return NotProductive
6 -> return BelieveMe
7 -> do x1 <- get
return (UseUndef x1)
8 -> return ExternalIO
_ -> error "Corrupted binary data for PReason"
instance Binary Totality where
put x
= case x of
Total x1 -> do putWord8 0
put x1
Partial x1 -> do putWord8 1
put x1
Unchecked -> do putWord8 2
Productive -> do putWord8 3
Generated -> do putWord8 4
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Total x1)
1 -> do x1 <- get
return (Partial x1)
2 -> return Unchecked
3 -> return Productive
4 -> return Generated
_ -> error "Corrupted binary data for Totality"
instance Binary MetaInformation where
put x
= case x of
EmptyMI -> do putWord8 0
DataMI x1 -> do putWord8 1
put x1
get = do i <- getWord8
case i of
0 -> return EmptyMI
1 -> do x1 <- get
return (DataMI x1)
_ -> error "Corrupted binary data for MetaInformation"
instance Binary DataOpt where
put x = case x of
Codata -> putWord8 0
DefaultEliminator -> putWord8 1
DataErrRev -> putWord8 2
DefaultCaseFun -> putWord8 3
get = do i <- getWord8
case i of
0 -> return Codata
1 -> return DefaultEliminator
2 -> return DataErrRev
3 -> return DefaultCaseFun
_ -> error "Corrupted binary data for DataOpt"
instance Binary FnOpt where
put x
= case x of
Inlinable -> putWord8 0
TotalFn -> putWord8 1
Dictionary -> putWord8 2
AssertTotal -> putWord8 3
Specialise x -> do putWord8 4
put x
AllGuarded -> putWord8 5
PartialFn -> putWord8 6
Implicit -> putWord8 7
Reflection -> putWord8 8
ErrorHandler -> putWord8 9
ErrorReverse -> putWord8 10
CoveringFn -> putWord8 11
NoImplicit -> putWord8 12
Constructor -> putWord8 13
CExport x1 -> do putWord8 14
put x1
AutoHint -> putWord8 15
PEGenerated -> putWord8 16
StaticFn -> putWord8 17
OverlappingDictionary -> putWord8 18
ErrorReduce -> putWord8 20
get
= do i <- getWord8
case i of
0 -> return Inlinable
1 -> return TotalFn
2 -> return Dictionary
3 -> return AssertTotal
4 -> do x <- get
return (Specialise x)
5 -> return AllGuarded
6 -> return PartialFn
7 -> return Implicit
8 -> return Reflection
9 -> return ErrorHandler
10 -> return ErrorReverse
11 -> return CoveringFn
12 -> return NoImplicit
13 -> return Constructor
14 -> do x1 <- get
return $ CExport x1
15 -> return AutoHint
16 -> return PEGenerated
17 -> return StaticFn
18 -> return OverlappingDictionary
20 -> return ErrorReduce
_ -> error "Corrupted binary data for FnOpt"
instance Binary Fixity where
put x
= case x of
Infixl x1 -> do putWord8 0
put x1
Infixr x1 -> do putWord8 1
put x1
InfixN x1 -> do putWord8 2
put x1
PrefixN x1 -> do putWord8 3
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Infixl x1)
1 -> do x1 <- get
return (Infixr x1)
2 -> do x1 <- get
return (InfixN x1)
3 -> do x1 <- get
return (PrefixN x1)
_ -> error "Corrupted binary data for Fixity"
instance Binary FixDecl where
put (Fix x1 x2)
= do put x1
put x2
get
= do x1 <- get
x2 <- get
return (Fix x1 x2)
instance Binary ArgOpt where
put x
= case x of
HideDisplay -> putWord8 0
InaccessibleArg -> putWord8 1
AlwaysShow -> putWord8 2
UnknownImp -> putWord8 3
get
= do i <- getWord8
case i of
0 -> return HideDisplay
1 -> return InaccessibleArg
2 -> return AlwaysShow
3 -> return UnknownImp
_ -> error "Corrupted binary data for Static"
instance Binary Static where
put x
= case x of
Static -> putWord8 0
Dynamic -> putWord8 1
get
= do i <- getWord8
case i of
0 -> return Static
1 -> return Dynamic
_ -> error "Corrupted binary data for Static"
instance Binary Plicity where
put x
= case x of
Imp x1 x2 x3 x4 _ x5 ->
do putWord8 0
put x1
put x2
put x3
put x4
put x5
Exp x1 x2 x3 x4 ->
do putWord8 1
put x1
put x2
put x3
put x4
Constraint x1 x2 x3 ->
do putWord8 2
put x1
put x2
put x3
TacImp x1 x2 x3 x4 ->
do putWord8 3
put x1
put x2
put x3
put x4
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (Imp x1 x2 x3 x4 False x5)
1 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (Exp x1 x2 x3 x4)
2 -> do x1 <- get
x2 <- get
x3 <- get
return (Constraint x1 x2 x3)
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (TacImp x1 x2 x3 x4)
_ -> error "Corrupted binary data for Plicity"
instance (Binary t) => Binary (PDecl' t) where
put x
= case x of
PFix x1 x2 x3 -> do putWord8 0
put x1
put x2
put x3
PTy x1 x2 x3 x4 x5 x6 x7 x8
-> do putWord8 1
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
PClauses x1 x2 x3 x4 -> do putWord8 2
put x1
put x2
put x3
put x4
PData x1 x2 x3 x4 x5 x6 ->
do putWord8 3
put x1
put x2
put x3
put x4
put x5
put x6
PParams x1 x2 x3 -> do putWord8 4
put x1
put x2
put x3
PNamespace x1 x2 x3 -> do putWord8 5
put x1
put x2
put x3
PRecord x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 ->
do putWord8 6
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
put x9
put x10
put x11
put x12
PInterface x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12
-> do putWord8 7
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
put x9
put x10
put x11
put x12
PImplementation x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15 ->
do putWord8 8
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
put x9
put x10
put x11
put x12
put x13
put x14
put x15
PDSL x1 x2 -> do putWord8 9
put x1
put x2
PCAF x1 x2 x3 -> do putWord8 10
put x1
put x2
put x3
PMutual x1 x2 -> do putWord8 11
put x1
put x2
PPostulate x1 x2 x3 x4 x5 x6 x7 x8
-> do putWord8 12
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
PSyntax x1 x2 -> do putWord8 13
put x1
put x2
PDirective x1 -> error "Cannot serialize PDirective"
PProvider x1 x2 x3 x4 x5 x6 ->
do putWord8 15
put x1
put x2
put x3
put x4
put x5
put x6
PTransform x1 x2 x3 x4 -> do putWord8 16
put x1
put x2
put x3
put x4
PRunElabDecl x1 x2 x3 -> do putWord8 17
put x1
put x2
put x3
POpenInterfaces x1 x2 x3 -> do putWord8 18
put x1
put x2
put x3
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
return (PFix x1 x2 x3)
1 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
return (PTy x1 x2 x3 x4 x5 x6 x7 x8)
2 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PClauses x1 x2 x3 x4)
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PData x1 x2 x3 x4 x5 x6)
4 -> do x1 <- get
x2 <- get
x3 <- get
return (PParams x1 x2 x3)
5 -> do x1 <- get
x2 <- get
x3 <- get
return (PNamespace x1 x2 x3)
6 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
x9 <- get
x10 <- get
x11 <- get
x12 <- get
return (PRecord x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12)
7 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
x9 <- get
x10 <- get
x11 <- get
x12 <- get
return (PInterface x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12)
8 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
x9 <- get
x10 <- get
x11 <- get
x12 <- get
x13 <- get
x14 <- get
x15 <- get
return (PImplementation x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 x13 x14 x15)
9 -> do x1 <- get
x2 <- get
return (PDSL x1 x2)
10 -> do x1 <- get
x2 <- get
x3 <- get
return (PCAF x1 x2 x3)
11 -> do x1 <- get
x2 <- get
return (PMutual x1 x2)
12 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
return (PPostulate x1 x2 x3 x4 x5 x6 x7 x8)
13 -> do x1 <- get
x2 <- get
return (PSyntax x1 x2)
14 -> do error "Cannot deserialize PDirective"
15 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PProvider x1 x2 x3 x4 x5 x6)
16 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PTransform x1 x2 x3 x4)
17 -> do x1 <- get
x2 <- get
x3 <- get
return (PRunElabDecl x1 x2 x3)
18 -> do x1 <- get
x2 <- get
x3 <- get
return (POpenInterfaces x1 x2 x3)
_ -> error "Corrupted binary data for PDecl'"
instance Binary t => Binary (ProvideWhat' t) where
put (ProvTerm x1 x2) = do putWord8 0
put x1
put x2
put (ProvPostulate x1) = do putWord8 1
put x1
get = do y <- getWord8
case y of
0 -> do x1 <- get
x2 <- get
return (ProvTerm x1 x2)
1 -> do x1 <- get
return (ProvPostulate x1)
_ -> error "Corrupted binary data for ProvideWhat"
instance Binary Using where
put (UImplicit x1 x2) = do putWord8 0; put x1; put x2
put (UConstraint x1 x2) = do putWord8 1; put x1; put x2
get = do i <- getWord8
case i of
0 -> do x1 <- get; x2 <- get; return (UImplicit x1 x2)
1 -> do x1 <- get; x2 <- get; return (UConstraint x1 x2)
_ -> error "Corrupted binary data for Using"
instance Binary SyntaxInfo where
put (Syn x1 x2 x3 x4 _ _ x5 x6 x7 _ _ x8 _ _ _)
= do put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
get
= do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
return (Syn x1 x2 x3 x4 [] id x5 x6 x7 Nothing 0 x8 0 True True)
instance (Binary t) => Binary (PClause' t) where
put x
= case x of
PClause x1 x2 x3 x4 x5 x6 -> do putWord8 0
put x1
put x2
put x3
put x4
put x5
put x6
PWith x1 x2 x3 x4 x5 x6 x7 -> do putWord8 1
put x1
put x2
put x3
put x4
put x5
put x6
put x7
PClauseR x1 x2 x3 x4 -> do putWord8 2
put x1
put x2
put x3
put x4
PWithR x1 x2 x3 x4 x5 -> do putWord8 3
put x1
put x2
put x3
put x4
put x5
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PClause x1 x2 x3 x4 x5 x6)
1 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
return (PWith x1 x2 x3 x4 x5 x6 x7)
2 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PClauseR x1 x2 x3 x4)
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PWithR x1 x2 x3 x4 x5)
_ -> error "Corrupted binary data for PClause'"
instance (Binary t) => Binary (PData' t) where
put x
= case x of
PDatadecl x1 x2 x3 x4 -> do putWord8 0
put x1
put x2
put x3
put x4
PLaterdecl x1 x2 x3 -> do putWord8 1
put x1
put x2
put x3
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PDatadecl x1 x2 x3 x4)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (PLaterdecl x1 x2 x3)
_ -> error "Corrupted binary data for PData'"
instance Binary PunInfo where
put x
= case x of
TypeOrTerm -> putWord8 0
IsType -> putWord8 1
IsTerm -> putWord8 2
get
= do i <- getWord8
case i of
0 -> return TypeOrTerm
1 -> return IsType
2 -> return IsTerm
_ -> error "Corrupted binary data for PunInfo"
instance Binary PTerm where
put x
= case x of
PQuote x1 -> do putWord8 0
put x1
PRef x1 x2 x3 -> do putWord8 1
put x1
put x2
put x3
PInferRef x1 x2 x3 -> do putWord8 2
put x1
put x2
put x3
PPatvar x1 x2 -> do putWord8 3
put x1
put x2
PLam x1 x2 x3 x4 x5 -> do putWord8 4
put x1
put x2
put x3
put x4
put x5
PPi x1 x2 x3 x4 x5 -> do putWord8 5
put x1
put x2
put x3
put x4
put x5
PLet x1 x2 x3 x4 x5 x6 -> do putWord8 6
put x1
put x2
put x3
put x4
put x5
put x6
PTyped x1 x2 -> do putWord8 7
put x1
put x2
PAppImpl x1 x2 -> error "PAppImpl in final term"
PApp x1 x2 x3 -> do putWord8 8
put x1
put x2
put x3
PAppBind x1 x2 x3 -> do putWord8 9
put x1
put x2
put x3
PMatchApp x1 x2 -> do putWord8 10
put x1
put x2
PCase x1 x2 x3 -> do putWord8 11
put x1
put x2
put x3
PTrue x1 x2 -> do putWord8 12
put x1
put x2
PResolveTC x1 -> do putWord8 15
put x1
PRewrite x1 x2 x3 x4 x5 -> do putWord8 17
put x1
put x2
put x3
put x4
put x5
PPair x1 x2 x3 x4 x5 -> do putWord8 18
put x1
put x2
put x3
put x4
put x5
PDPair x1 x2 x3 x4 x5 x6 -> do putWord8 19
put x1
put x2
put x3
put x4
put x5
put x6
PAlternative x1 x2 x3 -> do putWord8 20
put x1
put x2
put x3
PHidden x1 -> do putWord8 21
put x1
PType x1 -> do putWord8 22
put x1
PGoal x1 x2 x3 x4 -> do putWord8 23
put x1
put x2
put x3
put x4
PConstant x1 x2 -> do putWord8 24
put x1
put x2
Placeholder -> putWord8 25
PDoBlock x1 -> do putWord8 26
put x1
PIdiom x1 x2 -> do putWord8 27
put x1
put x2
PMetavar x1 x2 -> do putWord8 29
put x1
put x2
PProof x1 -> do putWord8 30
put x1
PTactics x1 -> do putWord8 31
put x1
PImpossible -> putWord8 33
PCoerced x1 -> do putWord8 34
put x1
PUnifyLog x1 -> do putWord8 35
put x1
PNoImplicits x1 -> do putWord8 36
put x1
PDisamb x1 x2 -> do putWord8 37
put x1
put x2
PUniverse x1 x2 -> do putWord8 38
put x1
put x2
PRunElab x1 x2 x3 -> do putWord8 39
put x1
put x2
put x3
PAs x1 x2 x3 -> do putWord8 40
put x1
put x2
put x3
PElabError x1 -> do putWord8 41
put x1
PQuasiquote x1 x2 -> do putWord8 42
put x1
put x2
PUnquote x1 -> do putWord8 43
put x1
PQuoteName x1 x2 x3 -> do putWord8 44
put x1
put x2
put x3
PIfThenElse x1 x2 x3 x4 -> do putWord8 45
put x1
put x2
put x3
put x4
PConstSugar x1 x2 -> do putWord8 46
put x1
put x2
PWithApp x1 x2 x3 -> do putWord8 47
put x1
put x2
put x3
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (PQuote x1)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (PRef x1 x2 x3)
2 -> do x1 <- get
x2 <- get
x3 <- get
return (PInferRef x1 x2 x3)
3 -> do x1 <- get
x2 <- get
return (PPatvar x1 x2)
4 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PLam x1 x2 x3 x4 x5)
5 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PPi x1 x2 x3 x4 x5)
6 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PLet x1 x2 x3 x4 x5 x6)
7 -> do x1 <- get
x2 <- get
return (PTyped x1 x2)
8 -> do x1 <- get
x2 <- get
x3 <- get
return (PApp x1 x2 x3)
9 -> do x1 <- get
x2 <- get
x3 <- get
return (PAppBind x1 x2 x3)
10 -> do x1 <- get
x2 <- get
return (PMatchApp x1 x2)
11 -> do x1 <- get
x2 <- get
x3 <- get
return (PCase x1 x2 x3)
12 -> do x1 <- get
x2 <- get
return (PTrue x1 x2)
15 -> do x1 <- get
return (PResolveTC x1)
17 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PRewrite x1 x2 x3 x4 x5)
18 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PPair x1 x2 x3 x4 x5)
19 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PDPair x1 x2 x3 x4 x5 x6)
20 -> do x1 <- get
x2 <- get
x3 <- get
return (PAlternative x1 x2 x3)
21 -> do x1 <- get
return (PHidden x1)
22 -> do x1 <- get
return (PType x1)
23 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PGoal x1 x2 x3 x4)
24 -> do x1 <- get
x2 <- get
return (PConstant x1 x2)
25 -> return Placeholder
26 -> do x1 <- get
return (PDoBlock x1)
27 -> do x1 <- get
x2 <- get
return (PIdiom x1 x2)
29 -> do x1 <- get
x2 <- get
return (PMetavar x1 x2)
30 -> do x1 <- get
return (PProof x1)
31 -> do x1 <- get
return (PTactics x1)
33 -> return PImpossible
34 -> do x1 <- get
return (PCoerced x1)
35 -> do x1 <- get
return (PUnifyLog x1)
36 -> do x1 <- get
return (PNoImplicits x1)
37 -> do x1 <- get
x2 <- get
return (PDisamb x1 x2)
38 -> do x1 <- get
x2 <- get
return (PUniverse x1 x2)
39 -> do x1 <- get
x2 <- get
x3 <- get
return (PRunElab x1 x2 x3)
40 -> do x1 <- get
x2 <- get
x3 <- get
return (PAs x1 x2 x3)
41 -> do x1 <- get
return (PElabError x1)
42 -> do x1 <- get
x2 <- get
return (PQuasiquote x1 x2)
43 -> do x1 <- get
return (PUnquote x1)
44 -> do x1 <- get
x2 <- get
x3 <- get
return (PQuoteName x1 x2 x3)
45 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PIfThenElse x1 x2 x3 x4)
46 -> do x1 <- get
x2 <- get
return (PConstSugar x1 x2)
47 -> do x1 <- get
x2 <- get
x3 <- get
return (PWithApp x1 x2 x3)
_ -> error "Corrupted binary data for PTerm"
instance Binary PAltType where
put x
= case x of
ExactlyOne x1 -> do putWord8 0
put x1
FirstSuccess -> putWord8 1
TryImplicit -> putWord8 2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (ExactlyOne x1)
1 -> return FirstSuccess
2 -> return TryImplicit
_ -> error "Corrupted binary data for PAltType"
instance (Binary t) => Binary (PTactic' t) where
put x
= case x of
Intro x1 -> do putWord8 0
put x1
Focus x1 -> do putWord8 1
put x1
Refine x1 x2 -> do putWord8 2
put x1
put x2
Rewrite x1 -> do putWord8 3
put x1
LetTac x1 x2 -> do putWord8 4
put x1
put x2
Exact x1 -> do putWord8 5
put x1
Compute -> putWord8 6
Trivial -> putWord8 7
Solve -> putWord8 8
Attack -> putWord8 9
ProofState -> putWord8 10
ProofTerm -> putWord8 11
Undo -> putWord8 12
Try x1 x2 -> do putWord8 13
put x1
put x2
TSeq x1 x2 -> do putWord8 14
put x1
put x2
Qed -> putWord8 15
ApplyTactic x1 -> do putWord8 16
put x1
Reflect x1 -> do putWord8 17
put x1
Fill x1 -> do putWord8 18
put x1
Induction x1 -> do putWord8 19
put x1
ByReflection x1 -> do putWord8 20
put x1
ProofSearch x1 x2 x3 x4 x5 x6 -> do putWord8 21
put x1
put x2
put x3
put x4
put x5
put x6
DoUnify -> putWord8 22
CaseTac x1 -> do putWord8 23
put x1
SourceFC -> putWord8 24
Intros -> putWord8 25
Equiv x1 -> do putWord8 26
put x1
Claim x1 x2 -> do putWord8 27
put x1
put x2
Unfocus -> putWord8 28
MatchRefine x1 -> do putWord8 29
put x1
LetTacTy x1 x2 x3 -> do putWord8 30
put x1
put x2
put x3
TCImplementation -> putWord8 31
GoalType x1 x2 -> do putWord8 32
put x1
put x2
TCheck x1 -> do putWord8 33
put x1
TEval x1 -> do putWord8 34
put x1
TDocStr x1 -> do putWord8 35
put x1
TSearch x1 -> do putWord8 36
put x1
Skip -> putWord8 37
TFail x1 -> do putWord8 38
put x1
Abandon -> putWord8 39
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Intro x1)
1 -> do x1 <- get
return (Focus x1)
2 -> do x1 <- get
x2 <- get
return (Refine x1 x2)
3 -> do x1 <- get
return (Rewrite x1)
4 -> do x1 <- get
x2 <- get
return (LetTac x1 x2)
5 -> do x1 <- get
return (Exact x1)
6 -> return Compute
7 -> return Trivial
8 -> return Solve
9 -> return Attack
10 -> return ProofState
11 -> return ProofTerm
12 -> return Undo
13 -> do x1 <- get
x2 <- get
return (Try x1 x2)
14 -> do x1 <- get
x2 <- get
return (TSeq x1 x2)
15 -> return Qed
16 -> do x1 <- get
return (ApplyTactic x1)
17 -> do x1 <- get
return (Reflect x1)
18 -> do x1 <- get
return (Fill x1)
19 -> do x1 <- get
return (Induction x1)
20 -> do x1 <- get
return (ByReflection x1)
21 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (ProofSearch x1 x2 x3 x4 x5 x6)
22 -> return DoUnify
23 -> do x1 <- get
return (CaseTac x1)
24 -> return SourceFC
25 -> return Intros
26 -> do x1 <- get
return (Equiv x1)
27 -> do x1 <- get
x2 <- get
return (Claim x1 x2)
28 -> return Unfocus
29 -> do x1 <- get
return (MatchRefine x1)
30 -> do x1 <- get
x2 <- get
x3 <- get
return (LetTacTy x1 x2 x3)
31 -> return TCImplementation
32 -> do x1 <- get
x2 <- get
return (GoalType x1 x2)
33 -> do x1 <- get
return (TCheck x1)
34 -> do x1 <- get
return (TEval x1)
35 -> do x1 <- get
return (TDocStr x1)
36 -> do x1 <- get
return (TSearch x1)
37 -> return Skip
38 -> do x1 <- get
return (TFail x1)
39 -> return Abandon
_ -> error "Corrupted binary data for PTactic'"
instance (Binary t) => Binary (PDo' t) where
put x
= case x of
DoExp x1 x2 -> do putWord8 0
put x1
put x2
DoBind x1 x2 x3 x4 -> do putWord8 1
put x1
put x2
put x3
put x4
DoBindP x1 x2 x3 x4 -> do putWord8 2
put x1
put x2
put x3
put x4
DoLet x1 x2 x3 x4 x5 -> do putWord8 3
put x1
put x2
put x3
put x4
put x5
DoLetP x1 x2 x3 -> do putWord8 4
put x1
put x2
put x3
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
return (DoExp x1 x2)
1 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (DoBind x1 x2 x3 x4)
2 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (DoBindP x1 x2 x3 x4)
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (DoLet x1 x2 x3 x4 x5)
4 -> do x1 <- get
x2 <- get
x3 <- get
return (DoLetP x1 x2 x3)
_ -> error "Corrupted binary data for PDo'"
instance (Binary t) => Binary (PArg' t) where
put x
= case x of
PImp x1 x2 x3 x4 x5 ->
do putWord8 0
put x1
put x2
put x3
put x4
put x5
PExp x1 x2 x3 x4 ->
do putWord8 1
put x1
put x2
put x3
put x4
PConstraint x1 x2 x3 x4 ->
do putWord8 2
put x1
put x2
put x3
put x4
PTacImplicit x1 x2 x3 x4 x5 ->
do putWord8 3
put x1
put x2
put x3
put x4
put x5
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PImp x1 x2 x3 x4 x5)
1 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PExp x1 x2 x3 x4)
2 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PConstraint x1 x2 x3 x4)
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PTacImplicit x1 x2 x3 x4 x5)
_ -> error "Corrupted binary data for PArg'"
instance Binary InterfaceInfo where
put (CI x1 x2 x3 x4 x5 x6 x7 _ x8)
= do put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
get
= do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
return (CI x1 x2 x3 x4 x5 x6 x7 [] x8)
instance Binary RecordInfo where
put (RI x1 x2 x3)
= do put x1
put x2
put x3
get
= do x1 <- get
x2 <- get
x3 <- get
return (RI x1 x2 x3)
instance Binary OptInfo where
put (Optimise x1 x2 x3)
= do put x1
put x2
put x3
get
= do x1 <- get
x2 <- get
x3 <- get
return (Optimise x1 x2 x3)
instance Binary FnInfo where
put (FnInfo x1)
= put x1
get
= do x1 <- get
return (FnInfo x1)
instance Binary TypeInfo where
put (TI x1 x2 x3 x4 x5 x6) = do put x1
put x2
put x3
put x4
put x5
put x6
get = do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (TI x1 x2 x3 x4 x5 x6)
instance Binary SynContext where
put x
= case x of
PatternSyntax -> putWord8 0
TermSyntax -> putWord8 1
AnySyntax -> putWord8 2
get
= do i <- getWord8
case i of
0 -> return PatternSyntax
1 -> return TermSyntax
2 -> return AnySyntax
_ -> error "Corrupted binary data for SynContext"
instance Binary Syntax where
put (Rule x1 x2 x3)
= do putWord8 0
put x1
put x2
put x3
put (DeclRule x1 x2)
= do putWord8 1
put x1
put x2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
return (Rule x1 x2 x3)
1 -> do x1 <- get
x2 <- get
return (DeclRule x1 x2)
_ -> error "Corrupted binary data for Syntax"
instance (Binary t) => Binary (DSL' t) where
put (DSL x1 x2 x3 x4 x5 x6 x7 x8 x9)
= do put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
put x9
get
= do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
x9 <- get
return (DSL x1 x2 x3 x4 x5 x6 x7 x8 x9)
instance Binary SSymbol where
put x
= case x of
Keyword x1 -> do putWord8 0
put x1
Symbol x1 -> do putWord8 1
put x1
Expr x1 -> do putWord8 2
put x1
SimpleExpr x1 -> do putWord8 3
put x1
Binding x1 -> do putWord8 4
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Keyword x1)
1 -> do x1 <- get
return (Symbol x1)
2 -> do x1 <- get
return (Expr x1)
3 -> do x1 <- get
return (SimpleExpr x1)
4 -> do x1 <- get
return (Binding x1)
_ -> error "Corrupted binary data for SSymbol"
instance Binary Codegen where
put x
= case x of
Via ir str -> do putWord8 0
put ir
put str
Bytecode -> putWord8 1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
return (Via x1 x2)
1 -> return Bytecode
_ -> error "Corrupted binary data for Codegen"
instance Binary IRFormat where
put x = case x of
IBCFormat -> putWord8 0
JSONFormat -> putWord8 1
get = do i <- getWord8
case i of
0 -> return IBCFormat
1 -> return JSONFormat
_ -> error "Corrupted binary data for IRFormat"
|
jmitchell/Idris-dev
|
src/Idris/IBC.hs
|
bsd-3-clause
| 102,681
| 0
| 21
| 56,796
| 28,070
| 12,890
| 15,180
| 2,461
| 17
|
{-# LANGUAGE OverloadedStrings, FlexibleInstances, GeneralizedNewtypeDeriving #-}
module Options where
{-
This module is temporary, until ghc-mod exports the contents from its library,
as it is currently only in the ghc-mod executable.
It is based on/copied from
https://github.com/DanielG/ghc-mod/blob/master/src/GHCMod/Options.hs
-}
import Options.Applicative
import Data.Monoid
-- import Prelude
-- import Options.Applicative
import Options.Applicative.Types
import Language.Haskell.GhcMod.Types
import Control.Arrow
import Data.Char (toUpper, toLower)
import Data.List (intercalate)
import Language.Haskell.GhcMod.Read
-- import GHCMod.Options.Commands
-- import GHCMod.Version
-- import GHCMod.Options.DocUtils
-- import GHCMod.Options.Help
-- import GHCMod.Options.ShellParse
-- import Options.Applicative
import Options.Applicative.Help.Pretty (Doc)
import qualified Options.Applicative.Help.Pretty as PP
import Control.Monad.State
import GHC.Exts( IsString(..) )
import Data.Maybe
-- import Data.Monoid
import Prelude
-- ---------------------------------------------------------------------
splitOn :: Eq a => a -> [a] -> ([a], [a])
splitOn c = second (drop 1) . break (==c)
logLevelParser :: Parser GmLogLevel
logLevelParser =
logLevelSwitch <*>
logLevelOption
<||> silentSwitch
where
logLevelOption =
option parseLL
$$ long "verbose"
<=> metavar "LEVEL"
<=> value GmWarning
<=> showDefaultWith showLL
<=> help' $$$ do
"Set log level ("
<> int' (fromEnum (minBound :: GmLogLevel))
<> "-"
<> int' (fromEnum (maxBound :: GmLogLevel))
<> ")"
"You can also use strings (case-insensitive):"
para'
$ intercalate ", "
$ map showLL ([minBound..maxBound] :: [GmLogLevel])
logLevelSwitch =
repeatAp succ' . length <$> many $$ flag' ()
$$ short 'v'
<=> help "Increase log level"
silentSwitch = flag' GmSilent
$$ long "silent"
<=> short 's'
<=> help "Be silent, set log level to 'silent'"
showLL = drop 2 . map toLower . show
repeatAp f n = foldr (.) id (replicate n f)
succ' x | x == maxBound = x
| otherwise = succ x
parseLL = do
v <- readerAsk
let
il'= toEnum . min maxBound <$> readMaybe v
ll' = readMaybe ("Gm" ++ capFirst v)
maybe (readerError $ "Not a log level \"" ++ v ++ "\"") return $ ll' <|> il'
capFirst (h:t) = toUpper h : map toLower t
capFirst [] = []
outputOptsSpec :: Parser OutputOpts
outputOptsSpec = OutputOpts
<$> logLevelParser
<*> flag PlainStyle LispStyle
$$ long "tolisp"
<=> short 'l'
<=> help "Format output as an S-Expression"
<*> LineSeparator <$$> strOption
$$ long "boundary"
<=> long "line-separator"
<=> short 'b'
<=> metavar "SEP"
<=> value "\0"
<=> showDefault
<=> help "Output line separator"
<*> optional $$ splitOn ',' <$$> strOption
$$ long "line-prefix"
<=> metavar "OUT,ERR"
<=> help "Output prefixes"
programsArgSpec :: Parser Programs
programsArgSpec = Programs
<$> strOption
$$ long "with-ghc"
<=> value "ghc"
<=> showDefault
<=> help "GHC executable to use"
<*> strOption
$$ long "with-ghc-pkg"
<=> value "ghc-pkg"
<=> showDefault
<=> help "ghc-pkg executable to use (only needed when guessing from GHC path fails)"
<*> strOption
$$ long "with-cabal"
<=> value "cabal"
<=> showDefault
<=> help "cabal-install executable to use"
<*> strOption
$$ long "with-stack"
<=> value "stack"
<=> showDefault
<=> help "stack executable to use"
globalArgSpec :: Parser Options
globalArgSpec = Options
<$> outputOptsSpec
<*> programsArgSpec
<*> many $$ strOption
$$ long "ghcOpt"
<=> long "ghc-option"
<=> short 'g'
<=> metavar "OPT"
<=> help "Option to be passed to GHC"
<*> many fileMappingSpec
where
fileMappingSpec =
getFileMapping . splitOn '=' <$> strOption
$$ long "map-file"
<=> metavar "MAPPING"
<=> fileMappingHelp
fileMappingHelp = help' $ do
"Redirect one file to another"
"--map-file \"file1.hs=file2.hs\""
indent 4 $ do
"can be used to tell ghc-mod"
\\ "that it should take source code"
\\ "for `file1.hs` from `file2.hs`."
"`file1.hs` can be either full path,"
\\ "or path relative to project root."
"`file2.hs` has to be either relative to project root,"
\\ "or full path (preferred)"
"--map-file \"file.hs\""
indent 4 $ do
"can be used to tell ghc-mod that it should take"
\\ "source code for `file.hs` from stdin. File end"
\\ "marker is `\\n\\EOT\\n`, i.e. `\\x0A\\x04\\x0A`."
\\ "`file.hs` may or may not exist, and should be"
\\ "either full path, or relative to project root."
getFileMapping = second (\i -> if null i then Nothing else Just i)
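-- For illustration (not part of the upstream ghc-mod code), the mapping parser
-- splits on the first '=' and treats a missing right-hand side as "read from stdin":
--
-- > getFileMapping (splitOn '=' "file1.hs=file2.hs") == ("file1.hs", Just "file2.hs")
-- > getFileMapping (splitOn '=' "file.hs")           == ("file.hs", Nothing)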
-- ghc-mod: Making Haskell development *more* fun
-- Copyright (C) 2015 Nikolay Yakimov <root@livid.pp.ru>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
-- module GHCMod.Options.DocUtils (
-- ($$),
-- ($$$),
-- (<=>),
-- (<$$>),
-- (<||>)
-- ) where
-- import Options.Applicative
-- import Data.Monoid
-- import Prelude
infixl 6 <||>
infixr 7 <$$>
infixr 7 $$
infixr 8 <=>
infixr 9 $$$
($$) :: (a -> b) -> a -> b
($$) = ($)
($$$) :: (a -> b) -> a -> b
($$$) = ($)
(<||>) :: Alternative a => a b -> a b -> a b
(<||>) = (<|>)
(<=>) :: Monoid m => m -> m -> m
(<=>) = (<>)
(<$$>) :: Functor f => (a -> b) -> f a -> f b
(<$$>) = (<$>)
-- ---------------------------------------------------------------------
newtype MyDocM s a = MyDoc {unwrapState :: State s a}
deriving (Monad, Functor, Applicative, MonadState s)
type MyDoc = MyDocM (Maybe Doc) ()
instance IsString (MyDocM (Maybe Doc) a) where
fromString = append . para
instance Monoid (MyDocM (Maybe Doc) ()) where
mappend a b = append $ doc a <> doc b
mempty = append PP.empty
para :: String -> Doc
para = PP.fillSep . map PP.text . words
append :: Doc -> MyDocM (Maybe Doc) a
append s = modify m >> return undefined
where
m :: Maybe Doc -> Maybe Doc
m Nothing = Just s
m (Just old) = Just $ old PP..$. s
infixr 7 \\ -- can't be last on the line
(\\) :: MyDoc -> MyDoc -> MyDoc
(\\) a b = append $ doc a PP.<+> doc b
doc :: MyDoc -> Doc
doc = fromMaybe PP.empty . flip execState Nothing . unwrapState
help' :: MyDoc -> Mod f a
help' = helpDoc . Just . doc
desc :: MyDoc -> InfoMod a
desc = footerDoc . Just . doc . indent 2
code :: MyDoc -> MyDoc
code x = do
_ <- " "
indent 4 x
" "
progDesc' :: MyDoc -> InfoMod a
progDesc' = progDescDoc . Just . doc
indent :: Int -> MyDoc -> MyDoc
indent n = append . PP.indent n . doc
int' :: Int -> MyDoc
int' = append . PP.int
para' :: String -> MyDoc
para' = append . para
|
kmate/HaRe
|
src/Options.hs
|
bsd-3-clause
| 7,943
| 0
| 24
| 2,163
| 1,860
| 968
| 892
| 188
| 2
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Rho.InfoHash where
import Control.DeepSeq (NFData)
import Data.Bits
import qualified Data.ByteString as B
-- | 20-byte info_hash of torrent
-- TODO: Move this type to somewhere nice and remove this module.
newtype InfoHash = InfoHash { unwrapInfoHash :: B.ByteString } deriving (Eq, Ord, NFData)
instance Show InfoHash where
show (InfoHash hash) = concatMap toHexDigit $ B.unpack hash
where
toHexDigit d = [toHexDigit' (d `shiftR` 4), toHexDigit' (d .&. 0x0F)]
toHexDigit' 0 = '0'
toHexDigit' 1 = '1'
toHexDigit' 2 = '2'
toHexDigit' 3 = '3'
toHexDigit' 4 = '4'
toHexDigit' 5 = '5'
toHexDigit' 6 = '6'
toHexDigit' 7 = '7'
toHexDigit' 8 = '8'
toHexDigit' 9 = '9'
toHexDigit' 10 = 'a'
toHexDigit' 11 = 'b'
toHexDigit' 12 = 'c'
toHexDigit' 13 = 'd'
toHexDigit' 14 = 'e'
toHexDigit' 15 = 'f'
toHexDigit' d = error $ "toHexDigit: invalid digit: " ++ show d
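-- Example (illustrative only): each byte is rendered as two lowercase hex
-- digits, high nibble first, so
--
-- > show (InfoHash (B.pack [0xde, 0xad, 0xbe, 0xef])) == "deadbeef"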
|
osa1/rho-torrent
|
src/Rho/InfoHash.hs
|
bsd-3-clause
| 1,091
| 0
| 11
| 338
| 281
| 150
| 131
| 26
| 0
|
{-# LANGUAGE OverloadedStrings #-}
import Text.Printf (printf)
import Web.Scotty (scotty, get, json, liftAndCatchIO)
import Data.Aeson (object, (.=))
import Control.Concurrent.STM.TVar (newTVarIO, readTVar, modifyTVar)
import Control.Monad.STM (atomically)
main :: IO ()
main = do
putStrLn "\nStarting web server, see http://localhost:3000 ...\n"
counter <- newTVarIO (0 :: Int)
scotty 3000 $ do
get "/" $ do
      -- Bump the request counter and log the request number
count <- liftAndCatchIO $ do
count <- atomically $ do
modifyTVar counter (+1)
readTVar counter
printf "Request #%d\n" count
return count
let resp = object [ "count" .= count ]
json resp
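-- Example interaction (assumed output; the counter advances on every request):
--
-- $ curl http://localhost:3000/
-- {"count":1}
-- $ curl http://localhost:3000/
-- {"count":2}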
|
simnalamburt/stack-practice
|
app/Main.hs
|
bsd-3-clause
| 763
| 0
| 22
| 244
| 212
| 109
| 103
| 20
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Reflex.Dom.Widget.Basic
( partitionMapBySetLT
, listHoldWithKey
, ChildResult (..)
-- * Displaying Values
, text
, dynText
, display
, button
, dyn
, widgetHold
-- * Working with Maps
, diffMapNoEq
, diffMap
, applyMap
, mapPartitionEithers
, applyMapKeysSet
-- * Widgets on Collections
, listWithKey
, listWithKey'
, listWithKeyShallowDiff
, listViewWithKey
, selectViewListWithKey
, selectViewListWithKey_
-- * Creating DOM Elements
, el
, elAttr
, elClass
, elDynAttr
, elDynClass
-- ** With Element Results
, el'
, elAttr'
, elClass'
, elDynAttr'
, elDynClass'
, elDynAttrNS'
, dynamicAttributesToModifyAttributes
-- * List Utils
, list
, simpleList
-- * Specific DOM Elements
, Link (..)
, linkClass
, link
, divClass
, dtdd
, blank
-- * Workflows
, Workflow (..)
, workflow
, workflowView
, mapWorkflow
-- * Tables and Lists
, tableDynAttr
, tabDisplay
, HasAttributes (..)
) where
import Reflex.Class as Reflex
import Reflex.Dom.Builder.Class
import Reflex.Dom.Class
import Reflex.Dom.Internal.Foreign ()
import Reflex.Dynamic
import Reflex.PostBuild.Class
import Control.Arrow
import Control.Lens hiding (children, element)
import Control.Monad.Reader hiding (forM, forM_, mapM, mapM_, sequence, sequence_)
import Data.Align
import Data.Default
import Data.Either
import Data.Foldable
import Data.Functor.Misc
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Semigroup
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.These
import Data.Traversable
import Prelude hiding (mapM, mapM_, sequence, sequence_)
widgetHoldInternal :: DomBuilder t m => m a -> Event t (m b) -> m (a, Event t b)
widgetHoldInternal child0 child' = do
childResult0 <- deletable (void child') child0
childResult' <- placeholder $ def & placeholderConfig_insertAbove .~ fmap (deletable (void child')) child'
return (childResult0, _placeholder_insertedAbove childResult')
-- | Breaks the given Map into pieces based on the given Set. Each piece contains only
-- keys that are less than the key of the piece, and greater than or equal to the key
-- of the piece with the next-smaller key. There is one additional piece containing all
-- keys from the original Map that are greater than or equal to the largest key in the Set.
-- Either k () is used instead of Maybe k so that the resulting map of pieces is sorted
-- with the additional piece last (it gets the largest key).
-- No empty pieces will be included in the output.
--TODO: This can probably be done more efficiently by dividing and conquering, re-using
-- the structure of the Set instead of going through the Set linearly
partitionMapBySetLT :: forall k v. Ord k => Set k -> Map k v -> Map (Either k ()) (Map k v)
partitionMapBySetLT s m0 = Map.fromDistinctAscList $ go (Set.toAscList s) m0
where go :: [k] -> Map k v -> [(Either k (), Map k v)]
go [] m = if Map.null m
then []
else [(Right (), m)]
go (h:t) m = let (lt, eq, gt) = Map.splitLookup h m
geq = maybe id (Map.insert h) eq gt
in if Map.null lt
then go t geq
else (Left h, lt) : go t geq
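-- A worked example (illustrative sketch):
--
-- > partitionMapBySetLT (Set.fromList [2,4])
-- >                     (Map.fromList [(1,'a'),(2,'b'),(3,'c'),(4,'d'),(5,'e')])
-- >   == Map.fromList [ (Left 2,   Map.fromList [(1,'a')])
-- >                   , (Left 4,   Map.fromList [(2,'b'),(3,'c')])
-- >                   , (Right (), Map.fromList [(4,'d'),(5,'e')]) ]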
newtype ChildResult t k a = ChildResult { unChildResult :: (a, Event t (Map k (Maybe (ChildResult t k a)))) }
listHoldWithKey :: forall t m k v a. (Ord k, DomBuilder t m, MonadHold t m, MonadFix m) => Map k v -> Event t (Map k (Maybe v)) -> (k -> v -> m a) -> m (Dynamic t (Map k a))
listHoldWithKey initialChildren modifyChildren buildChild = do
let deleteChildSelector = fanMap $ fmap void modifyChildren
liveChildren :: Dynamic t (Set k) <- foldDyn applyMapKeysSet (Map.keysSet initialChildren) modifyChildren
let placeChildSelector = fanMap $ attachWith partitionMapBySetLT (current liveChildren) $ fmap (Map.mapMaybe id) modifyChildren
buildAugmentedChild :: k -> v -> m (ChildResult t k a)
buildAugmentedChild k v = do
let delete = select deleteChildSelector $ Const2 k
myCfg = def
& insertAbove .~ fmap (imapM buildAugmentedChild) (select placeChildSelector $ Const2 $ Left k)
& deleteSelf .~ delete
ph <- placeholder myCfg
result <- deletable delete $ buildChild k v
return $ ChildResult (result, (fmap Just <$> _placeholder_insertedAbove ph) <> (Map.singleton k Nothing <$ _placeholder_deletedSelf ph)) --Note: we could also use the "deleted" output on deletable, if it had one; we're using this so that everything changes all at once, instead of deletions being prompt and insertions being delayed
rec initialAugmentedResults <- iforM initialChildren buildAugmentedChild
augmentedResults <- foldDyn applyMap initialAugmentedResults $ newInsertedBelow <> newInsertedAbove
let newInsertedAbove = switch $ mconcat . reverse . fmap (snd . unChildResult) . Map.elems <$> current augmentedResults
belowAll <- placeholder $ def & placeholderConfig_insertAbove .~ fmap (imapM buildAugmentedChild) (select placeChildSelector $ Const2 $ Right ())
let newInsertedBelow = fmap Just <$> _placeholder_insertedAbove belowAll
return $ fmap (fmap (fst . unChildResult)) augmentedResults
text :: DomBuilder t m => Text -> m ()
text t = void $ textNode $ def & textNodeConfig_initialContents .~ t
dynText :: forall t m. (PostBuild t m, DomBuilder t m) => Dynamic t Text -> m ()
dynText t = do
postBuild <- getPostBuild
void $ textNode $ (def :: TextNodeConfig t) & textNodeConfig_setContents .~ leftmost
[ updated t
, tag (current t) postBuild
]
display :: (PostBuild t m, DomBuilder t m, Show a) => Dynamic t a -> m ()
display = dynText . fmap (T.pack . show)
button :: DomBuilder t m => Text -> m (Event t ())
button t = do
(e, _) <- element "button" def $ text t
return $ domEvent Click e
--TODO: Should this be renamed to 'widgetView' for consistency with 'widgetHold'?
-- | Given a Dynamic of widget-creating actions, create a widget that is recreated whenever the Dynamic updates.
-- The returned Event of widget results occurs when the Dynamic does.
-- Note: Often, the type 'a' is an Event, in which case the return value is an Event-of-Events that would typically be flattened (via 'switchPromptly').
dyn :: (DomBuilder t m, PostBuild t m) => Dynamic t (m a) -> m (Event t a)
dyn child = do
postBuild <- getPostBuild
let newChild = leftmost [updated child, tag (current child) postBuild]
snd <$> widgetHoldInternal (return ()) newChild
-- | Given an initial widget and an Event of widget-creating actions, create a widget that is recreated whenever the Event fires.
-- The returned Dynamic of widget results occurs when the Event does.
-- Note: Often, the type 'a' is an Event, in which case the return value is a Dynamic-of-Events that would typically be flattened.
widgetHold :: (DomBuilder t m, MonadHold t m) => m a -> Event t (m a) -> m (Dynamic t a)
widgetHold child0 newChild = do
(result0, newResult) <- widgetHoldInternal child0 newChild
holdDyn result0 newResult
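-- A minimal usage sketch (hypothetical widget; assumes a DomBuilder/MonadHold
-- context and OverloadedStrings for the Text literals):
--
-- > do click <- button "Replace"
-- >    widgetHold (text "before") (text "after" <$ click)
--
-- The child shows "before" until the button fires, then is rebuilt as "after".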
diffMapNoEq :: (Ord k) => Map k v -> Map k v -> Map k (Maybe v)
diffMapNoEq olds news = flip Map.mapMaybe (align olds news) $ \case
This _ -> Just Nothing
These _ new -> Just $ Just new
That new -> Just $ Just new
diffMap :: (Ord k, Eq v) => Map k v -> Map k v -> Map k (Maybe v)
diffMap olds news = flip Map.mapMaybe (align olds news) $ \case
This _ -> Just Nothing
These old new
| old == new -> Nothing
| otherwise -> Just $ Just new
That new -> Just $ Just new
applyMap :: Ord k => Map k (Maybe v) -> Map k v -> Map k v
applyMap patch old = insertions `Map.union` (old `Map.difference` deletions)
where (deletions, insertions) = mapPartitionEithers $ maybeToEither <$> patch
maybeToEither = \case
Nothing -> Left ()
Just r -> Right r
mapPartitionEithers :: Map k (Either a b) -> (Map k a, Map k b)
mapPartitionEithers m = (fromLeft <$> ls, fromRight <$> rs)
where (ls, rs) = Map.partition isLeft m
fromLeft (Left l) = l
fromLeft _ = error "mapPartitionEithers: fromLeft received a Right value; this should be impossible"
fromRight (Right r) = r
fromRight _ = error "mapPartitionEithers: fromRight received a Left value; this should be impossible"
-- | Apply a map patch to a set
-- > applyMapKeysSet patch (Map.keysSet m) == Map.keysSet (applyMap patch m)
applyMapKeysSet :: Ord k => Map k (Maybe v) -> Set k -> Set k
applyMapKeysSet patch old = Map.keysSet insertions `Set.union` (old `Set.difference` Map.keysSet deletions)
where (insertions, deletions) = Map.partition isJust patch
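-- A concrete instance of the property above (illustrative only):
--
-- > applyMap (Map.fromList [(1, Just 'x'), (2, Nothing)])
-- >          (Map.fromList [(1,'a'),(2,'b'),(3,'c')])
-- >   == Map.fromList [(1,'x'),(3,'c')]
-- > applyMapKeysSet (Map.fromList [(1, Just 'x'), (2, Nothing)]) (Set.fromList [1,2,3])
-- >   == Set.fromList [1,3]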
--TODO: Something better than Dynamic t (Map k v) - we want something where the Events carry diffs, not the whole value
listWithKey :: forall t k v m a. (Ord k, DomBuilder t m, PostBuild t m, MonadFix m, MonadHold t m) => Dynamic t (Map k v) -> (k -> Dynamic t v -> m a) -> m (Dynamic t (Map k a))
listWithKey vals mkChild = do
postBuild <- getPostBuild
rec sentVals :: Dynamic t (Map k v) <- foldDyn applyMap Map.empty changeVals
let changeVals :: Event t (Map k (Maybe v))
changeVals = attachWith diffMapNoEq (current sentVals) $ leftmost
[ updated vals
, tag (current vals) postBuild --TODO: This should probably be added to the attachWith, not to the updated; if we were using diffMap instead of diffMapNoEq, I think it might not work
]
listWithKeyShallowDiff Map.empty changeVals $ \k v0 dv -> do
mkChild k =<< holdDyn v0 dv
{-# DEPRECATED listWithKey' "listWithKey' has been renamed to listWithKeyShallowDiff; also, its behavior has changed to fix a bug where children were always rebuilt (never updated)" #-}
listWithKey' :: (Ord k, DomBuilder t m, MonadFix m, MonadHold t m) => Map k v -> Event t (Map k (Maybe v)) -> (k -> v -> Event t v -> m a) -> m (Dynamic t (Map k a))
listWithKey' = listWithKeyShallowDiff
-- | Display the given map of items (in key order) using the builder function provided, and update it with the given event. 'Nothing' update entries will delete the corresponding children, and 'Just' entries will create them if they do not exist or send an update event to them if they do.
listWithKeyShallowDiff :: (Ord k, DomBuilder t m, MonadFix m, MonadHold t m) => Map k v -> Event t (Map k (Maybe v)) -> (k -> v -> Event t v -> m a) -> m (Dynamic t (Map k a))
listWithKeyShallowDiff initialVals valsChanged mkChild = do
let childValChangedSelector = fanMap $ fmap (Map.mapMaybe id) valsChanged
sentVals <- foldDyn applyMap Map.empty $ fmap (fmap void) valsChanged
let relevantPatch patch _ = case patch of
Nothing -> Just Nothing -- Even if we let a Nothing through when the element doesn't already exist, this doesn't cause a problem because it is ignored
Just _ -> Nothing -- We don't want to let spurious re-creations of items through
listHoldWithKey initialVals (attachWith (flip (Map.differenceWith relevantPatch)) (current sentVals) valsChanged) $ \k v ->
mkChild k v $ Reflex.select childValChangedSelector $ Const2 k
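-- A minimal usage sketch (hypothetical; assumes the values are Text and the
-- monad also satisfies PostBuild so 'dynText' is available):
--
-- > listWithKeyShallowDiff initialMap updateEvents $ \_k v0 dv ->
-- >   el "li" $ dynText =<< holdDyn v0 dv
--
-- 'Just' entries in 'updateEvents' update existing rows (or create missing
-- ones), and 'Nothing' entries delete them, as described above.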
--TODO: Something better than Dynamic t (Map k v) - we want something where the Events carry diffs, not the whole value
-- | Create a dynamically-changing set of Event-valued widgets.
-- This is like listWithKey, specialized for widgets returning (Event t a). listWithKey would return 'Dynamic t (Map k (Event t a))' in this scenario, but listViewWithKey flattens this to 'Event t (Map k a)' via 'switch'.
listViewWithKey :: (Ord k, DomBuilder t m, PostBuild t m, MonadHold t m, MonadFix m) => Dynamic t (Map k v) -> (k -> Dynamic t v -> m (Event t a)) -> m (Event t (Map k a))
listViewWithKey vals mkChild = switch . fmap mergeMap <$> listViewWithKey' vals mkChild
listViewWithKey' :: (Ord k, DomBuilder t m, PostBuild t m, MonadHold t m, MonadFix m) => Dynamic t (Map k v) -> (k -> Dynamic t v -> m a) -> m (Behavior t (Map k a))
listViewWithKey' vals mkChild = current <$> listWithKey vals mkChild
-- | Create a dynamically-changing set of widgets, one of which is selected at any time.
selectViewListWithKey :: forall t m k v a. (DomBuilder t m, Ord k, PostBuild t m, MonadHold t m, MonadFix m)
=> Dynamic t k -- ^ Current selection key
-> Dynamic t (Map k v) -- ^ Dynamic key/value map
-> (k -> Dynamic t v -> Dynamic t Bool -> m (Event t a)) -- ^ Function to create a widget for a given key from Dynamic value and Dynamic Bool indicating if this widget is currently selected
-> m (Event t (k, a)) -- ^ Event that fires when any child's return Event fires. Contains key of an arbitrary firing widget.
selectViewListWithKey selection vals mkChild = do
let selectionDemux = demux selection -- For good performance, this value must be shared across all children
selectChild <- listWithKey vals $ \k v -> do
let selected = demuxed selectionDemux k
selectSelf <- mkChild k v selected
return $ fmap ((,) k) selectSelf
return $ switchPromptlyDyn $ leftmost . Map.elems <$> selectChild
selectViewListWithKey_ :: forall t m k v a. (DomBuilder t m, Ord k, PostBuild t m, MonadHold t m, MonadFix m)
=> Dynamic t k -- ^ Current selection key
-> Dynamic t (Map k v) -- ^ Dynamic key/value map
-> (k -> Dynamic t v -> Dynamic t Bool -> m (Event t a)) -- ^ Function to create a widget for a given key from Dynamic value and Dynamic Bool indicating if this widget is currently selected
-> m (Event t k) -- ^ Event that fires when any child's return Event fires. Contains key of an arbitrary firing widget.
selectViewListWithKey_ selection vals mkChild = fmap fst <$> selectViewListWithKey selection vals mkChild
-- | Create a DOM element
-- > el "div" (text "Hello World")
-- <div>Hello World</div>
{-# INLINABLE el #-}
el :: forall t m a. DomBuilder t m => Text -> m a -> m a
el elementTag child = snd <$> el' elementTag child
-- | Create a DOM element with attributes
-- > elAttr "a" ("href" =: "http://google.com") (text "Google!")
-- <a href="http://google.com">Google!</a>
{-# INLINABLE elAttr #-}
elAttr :: forall t m a. DomBuilder t m => Text -> Map Text Text -> m a -> m a
elAttr elementTag attrs child = snd <$> elAttr' elementTag attrs child
-- | Create a DOM element with classes
-- > elClass "div" "row" (return ())
-- <div class="row"></div>
{-# INLINABLE elClass #-}
elClass :: forall t m a. DomBuilder t m => Text -> Text -> m a -> m a
elClass elementTag c child = snd <$> elClass' elementTag c child
-- | Create a DOM element with Dynamic Attributes
-- > elClass "div" (constDyn ("class" =: "row")) (return ())
-- <div class="row"></div>
{-# INLINABLE elDynAttr #-}
elDynAttr :: forall t m a. (DomBuilder t m, PostBuild t m) => Text -> Dynamic t (Map Text Text) -> m a -> m a
elDynAttr elementTag attrs child = snd <$> elDynAttr' elementTag attrs child
-- | Create a DOM element with a Dynamic Class
-- > elDynClass "div" (constDyn "row") (return ())
-- <div class="row"></div>
{-# INLINABLE elDynClass #-}
elDynClass :: forall t m a. (DomBuilder t m, PostBuild t m) => Text -> Dynamic t Text -> m a -> m a
elDynClass elementTag c child = snd <$> elDynClass' elementTag c child
-- | Create a DOM element and return the element
-- > do (e, _) <- el' "div" (text "Click")
-- > return $ domEvent Click e
{-# INLINABLE el' #-}
el' :: forall t m a. DomBuilder t m => Text -> m a -> m (Element EventResult (DomBuilderSpace m) t, a)
el' elementTag = element elementTag def
-- | Create a DOM element with attributes and return the element
{-# INLINABLE elAttr' #-}
elAttr' :: forall t m a. DomBuilder t m => Text -> Map Text Text -> m a -> m (Element EventResult (DomBuilderSpace m) t, a)
elAttr' elementTag attrs = element elementTag $ def
& initialAttributes .~ Map.mapKeys (AttributeName Nothing) attrs
-- | Create a DOM element with a class and return the element
{-# INLINABLE elClass' #-}
elClass' :: forall t m a. DomBuilder t m => Text -> Text -> m a -> m (Element EventResult (DomBuilderSpace m) t, a)
elClass' elementTag c = elAttr' elementTag ("class" =: c)
-- | Create a DOM element with Dynamic Attributes and return the element
{-# INLINABLE elDynAttr' #-}
elDynAttr' :: forall t m a. (DomBuilder t m, PostBuild t m) => Text -> Dynamic t (Map Text Text) -> m a -> m (Element EventResult (DomBuilderSpace m) t, a)
elDynAttr' = elDynAttrNS' Nothing
-- | Create a DOM element with a Dynamic class and return the element
{-# INLINABLE elDynClass' #-}
elDynClass' :: forall t m a. (DomBuilder t m, PostBuild t m) => Text -> Dynamic t Text -> m a -> m (Element EventResult (DomBuilderSpace m) t, a)
elDynClass' elementTag c = elDynAttr' elementTag (fmap ("class" =:) c)
{-# INLINABLE elDynAttrNS' #-}
elDynAttrNS' :: forall t m a. (DomBuilder t m, PostBuild t m) => Maybe Text -> Text -> Dynamic t (Map Text Text) -> m a -> m (Element EventResult (DomBuilderSpace m) t, a)
elDynAttrNS' mns elementTag attrs child = do
modifyAttrs <- dynamicAttributesToModifyAttributes attrs
let cfg = def
& elementConfig_namespace .~ mns
& modifyAttributes .~ fmap mapKeysToAttributeName modifyAttrs
element elementTag cfg child
dynamicAttributesToModifyAttributes :: (Ord k, PostBuild t m) => Dynamic t (Map k Text) -> m (Event t (Map k (Maybe Text)))
dynamicAttributesToModifyAttributes = dynamicAttributesToModifyAttributesWithInitial mempty
dynamicAttributesToModifyAttributesWithInitial :: (Ord k, PostBuild t m) => Map k Text -> Dynamic t (Map k Text) -> m (Event t (Map k (Maybe Text)))
dynamicAttributesToModifyAttributesWithInitial attrs0 d = do
postBuild <- getPostBuild
let modificationsNeeded = flip pushAlways (align postBuild $ updated d) $ \case
This () -> do
new <- sample $ current d
return $ diffMap attrs0 new
These () new -> return $ diffMap attrs0 new
That new -> do
old <- sample $ current d
return $ diffMap old new
return modificationsNeeded
--------------------------------------------------------------------------------
-- Copied and pasted from Reflex.Widget.Class
--------------------------------------------------------------------------------
-- | Create a dynamically-changing set of widgets from a Dynamic key/value map.
-- Unlike the 'withKey' variants, the child widgets are insensitive to which key they're associated with.
list :: (Ord k, DomBuilder t m, MonadHold t m, PostBuild t m, MonadFix m) => Dynamic t (Map k v) -> (Dynamic t v -> m a) -> m (Dynamic t (Map k a))
list dm mkChild = listWithKey dm (\_ dv -> mkChild dv)
-- | Create a dynamically-changing set of widgets from a Dynamic list.
simpleList :: (DomBuilder t m, MonadHold t m, PostBuild t m, MonadFix m) => Dynamic t [v] -> (Dynamic t v -> m a) -> m (Dynamic t [a])
simpleList xs mkChild = fmap (fmap (map snd . Map.toList)) $ flip list mkChild $ fmap (Map.fromList . zip [(1::Int)..]) xs
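-- For instance (sketch; assumes OverloadedStrings for the Text literals):
--
-- > simpleList (constDyn ["a", "b", "c"]) (el "li" . dynText)
--
-- builds one <li> per element and keeps the rows in sync with the Dynamic list.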
{-
schedulePostBuild x = performEvent_ . (x <$) =<< getPostBuild
elDynHtml' :: DomBuilder t m => Text -> Dynamic t Text -> m (El t)
elDynHtml' elementTag html = do
e <- buildEmptyElement elementTag (Map.empty :: Map Text Text)
schedulePostBuild $ setInnerHTML e . Just =<< sample (current html)
performEvent_ $ fmap (setInnerHTML e . Just) $ updated html
wrapElement defaultDomEventHandler e
elDynHtmlAttr' :: DomBuilder t m => Text -> Map Text Text -> Dynamic t Text -> m (El t)
elDynHtmlAttr' elementTag attrs html = do
e <- buildEmptyElement elementTag attrs
schedulePostBuild $ setInnerHTML e . Just =<< sample (current html)
performEvent_ $ fmap (setInnerHTML e . Just) $ updated html
wrapElement defaultDomEventHandler e
-}
data Link t
= Link { _link_clicked :: Event t ()
}
linkClass :: DomBuilder t m => Text -> Text -> m (Link t)
linkClass s c = do
(l,_) <- elAttr' "a" ("class" =: c) $ text s
return $ Link $ domEvent Click l
link :: DomBuilder t m => Text -> m (Link t)
link s = linkClass s ""
divClass :: forall t m a. DomBuilder t m => Text -> m a -> m a
divClass = elClass "div"
dtdd :: forall t m a. DomBuilder t m => Text -> m a -> m a
dtdd h w = do
el "dt" $ text h
el "dd" w
blank :: forall m. Monad m => m ()
blank = return ()
newtype Workflow t m a = Workflow { unWorkflow :: m (a, Event t (Workflow t m a)) }
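-- | Run a 'Workflow': display the current step's widget and, whenever that
-- step's event fires, switch to the 'Workflow' it produces. The returned
-- 'Dynamic' holds the result of the step currently being displayed.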
workflow :: forall t m a. (DomBuilder t m, MonadFix m, MonadHold t m) => Workflow t m a -> m (Dynamic t a)
workflow w0 = do
rec eResult <- widgetHold (unWorkflow w0) $ fmap unWorkflow $ switch $ snd <$> current eResult
return $ fmap fst eResult
workflowView :: forall t m a. (DomBuilder t m, MonadFix m, MonadHold t m, PostBuild t m) => Workflow t m a -> m (Event t a)
workflowView w0 = do
rec eResult <- dyn . fmap unWorkflow =<< holdDyn w0 eReplace
eReplace <- fmap switch $ hold never $ fmap snd eResult
return $ fmap fst eResult
mapWorkflow :: (DomBuilder t m) => (a -> b) -> Workflow t m a -> Workflow t m b
mapWorkflow f (Workflow x) = Workflow (fmap (f *** fmap (mapWorkflow f)) x)
-- | A widget to display a table with static columns and dynamic rows.
tableDynAttr :: forall t m r k v. (Ord k, DomBuilder t m, MonadHold t m, PostBuild t m, MonadFix m)
=> Text -- ^ Class applied to <table> element
-> [(Text, k -> Dynamic t r -> m v)] -- ^ Columns of (header, row key -> row value -> child widget)
-> Dynamic t (Map k r) -- ^ Map from row key to row value
-> (k -> m (Dynamic t (Map Text Text))) -- ^ Function to compute <tr> element attributes from row key
-> m (Dynamic t (Map k (Element EventResult (DomBuilderSpace m) t, [v]))) -- ^ Map from row key to (El, list of widget return values)
tableDynAttr klass cols dRows rowAttrs = elAttr "div" (Map.singleton "style" "zoom: 1; overflow: auto; background: white;") $ do
elAttr "table" (Map.singleton "class" klass) $ do
el "thead" $ el "tr" $ do
mapM_ (\(h, _) -> el "th" $ text h) cols
el "tbody" $ do
listWithKey dRows (\k r -> do
dAttrs <- rowAttrs k
elDynAttr' "tr" dAttrs $ mapM (\x -> el "td" $ snd x k r) cols)
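-- A minimal usage sketch (hypothetical row type and column set, not part of
-- the original module; assumes 'dynText' and 'constDyn' are in scope via this
-- module's Reflex imports): a two-column table keyed by Int whose rows hold
-- (name, email) pairs and whose <tr> elements get no extra attributes.
exampleTable :: (DomBuilder t m, MonadHold t m, PostBuild t m, MonadFix m)
             => Dynamic t (Map Int (Text, Text)) -> m ()
exampleTable rows = do
  _ <- tableDynAttr "people-table"
         [ ("Name",  \_ r -> dynText (fmap fst r))
         , ("Email", \_ r -> dynText (fmap snd r)) ]
         rows
         (\_ -> return (constDyn Map.empty))
  return ()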
-- | A widget to construct a tabbed view that shows only one of its child widgets at a time.
-- Creates a header bar containing a <ul> with one <li> per child; clicking a <li> displays
-- the corresponding child and hides all others.
tabDisplay :: forall t m k. (MonadFix m, DomBuilder t m, MonadHold t m, PostBuild t m, Ord k)
=> Text -- ^ Class applied to <ul> element
-> Text -- ^ Class applied to currently active <li> element
-> Map k (Text, m ()) -- ^ Map from (arbitrary) key to (tab label, child widget)
-> m ()
tabDisplay ulClass activeClass tabItems = do
let t0 = listToMaybe $ Map.keys tabItems
rec currentTab :: Demux t (Maybe k) <- elAttr "ul" ("class" =: ulClass) $ do
tabClicksList :: [Event t k] <- Map.elems <$> imapM (\k (s,_) -> headerBarLink s k $ demuxed currentTab (Just k)) tabItems
let eTabClicks :: Event t k = leftmost tabClicksList
fmap demux $ holdDyn t0 $ fmap Just eTabClicks
el "div" $ do
iforM_ tabItems $ \k (_, w) -> do
let isSelected = demuxed currentTab $ Just k
attrs = ffor isSelected $ \s -> if s then Map.empty else Map.singleton "style" "display:none;"
elDynAttr "div" attrs w
return ()
where
headerBarLink :: Text -> k -> Dynamic t Bool -> m (Event t k)
headerBarLink x k isSelected = do
let attrs = fmap (\b -> if b then Map.singleton "class" activeClass else Map.empty) isSelected
elDynAttr "li" attrs $ do
a <- link x
return $ fmap (const k) (_link_clicked a)
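-- A minimal usage sketch (hypothetical tab contents, not part of the original
-- module): a two-tab view whose <ul> gets class "tab-bar" and whose currently
-- active <li> gets class "active".
exampleTabs :: (DomBuilder t m, MonadHold t m, PostBuild t m, MonadFix m) => m ()
exampleTabs = tabDisplay "tab-bar" "active" $ Map.fromList
  [ (1 :: Int, ("Home",     text "home contents"))
  , (2,        ("Settings", text "settings contents"))
  ]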
class HasAttributes a where
type Attrs a :: *
attributes :: Lens' a (Attrs a)
|
manyoo/reflex-dom
|
src/Reflex/Dom/Widget/Basic.hs
|
bsd-3-clause
| 24,249
| 0
| 24
| 5,247
| 7,055
| 3,577
| 3,478
| 331
| 4
|
module Interpreter(evalExpr, evalExprs, eval) where
import Control.Monad
import qualified Control.Monad.Trans.State as S
import qualified Data.Functor.Identity as I
import Types
import Env
evalExpr :: Expr -> S.State Env Expr
evalExpr v@(EValue _) = return v
evalExpr (ESymbol s) = do
env <- S.get
return . envLookup s $ getEnv env
evalExpr (EComb (ex:rest)) = do
x <- evalExpr ex
case x of
(EProc fn) -> fn rest
(EFunc params body) -> do
S.modify addFrame
forM_ (zip params rest) $ \(ESymbol name, y) -> do
z <- evalExpr y
S.modify $ Env . addEntry name z . getEnv
res <- evalExprs body
S.modify dropFrame
return res
_ -> error "Invalid combination"
evalExpr e = error $ "Invalid state " ++ show e
evalExprs :: [Expr] -> S.State Env Expr
evalExprs [] = return ENull
evalExprs [e] = evalExpr e
evalExprs (e:rest) = do
_ <- evalExpr e
evalExprs rest
eval :: Env -> Expr -> (Expr, Env)
eval env expr = I.runIdentity $ S.runStateT (evalExpr expr) env
|
martintrojer/scheme-haskell
|
src/Interpreter.hs
|
bsd-3-clause
| 1,054
| 0
| 19
| 271
| 437
| 217
| 220
| 33
| 3
|
{-# LANGUAGE NamedFieldPuns #-}
module GHC.Util.HsDecl (declName,bindName)
where
import GHC.Hs
import GHC.Types.SrcLoc
import Language.Haskell.GhclibParserEx.GHC.Types.Name.Reader
-- | @declName x@ returns the \"new name\" that is created (for
-- example a function declaration) by @x@. If @x@ isn't a declaration
-- that creates a new name (for example an instance declaration),
-- 'Nothing' is returned instead. This is useful because we don't
-- want to tell users to rename binders that they aren't creating
-- right now and therefore usually cannot change.
declName :: LHsDecl GhcPs -> Maybe String
declName (L _ x) = occNameStr <$> case x of
TyClD _ FamDecl{tcdFam=FamilyDecl{fdLName}} -> Just $ unLoc fdLName
TyClD _ SynDecl{tcdLName} -> Just $ unLoc tcdLName
TyClD _ DataDecl{tcdLName} -> Just $ unLoc tcdLName
TyClD _ ClassDecl{tcdLName} -> Just $ unLoc tcdLName
ValD _ FunBind{fun_id} -> Just $ unLoc fun_id
ValD _ VarBind{var_id} -> Just var_id
ValD _ (PatSynBind _ PSB{psb_id}) -> Just $ unLoc psb_id
SigD _ (TypeSig _ (x:_) _) -> Just $ unLoc x
SigD _ (PatSynSig _ (x:_) _) -> Just $ unLoc x
SigD _ (ClassOpSig _ _ (x:_) _) -> Just $ unLoc x
ForD _ ForeignImport{fd_name} -> Just $ unLoc fd_name
ForD _ ForeignExport{fd_name} -> Just $ unLoc fd_name
_ -> Nothing
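-- | @bindName x@ returns the name bound by a function or variable binding
-- @x@, or 'Nothing' for any other kind of binding.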
bindName :: LHsBind GhcPs -> Maybe String
bindName (L _ FunBind{fun_id}) = Just $ rdrNameStr fun_id
bindName (L _ VarBind{var_id}) = Just $ occNameStr var_id
bindName _ = Nothing
|
ndmitchell/hlint
|
src/GHC/Util/HsDecl.hs
|
bsd-3-clause
| 1,516
| 0
| 14
| 296
| 505
| 251
| 254
| 24
| 13
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.AmountOfMoney.FR.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.AmountOfMoney.FR.Corpus
import Duckling.Dimensions.Types
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "FR Tests"
[ makeCorpusTest [Seal AmountOfMoney] corpus
]
|
facebookincubator/duckling
|
tests/Duckling/AmountOfMoney/FR/Tests.hs
|
bsd-3-clause
| 522
| 0
| 9
| 78
| 79
| 50
| 29
| 11
| 1
|
{-# LANGUAGE CPP, DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module      : Language.JavaScript.Parser.Token
-- Copyright : (c) 2009 Bernie Pope
-- License : BSD-style
-- Maintainer : bjpop@csse.unimelb.edu.au
-- Stability : experimental
-- Portability : ghc
--
-- Lexical tokens for the JavaScript lexer, annotated with their source
-- position and any comments or whitespace that precede them.
-----------------------------------------------------------------------------
module Language.JavaScript.Parser.Token
(
-- * The tokens
Token (..)
, CommentAnnotation (..)
-- * String conversion
, debugTokenString
-- * Classification
-- TokenClass (..),
) where
import Data.Data
import Language.JavaScript.Parser.SrcLocation
data CommentAnnotation
= CommentA TokenPosn String
| WhiteSpace TokenPosn String
| NoComment
deriving (Eq, Show, Typeable, Data, Read)
-- | Lexical tokens.
-- Each may be annotated with any comment occurring between the prior token and this one
data Token
-- Comment
= CommentToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] } -- ^ Single line comment.
| WsToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] } -- ^ White space, for preservation.
-- Identifiers
| IdentifierToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] } -- ^ Identifier.
-- Javascript Literals
| DecimalToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
-- ^ Literal: Decimal
| HexIntegerToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
-- ^ Literal: Hexadecimal Integer
| OctalToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
-- ^ Literal: Octal Integer
| StringToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
-- ^ Literal: string, delimited by either single or double quotes
| RegExToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
-- ^ Literal: Regular Expression
-- Keywords
| AsToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| BreakToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| CaseToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| CatchToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| ConstToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| LetToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| ContinueToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| DebuggerToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| DefaultToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| DeleteToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| DoToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| ElseToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| EnumToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| FalseToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| FinallyToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| ForToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| FunctionToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| FromToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| IfToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| InToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| InstanceofToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| NewToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| NullToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| OfToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| ReturnToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| SwitchToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| ThisToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| ThrowToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| TrueToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| TryToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| TypeofToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| VarToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| VoidToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| WhileToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| ImportToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| WithToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| ExportToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
-- Future reserved words
| FutureToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
-- Contextual keywords: 'get' and 'set', used in object accessor definitions.
| GetToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| SetToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
-- Delimiters
-- Operators
| AutoSemiToken { tokenSpan :: !TokenPosn, tokenLiteral :: !String, tokenComment :: ![CommentAnnotation] }
| SemiColonToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| CommaToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| HookToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| ColonToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| OrToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| AndToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| BitwiseOrToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| BitwiseXorToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| BitwiseAndToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| StrictEqToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| EqToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| TimesAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| DivideAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| ModAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| PlusAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| MinusAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| LshAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| RshAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| UrshAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| AndAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| XorAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| OrAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| SimpleAssignToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| StrictNeToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| NeToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| LshToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| LeToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| LtToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| UrshToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| RshToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| GeToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| GtToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| IncrementToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| DecrementToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| PlusToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| MinusToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| MulToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| DivToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| ModToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| NotToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| BitwiseNotToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| ArrowToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| SpreadToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| DotToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| LeftBracketToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| RightBracketToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| LeftCurlyToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| RightCurlyToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| LeftParenToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| RightParenToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
| CondcommentEndToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] }
-- Special cases
| TailToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] } -- ^ Stuff between last JS and EOF
| EOFToken { tokenSpan :: !TokenPosn, tokenComment :: ![CommentAnnotation] } -- ^ End of file
deriving (Eq, Show, Typeable)
-- | Produce a short string identifying a token (its constructor name). Mainly intended for debugging.
debugTokenString :: Token -> String
debugTokenString = takeWhile (/= ' ') . show
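-- For example (illustrative; @posn@ stands for some 'TokenPosn' value):
--
-- > debugTokenString (BreakToken posn "break" []) == "BreakToken"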
|
alanz/language-javascript
|
src/Language/JavaScript/Parser/Token.hs
|
bsd-3-clause
| 11,779
| 0
| 10
| 2,080
| 2,753
| 1,636
| 1,117
| 625
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Database.Hitcask.Delete where
import Database.Hitcask.Types
import Database.Hitcask.Put
import Data.ByteString.Char8()
import qualified Data.HashMap.Strict as M
import Control.Concurrent.STM
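-- | Delete a key: write a tombstone value for it to the data file and remove
-- the key from the in-memory key map.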
delete :: Hitcask -> Key -> IO ()
delete h key = do
put h key "<<hitcask_tombstone>>"
atomically $ modifyTVar (keys h) (M.delete key)
|
tcrayford/hitcask
|
Database/Hitcask/Delete.hs
|
bsd-3-clause
| 373
| 0
| 11
| 50
| 108
| 61
| 47
| 11
| 1
|
{-# LANGUAGE TypeFamilies, TypeSynonymInstances, FlexibleInstances,
PatternGuards, RecordWildCards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Blip.Compiler.Compile
-- Copyright : (c) 2012, 2013, 2014 Bernie Pope
-- License : BSD-style
-- Maintainer : florbitous@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- Compilation of Python 3 source code into bytecode.
--
-- Basic algorithm:
--
-- 1) Parse the source code into an AST.
-- 2) Compute the scope of all variables in the module
-- (one pass over the AST).
-- 3) Compile the AST for the whole module into a (possibly nested)
-- code object (one pass over the AST).
-- 4) Write the code object to a .pyc file.
--
-- The following Python constructs are compiled into code objects:
-- - The top-level of the module.
-- - Function definitions (def and lambda).
-- - Class definitions.
-- - Comprehensions.
--
-- The statements and expressions in each of the above constructs are
-- recursively compiled into bytecode instructions. Initially, the actual
-- addresses of jump instruction targets are not known. Instead the jump
-- targets are just labels. At the end of the compilation of each
-- construct the labelled instructions are converted into jumps to
-- actual addresses (one pass over the bytecode stream).
-- Also the maximum stack size of each code object is computed (one pass
-- over the bytecode stream).
--
-- We currently make no attempt to optimise the generated code.
--
-- Bytecode is generated directly from the AST, there is no intermediate
-- language, and no explict control-flow graph.
--
-----------------------------------------------------------------------------
module Blip.Compiler.Compile
(compileFile, compileReplInput, writePycFile)
where
import Prelude hiding (mapM)
import Blip.Compiler.Desugar (desugarComprehension, desugarWith, resultName)
import Blip.Compiler.Utils
( isPureExpr, isPyObjectExpr, mkAssignVar, mkList
, mkVar, mkMethodCall, mkStmtExpr, mkSet, mkDict, mkAssign
, mkSubscript, mkReturn, mkYield, spanToScopeIdentifier )
import Blip.Compiler.StackDepth (maxStackDepth)
import Blip.Compiler.State
( setBlockState, getBlockState, initBlockState, initState
, emitCodeNoArg, emitCodeArg, compileConstantEmit
, compileConstant, getFileName, newLabel, labelNextInstruction
, getObjectName, setObjectName
, getNestedScope, ifDump, getLocalScope
, indexedVarSetKeys, emitReadVar, emitWriteVar, emitDeleteVar
, lookupNameVar, lookupClosureVar, setFlag
, peekFrameBlock, withFrameBlock, setFastLocals, setArgCount
, setLineNumber, setFirstLineNumber )
import Blip.Compiler.Assemble (assemble)
import Blip.Compiler.Monad (Compile (..), runCompileMonad)
import Blip.Compiler.Types
( Identifier, CompileConfig (..)
, CompileState (..), BlockState (..)
, AnnotatedCode (..), Dumpable (..), IndexedVarSet, VarInfo (..)
, FrameBlockInfo (..), Context (..), ParameterTypes (..), LocalScope (..) )
import Blip.Compiler.Scope (topScope, renderScope)
import Blip.Marshal as Blip
( writePyc, PycFile (..), PyObject (..), co_generator )
import Blip.Bytecode (Opcode (..), encode)
import Language.Python.Version3.Parser (parseModule, parseStmt)
import Language.Python.Common.AST as AST
( Annotated (..), ModuleSpan, Module (..), StatementSpan, Statement (..)
, ExprSpan, Expr (..), Ident (..), ArgumentSpan, Argument (..)
, OpSpan, Op (..), Handler (..), HandlerSpan, ExceptClause (..)
, ExceptClauseSpan, ImportItem (..), ImportItemSpan, ImportRelative (..)
, ImportRelativeSpan, FromItems (..), FromItemsSpan, FromItem (..)
, FromItemSpan, DecoratorSpan, Decorator (..), ComprehensionSpan
, Comprehension (..), SliceSpan, Slice (..), AssignOpSpan, AssignOp (..)
, ComprehensionExpr (..), ComprehensionExprSpan
, ParameterSpan, Parameter (..), RaiseExpr (..), RaiseExprSpan
, DictKeyDatumList(DictMappingPair), YieldArg (..), YieldArgSpan )
import Language.Python.Common (prettyText)
import Language.Python.Common.StringEscape (unescapeString)
import Language.Python.Common.SrcLocation (SrcSpan (..))
import System.FilePath ((<.>), takeBaseName)
-- XXX Commented out to avoid bug in unix package when building on OS X,
-- The unix package is depended on by the directory package.
-- import System.Directory (getModificationTime, canonicalizePath)
-- import System.Time (ClockTime (..))
import System.IO (openFile, IOMode(..), hClose, hFileSize, hGetContents)
import Data.Word (Word32, Word16)
import Data.Int (Int32)
import Data.Traversable as Traversable (mapM)
import qualified Data.ByteString.Lazy as B (pack)
import Data.String (fromString)
import Data.List (intersperse)
import Control.Monad (unless, forM_, when, replicateM_, foldM)
import Control.Monad.Trans (liftIO)
import Data.Bits ((.|.), shiftL)
-- Compile the input from the REPL command line to an object.
compileReplInput :: CompileConfig -> String -> IO PyObject
compileReplInput config replString = do
stmts <- parseStmtAndCheckErrors replString
let printWrapped = wrapWithPrint stmts
-- pretend that the statements are a module on their own to calculate the variable scope
(moduleLocals, nestedScope) <- topScope $ Module printWrapped
let state = initState ModuleContext moduleLocals
nestedScope config ""
compileReplStmts state printWrapped
-- Support for REPL printing of expressions.
-- If the statement entered at the REPL is an expression, then
-- we try to print it out.
-- We transform an expression E into:
-- _ = E
-- print(_)
--
-- XXX if the result of E is None then we should not print it out,
-- to be consistent with CPython.
-- Want something like this:
-- try:
-- _ = E
-- except Exception as e:
--     stackTrace(e)
-- else:
--     if _ is not None:
--         print(_)
wrapWithPrint :: [StatementSpan] -> [StatementSpan]
wrapWithPrint [StmtExpr {..}] =
[assignStmt, printStmt]
where
assignStmt = Assign { assign_to = [underscoreVar], assign_expr = stmt_expr, stmt_annot = SpanEmpty }
underscoreIdent = Ident { ident_string = "_", ident_annot = SpanEmpty }
underscoreVar = Var { var_ident = underscoreIdent, expr_annot = SpanEmpty }
printIdent = Ident { ident_string = "print", ident_annot = SpanEmpty }
printVar = Var { var_ident = printIdent, expr_annot = SpanEmpty }
printArg = ArgExpr { arg_expr = underscoreVar, arg_annot = SpanEmpty }
printExpr = Call { call_fun = printVar, call_args = [printArg], expr_annot = SpanEmpty }
printStmt = StmtExpr { stmt_expr = printExpr, stmt_annot = SpanEmpty }
wrapWithPrint other = other
-- Compile Python source code to bytecode, returning a representation
-- of the contents of a .pyc file.
compileFile :: CompileConfig -- Configuration options
-> FilePath -- The file path of the input Python source
-> IO PycFile
compileFile config path = do
pyHandle <- openFile path ReadMode
sizeInBytes <- hFileSize pyHandle
fileContents <- hGetContents pyHandle
-- modifiedTime <- getModificationTime path
-- let modSeconds = case modifiedTime of TOD secs _picoSecs -> secs
let modSeconds = (0 :: Integer)
pyModule <- parseFileAndCheckErrors fileContents path
(moduleLocals, nestedScope) <- topScope pyModule
-- canonicalPath <- canonicalizePath path
canonicalPath <- return path
let state = initState ModuleContext moduleLocals
nestedScope config canonicalPath
pyc <- compileModule state (fromIntegral modSeconds)
(fromIntegral sizeInBytes) pyModule
return pyc
writePycFile :: PycFile -> FilePath -> IO ()
writePycFile pyc path = do
let pycFilePath = takeBaseName path <.> ".pyc"
pycHandle <- openFile pycFilePath WriteMode
writePyc pycHandle pyc
hClose pycHandle
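-- Illustrative usage (hypothetical 'config' value; constructing a
-- 'CompileConfig' is not shown in this module):
--
-- > main = do
-- >   pyc <- compileFile config "hello.py"
-- >   writePycFile pyc "hello.py"   -- writes "hello.pyc"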
-- Parse the Python source from a statement into an AST, check for any syntax errors.
parseStmtAndCheckErrors :: String -> IO [StatementSpan]
parseStmtAndCheckErrors stmtString =
case parseStmt stmtString "<stdin>" of
Left e -> error $ "parse error: " ++ prettyText e
Right (stmts, _comments) -> return stmts
-- Parse the Python source from a File into an AST, check for any syntax errors.
parseFileAndCheckErrors :: String -> FilePath -> IO ModuleSpan
parseFileAndCheckErrors fileContents sourceName =
case parseModule fileContents sourceName of
Left e -> error $ "parse error: " ++ prettyText e
Right (pyModule, _comments) -> return pyModule
compileModule :: CompileState -- initial compiler state
-> Word32 -- modification time
-> Word32 -- size in bytes
-> ModuleSpan -- AST
-> IO PycFile
compileModule state pyFileModifiedTime pyFileSizeBytes mod = do
obj <- compiler mod state
return $ PycFile
{ magic = compileConfig_magic $ state_config state
, modified_time = pyFileModifiedTime
, size = pyFileSizeBytes
, object = obj }
compileReplStmts :: CompileState -> [StatementSpan] -> IO PyObject
compileReplStmts state replStatements =
compiler (Body replStatements) state
compiler :: Compilable a => a -> CompileState -> IO (CompileResult a)
compiler = runCompileMonad . compile
class Compilable a where
type CompileResult a :: *
compile :: a -> Compile (CompileResult a)
instance Compilable a => Compilable [a] where
type CompileResult [a] = [CompileResult a]
compile = mapM compile
instance Compilable ModuleSpan where
type CompileResult ModuleSpan = PyObject
compile ast@(Module stmts) = do
maybeDumpScope
maybeDumpAST ast
setObjectName "<module>"
compileClassModuleDocString stmts
compile $ Body stmts
-- body of module, function and class
newtype Body = Body [StatementSpan]
instance Compilable Body where
type CompileResult Body = PyObject
compile (Body stmts) = do
mapM_ compile stmts
-- XXX we could avoid this 'return None' if all branches in the code
-- ended with a return statement. Can fix this in an optimisation step
-- with control flow analysis.
returnNone
assemble
makeObject
-- Build an object from all the state computed during compilation, such
-- as the bytecode sequence, variable information and so on.
-- argcount is the number of arguments, not counting *varargs or **kwargs.
makeObject :: Compile PyObject
makeObject = do
annotatedCode <- getBlockState state_instructions
let stackDepth = maxStackDepth annotatedCode
names <- getBlockState state_names
constants <- getBlockState state_constants
freeVars <- getBlockState state_freeVars
cellVars <- getBlockState state_cellVars
argcount <- getBlockState state_argcount
flags <- getBlockState state_flags
fastLocals <- getBlockState state_fastLocals
firstLineNumber <- getBlockState state_firstLineNumber
lineNumberTable <- compileLineNumberTable firstLineNumber
let code = map annotatedCode_bytecode annotatedCode
localVarNames = map Unicode $ indexedVarSetKeys fastLocals
maxStackDepth = maxBound
if stackDepth > maxStackDepth
-- XXX make a better error message
then error $ "Maximum stack depth " ++ show maxStackDepth ++
" exceeded: " ++ show stackDepth
else do
pyFileName <- getFileName
objectName <- getObjectName
let obj = Code
{ argcount = argcount
, kwonlyargcount = 0
, nlocals = fromIntegral $ length localVarNames
, stacksize = stackDepth
, flags = flags
, code = String $ encode code
, consts = makeConstants constants
, names = makeNames names
, varnames = Blip.Tuple localVarNames
, freevars = makeVarSetTuple freeVars
, cellvars = makeVarSetTuple cellVars
, filename = Unicode pyFileName
, name = Unicode objectName
, firstlineno = firstLineNumber
, lnotab = lineNumberTable
}
return obj
where
makeVarSetTuple :: IndexedVarSet -> PyObject
makeVarSetTuple varSet =
Blip.Tuple $ map Unicode $ indexedVarSetKeys varSet
makeConstants :: [PyObject] -> PyObject
makeConstants = Blip.Tuple . reverse
makeNames :: [Identifier] -> PyObject
makeNames = Blip.Tuple . map Unicode . reverse
instance Compilable StatementSpan where
type CompileResult StatementSpan = ()
compile stmt =
setLineNumber (annot stmt) >>
compileStmt stmt
compileStmt :: StatementSpan -> Compile ()
compileStmt (Assign {..}) = do
compile assign_expr
compileAssignments assign_to
compileStmt (AugmentedAssign {..}) =
case aug_assign_to of
Var {..} -> do
let varIdent = ident_string var_ident
emitReadVar varIdent
compile aug_assign_expr
compile aug_assign_op
emitWriteVar varIdent
Subscript {..} -> do
compile subscriptee
compile subscript_expr
emitCodeNoArg DUP_TOP_TWO -- avoids re-doing the above two later when we store
emitCodeNoArg BINARY_SUBSCR
compile aug_assign_expr
compile aug_assign_op
emitCodeNoArg ROT_THREE
emitCodeNoArg STORE_SUBSCR
SlicedExpr {..} -> do
compile slicee
compileSlices slices
emitCodeNoArg DUP_TOP_TWO -- avoids re-doing the above two later when we store
emitCodeNoArg BINARY_SUBSCR
compile aug_assign_expr
compile aug_assign_op
emitCodeNoArg ROT_THREE
emitCodeNoArg STORE_SUBSCR
expr@(Dot {..}) -> do
compile dot_expr
emitCodeNoArg DUP_TOP
index <- lookupNameVar $ ident_string $ dot_attribute
emitCodeArg LOAD_ATTR index
compile aug_assign_expr
compile aug_assign_op
emitCodeNoArg ROT_TWO
emitCodeArg STORE_ATTR index
{-
expr@(BinaryOp { operator = Dot {}, right_op_arg = Var {..}}) -> do
compile $ left_op_arg expr
emitCodeNoArg DUP_TOP
index <- lookupNameVar $ ident_string $ var_ident
emitCodeArg LOAD_ATTR index
compile aug_assign_expr
compile aug_assign_op
emitCodeNoArg ROT_TWO
emitCodeArg STORE_ATTR index
-}
other -> error $ "unexpected expression in augmented assignment: " ++ prettyText other
compileStmt (Return { return_expr = Nothing }) = returnNone
compileStmt (Return { return_expr = Just expr }) =
compile expr >> emitCodeNoArg RETURN_VALUE
compileStmt (Pass {}) = return ()
compileStmt (StmtExpr {..}) =
unless (isPureExpr stmt_expr) $
compile stmt_expr >> emitCodeNoArg POP_TOP
compileStmt (Conditional {..}) = do
restLabel <- newLabel
mapM_ (compileGuard restLabel) cond_guards
mapM_ compile cond_else
labelNextInstruction restLabel
compileStmt (While {..}) = do
startLoop <- newLabel
endLoop <- newLabel
anchor <- newLabel
emitCodeArg SETUP_LOOP endLoop
withFrameBlock (FrameBlockLoop startLoop) $ do
labelNextInstruction startLoop
compile while_cond
emitCodeArg POP_JUMP_IF_FALSE anchor
mapM_ compile while_body
emitCodeArg JUMP_ABSOLUTE startLoop
labelNextInstruction anchor
emitCodeNoArg POP_BLOCK
mapM_ compile while_else
labelNextInstruction endLoop
compileStmt (For {..}) = do
startLoop <- newLabel
endLoop <- newLabel
withFrameBlock (FrameBlockLoop startLoop) $ do
anchor <- newLabel
emitCodeArg SETUP_LOOP endLoop
compile for_generator
emitCodeNoArg GET_ITER
labelNextInstruction startLoop
emitCodeArg FOR_ITER anchor
let num_targets = length for_targets
when (num_targets > 1) $ do
emitCodeArg UNPACK_SEQUENCE $ fromIntegral num_targets
mapM_ compileAssignTo for_targets
mapM_ compile for_body
emitCodeArg JUMP_ABSOLUTE startLoop
labelNextInstruction anchor
emitCodeNoArg POP_BLOCK
mapM_ compile for_else
labelNextInstruction endLoop
compileStmt stmt@(Fun {..}) = compileFun stmt []
compileStmt stmt@(Class {..}) = compileClass stmt []
-- XXX assertions appear to be turned off if the code is compiled
-- for optimisation
-- If the assertion expression is a tuple of non-zero length, then
-- it is always True: CPython warns about this
compileStmt (Assert {..}) = do
case assert_exprs of
test_expr:restAssertExprs -> do
compile test_expr
end <- newLabel
emitCodeArg POP_JUMP_IF_TRUE end
assertionErrorVar <- lookupNameVar "AssertionError"
emitCodeArg LOAD_GLOBAL assertionErrorVar
case restAssertExprs of
assertMsgExpr:_ -> do
compile assertMsgExpr
emitCodeArg CALL_FUNCTION 1
_other -> return ()
emitCodeArg RAISE_VARARGS 1
labelNextInstruction end
_other -> error "assert with no test"
compileStmt stmt@(Try {..}) = compileTry stmt
compileStmt (Import {..}) = mapM_ compile import_items
-- XXX need to handle from __future__
compileStmt (FromImport {..}) = do
let level = 0 -- XXX this should be the level of nesting
compileConstantEmit $ Blip.Int level
let names = fromItemsIdentifiers from_items
namesTuple = Blip.Tuple $ map Unicode names
compileConstantEmit namesTuple
compileFromModule from_module
case from_items of
ImportEverything {} -> do
emitCodeNoArg IMPORT_STAR
FromItems {..} -> do
forM_ from_items_items $ \FromItem {..} -> do
index <- lookupNameVar $ ident_string from_item_name
emitCodeArg IMPORT_FROM index
let storeName = case from_as_name of
Nothing -> from_item_name
Just asName -> asName
emitWriteVar $ ident_string storeName
emitCodeNoArg POP_TOP
-- XXX should check that we are inside a loop
compileStmt (Break {}) = emitCodeNoArg BREAK_LOOP
compileStmt (Continue {}) = do
maybeFrameBlockInfo <- peekFrameBlock
case maybeFrameBlockInfo of
Nothing -> error loopError
Just (FrameBlockLoop label) -> emitCodeArg JUMP_ABSOLUTE label
Just FrameBlockFinallyEnd ->
error finallyError
Just _other -> checkFrameBlocks
where
      -- keep searching up the frame block stack until we find a loop
      -- entry; otherwise generate an error
checkFrameBlocks :: Compile ()
checkFrameBlocks = do
maybeFrameBlockInfo <- peekFrameBlock
case maybeFrameBlockInfo of
Nothing -> error loopError
Just FrameBlockFinallyEnd -> error finallyError
Just (FrameBlockLoop label) ->
emitCodeArg CONTINUE_LOOP label
Just _other -> checkFrameBlocks
loopError = "'continue' not properly in loop"
finallyError = "'continue' not supported inside 'finally' clause"
compileStmt (NonLocal {}) = return ()
compileStmt (Global {}) = return ()
compileStmt (Decorated {..}) =
case decorated_def of
Fun {} -> compileFun decorated_def decorated_decorators
Class {} -> compileClass decorated_def decorated_decorators
other -> error $ "Decorated statement is not a function or a class: " ++ prettyText other
compileStmt (Delete {..}) = mapM_ compileDelete del_exprs
compileStmt stmt@(With {..})
-- desugar with statements containing multiple contexts into nested
-- with statements containing single contexts
| length with_context > 1 = compileWith $ desugarWith stmt
| otherwise = compileWith stmt
compileStmt (Raise {..}) = compile raise_expr
compileStmt other = error $ "Unsupported statement:\n" ++ prettyText other
instance Compilable ExprSpan where
type CompileResult ExprSpan = ()
compile expr =
setLineNumber (annot expr) >>
compileExpr expr
compileExpr :: ExprSpan -> Compile ()
compileExpr (Var { var_ident = ident }) = do
emitReadVar $ ident_string ident
compileExpr expr@(AST.Strings {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.ByteStrings {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Int {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Float {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Imaginary {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Bool {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.None {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr expr@(AST.Ellipsis {}) =
compileConstantEmit $ constantToPyObject expr
compileExpr (AST.Paren {..}) = compile paren_expr
compileExpr (AST.CondExpr {..}) = do
compile ce_condition
falseLabel <- newLabel
emitCodeArg POP_JUMP_IF_FALSE falseLabel
compile ce_true_branch
restLabel <- newLabel
emitCodeArg JUMP_FORWARD restLabel
labelNextInstruction falseLabel
compile ce_false_branch
labelNextInstruction restLabel
compileExpr expr@(AST.Tuple {..})
| isPyObjectExpr expr =
compileConstantEmit $ constantToPyObject expr
| otherwise = do
mapM_ compile tuple_exprs
emitCodeArg BUILD_TUPLE $ fromIntegral $ length tuple_exprs
compileExpr (AST.List {..}) = do
mapM_ compile list_exprs
emitCodeArg BUILD_LIST $ fromIntegral $ length list_exprs
compileExpr (AST.Set {..}) = do
mapM_ compile set_exprs
emitCodeArg BUILD_SET $ fromIntegral $ length set_exprs
compileExpr (Dictionary {..}) = do
emitCodeArg BUILD_MAP $ fromIntegral $ length dict_mappings
forM_ dict_mappings $ \(DictMappingPair key value) -> do
compile value
compile key
emitCodeNoArg STORE_MAP
compileExpr (ListComp {..}) = do
let initStmt = [mkAssignVar resultName (mkList [])]
updater = \(ComprehensionExpr expr) -> mkStmtExpr $ mkMethodCall (mkVar $ resultName) "append" expr
returnStmt = [mkReturn $ mkVar $ resultName]
compileComprehension "<listcomp>" initStmt updater returnStmt list_comprehension
compileExpr (SetComp {..}) = do
let initStmt = [mkAssignVar resultName (mkSet [])]
updater = \(ComprehensionExpr expr) -> mkStmtExpr $ mkMethodCall (mkVar $ resultName) "add" expr
returnStmt = [mkReturn $ mkVar $ resultName]
compileComprehension "<setcomp>" initStmt updater returnStmt set_comprehension
compileExpr (DictComp {..}) = do
let initStmt = [mkAssignVar resultName (mkDict [])]
updater = \(ComprehensionDict (DictMappingPair key val)) ->
mkAssign (mkSubscript (mkVar $ resultName) key) val
returnStmt = [mkReturn $ mkVar $ resultName]
compileComprehension "<dictcomp>" initStmt updater returnStmt dict_comprehension
compileExpr (Generator {..}) = do
let updater = \(ComprehensionExpr expr) -> mkStmtExpr $ mkYield expr
compileComprehension "<gencomp>" [] updater [] gen_comprehension
compileExpr (Yield { yield_arg = Nothing }) =
compileConstantEmit Blip.None >> emitCodeNoArg YIELD_VALUE >> setFlag co_generator
compileExpr (Yield { yield_arg = Just (YieldExpr expr) }) =
compile expr >> emitCodeNoArg YIELD_VALUE >> setFlag co_generator
compileExpr e@(Yield { yield_arg = Just (YieldFrom expr _) })
= error $ "yield from not supported: " ++ show e
compileExpr (Call {..}) = do
compile call_fun
compileCall 0 call_args
compileExpr (Subscript {..}) = do
compile subscriptee
compile subscript_expr
emitCodeNoArg BINARY_SUBSCR
compileExpr (SlicedExpr {..}) = do
compile slicee
compileSlices slices
emitCodeNoArg BINARY_SUBSCR
compileExpr (Dot {..}) = do
compile dot_expr
varInfo <- lookupNameVar $ ident_string dot_attribute
emitCodeArg LOAD_ATTR varInfo
compileExpr exp@(BinaryOp {..})
| isBoolean operator = compileBoolOpExpr exp
| isComparison operator = compileCompareOpExpr exp
| otherwise = do
compile left_op_arg
compile right_op_arg
compileOp operator
compileExpr (UnaryOp {..}) = do
compile op_arg
compileUnaryOp operator
compileExpr (Lambda {..}) = do
funBodyObj <- nestedBlock FunctionContext expr_annot $ do
-- make the first constant None, to indicate no doc string
-- for the lambda
_ <- compileConstant Blip.None
compile lambda_body
emitCodeNoArg RETURN_VALUE
assemble
makeObject
numDefaults <- compileDefaultParams lambda_args
compileClosure "<lambda>" funBodyObj numDefaults
compileExpr other = error $ "Unsupported expr:\n" ++ prettyText other
instance Compilable AssignOpSpan where
type CompileResult AssignOpSpan = ()
compile = emitCodeNoArg . assignOpCode
instance Compilable DecoratorSpan where
type CompileResult DecoratorSpan = ()
compile dec@(Decorator {..}) = do
compileDottedName decorator_name
let numDecorators = length decorator_args
when (numDecorators > 0) $
compileCall 0 decorator_args
where
compileDottedName (name:rest) = do
emitReadVar $ ident_string name
forM_ rest $ \var -> do
index <- lookupNameVar $ ident_string var
emitCodeArg LOAD_ATTR index
compileDottedName [] =
error $ "decorator with no name: " ++ prettyText dec
instance Compilable ArgumentSpan where
type CompileResult ArgumentSpan = ()
compile (ArgExpr {..}) = compile arg_expr
compile other = error $ "Unsupported argument:\n" ++ prettyText other
instance Compilable ImportItemSpan where
type CompileResult ImportItemSpan = ()
compile (ImportItem {..}) = do
compileConstantEmit $ Blip.Int 0 -- this always seems to be zero
compileConstantEmit Blip.None
let dottedNames = map ident_string import_item_name
-- assert (length dottedNames > 0)
let dottedNameStr =
concat $ intersperse "." dottedNames
index <- lookupNameVar dottedNameStr
emitCodeArg IMPORT_NAME index
storeName <-
case import_as_name of
Nothing -> return $ head import_item_name
Just asName -> do
forM_ (tail dottedNames) $ \attribute -> do
index <- lookupNameVar attribute
emitCodeArg LOAD_ATTR index
return asName
emitWriteVar $ ident_string storeName
instance Compilable RaiseExprSpan where
type CompileResult RaiseExprSpan = ()
compile (RaiseV3 maybeRaiseArg) = do
n <- case maybeRaiseArg of
Nothing -> return 0
Just (raiseExpr, maybeFrom) -> do
compile raiseExpr
case maybeFrom of
Nothing -> return 1
Just fromExpr -> do
compile fromExpr
return 2
emitCodeArg RAISE_VARARGS n
compile stmt@(RaiseV2 _) =
error $ "Python version 2 raise statement encountered: " ++ prettyText stmt
{-
From CPython compile.c
Code generated for "try: S except E1 as V1: S1 except E2 as V2: S2 ...":
(The contents of the value stack is shown in [], with the top
at the right; 'tb' is trace-back info, 'val' the exception's
associated value, and 'exc' the exception.)
   Value stack                  Label    Instruction        Argument
   []                                    SETUP_EXCEPT       L1
   []                                    <code for S>
   []                                    POP_BLOCK
   []                                    JUMP_FORWARD       L0
   [tb, val, exc]               L1:      DUP                )
   [tb, val, exc, exc]                   <evaluate E1>      )
   [tb, val, exc, exc, E1]               COMPARE_OP         EXC_MATCH  ) only if E1
   [tb, val, exc, 1-or-0]                POP_JUMP_IF_FALSE  L2         )
   [tb, val, exc]                        POP
   [tb, val]                             <assign to V1>     (or POP if no V1)
   [tb]                                  POP
   []                                    <code for S1>
                                         POP_EXCEPT
                                         JUMP_FORWARD       L0
   [tb, val, exc]               L2:      DUP
   .............................etc.......................
   [tb, val, exc]               Ln+1:    END_FINALLY        # re-raise exception
   []                           L0:      <next statement>
Of course, parts are not generated if Vi or Ei is not present.
-}
compileTry :: StatementSpan -> Compile ()
compileTry stmt@(Try {..})
| length try_finally == 0 = compileTryExcept stmt
| otherwise = compileTryFinally stmt
compileTry other =
error $ "Unexpected statement when compiling a try-except: " ++ prettyText other
compileTryFinally :: StatementSpan -> Compile ()
compileTryFinally stmt@(Try {..}) = do
end <- newLabel
emitCodeArg SETUP_FINALLY end
body <- newLabel
labelNextInstruction body
withFrameBlock FrameBlockFinallyTry $ do
if length try_excepts > 0
then compileTryExcept stmt
else mapM_ compile try_body
emitCodeNoArg POP_BLOCK
_ <- compileConstantEmit Blip.None
labelNextInstruction end
withFrameBlock FrameBlockFinallyEnd $ do
mapM_ compile try_finally
emitCodeNoArg END_FINALLY
compileTryFinally other =
   error $ "Unexpected statement when compiling a try-finally: " ++ prettyText other
compileTryExcept :: StatementSpan -> Compile ()
compileTryExcept (Try {..}) = do
firstHandler <- newLabel -- L1
emitCodeArg SETUP_EXCEPT firstHandler -- pushes handler onto block stack
withFrameBlock FrameBlockExcept $ do
mapM_ compile try_body -- <code for S>
emitCodeNoArg POP_BLOCK -- pops handler off block stack
orElse <- newLabel
emitCodeArg JUMP_FORWARD orElse
end <- newLabel -- L0
compileHandlers end firstHandler try_excepts
labelNextInstruction orElse
mapM_ compile try_else
labelNextInstruction end -- L0: <next statement>
compileTryExcept other =
error $ "Unexpected statement when compiling a try-except: " ++ prettyText other
-- Compile a sequence of exception handlers
compileHandlers :: Word16 -> Word16 -> [HandlerSpan] -> Compile ()
compileHandlers _end handlerLabel [] = do
labelNextInstruction handlerLabel -- Ln+1, # re-raise exception
emitCodeNoArg END_FINALLY
compileHandlers end handlerLabel (Handler {..} : rest) = do
labelNextInstruction handlerLabel
nextLabel <- newLabel
compileHandlerClause nextLabel handler_clause
emitCodeNoArg POP_TOP -- pop the traceback (tb) off the stack
withFrameBlock FrameBlockFinallyTry $ do
mapM_ compile handler_suite -- <code for S1, S2 ..>
emitCodeNoArg POP_EXCEPT -- pop handler off the block stack
emitCodeArg JUMP_FORWARD end
compileHandlers end nextLabel rest
-- enter here with stack == (s ++ [tb, val, exc]), leave with stack == s
compileHandlerClause :: Word16 -> ExceptClauseSpan -> Compile ()
compileHandlerClause nextHandler (ExceptClause {..}) = do
case except_clause of
Nothing -> do
emitCodeNoArg POP_TOP -- pop exc off the stack
emitCodeNoArg POP_TOP -- pop val off the stack
Just (target, asExpr) -> do
emitCodeNoArg DUP_TOP -- duplicate exc on stack
compile target -- <evaluate E1>
emitCodeArg COMPARE_OP exactMatchOp -- compare E1 to exc
emitCodeArg POP_JUMP_IF_FALSE nextHandler -- pop True/False and if no match try next handler
emitCodeNoArg POP_TOP -- pop exc off the stack
case asExpr of
Nothing -> emitCodeNoArg POP_TOP -- pop val off the stack
-- XXX we should del this name at the end.
Just expr -> compileAssignTo expr -- assign the exception to the as name, will remove val from stack
where
      -- The COMPARE_OP operand for CPython's "exception match" comparison (PyCmp_EXC_MATCH).
exactMatchOp :: Word16
exactMatchOp = 10
withDecorators :: [DecoratorSpan] -> Compile () -> Compile ()
withDecorators decorators comp = do
-- push each of the decorators on the stack
mapM_ compile decorators
-- run the enclosed computation
comp
-- call each of the decorators
replicateM_ (length decorators) $
emitCodeArg CALL_FUNCTION 1
nestedBlock :: Context -> SrcSpan -> Compile a -> Compile a
nestedBlock context span comp = do
-- save the current block state
oldBlockState <- getBlockState id
-- set the new block state to initial values, and the
-- scope of the current definition
(name, localScope) <- getLocalScope $ spanToScopeIdentifier span
setBlockState $ initBlockState context localScope
-- set the new object name
setObjectName name
-- set the first line number of the block
setFirstLineNumber span
-- run the nested computation
result <- comp
-- restore the original block state
setBlockState oldBlockState
return result
-- Compile a function definition, possibly with decorators.
compileFun :: StatementSpan -> [DecoratorSpan] -> Compile ()
compileFun (Fun {..}) decorators = do
let funName = ident_string $ fun_name
withDecorators decorators $ do
funBodyObj <- nestedBlock FunctionContext stmt_annot $ do
compileFunDocString fun_body
compile $ Body fun_body
numDefaults <- compileDefaultParams fun_args
compileClosure funName funBodyObj numDefaults
emitWriteVar funName
compileFun other _decorators = error $ "compileFun applied to a non function: " ++ prettyText other
-- Compile a class definition, possibly with decorators.
compileClass :: StatementSpan -> [DecoratorSpan] -> Compile ()
compileClass (Class {..}) decorators = do
let className = ident_string $ class_name
withDecorators decorators $ do
classBodyObj <- nestedBlock ClassContext stmt_annot $ do
-- classes have a special argument called __locals__
-- it is the only argument they have in the byte code, but it
-- does not come from the source code, so we have to add it.
setFastLocals ["__locals__"]
setArgCount 1
emitCodeArg LOAD_FAST 0
emitCodeNoArg STORE_LOCALS
emitReadVar "__name__"
emitWriteVar "__module__"
compileConstantEmit $ Unicode className
emitWriteVar "__qualname__"
compileClassModuleDocString class_body
compile $ Body class_body
emitCodeNoArg LOAD_BUILD_CLASS
compileClosure className classBodyObj 0
compileConstantEmit $ Unicode className
compileCall 2 class_args
emitWriteVar className
compileClass other _decorators = error $ "compileClass applied to a non class: " ++ prettyText other
-- XXX CPython uses a "qualified" name for the code object. For instance
-- nested functions look like "f.<locals>.g", whereas we currently use
-- just "g".
-- The free variables in a code object will either be cell variables
-- or free variables in the enclosing object. If there are no free
-- variables then we can avoid building the closure, and just make the function.
compileClosure :: String -> PyObject -> Word16 -> Compile ()
compileClosure name obj numDefaults = do
-- get the list of free variables from the code object
let Blip.Tuple freeVarStringObjs = freevars obj
freeVarIdentifiers = map unicode freeVarStringObjs
numFreeVars = length freeVarIdentifiers
if numFreeVars == 0
then do
compileConstantEmit obj
compileConstantEmit $ Unicode name
emitCodeArg MAKE_FUNCTION numDefaults
else do
forM_ freeVarIdentifiers $ \var -> do
maybeVarInfo <- lookupClosureVar var
-- we don't use emitReadVar because it would generate
-- LOAD_DEREF instructions, but we want LOAD_CLOSURE
-- instead.
case maybeVarInfo of
Just (CellVar index) -> emitCodeArg LOAD_CLOSURE index
Just (FreeVar index) -> emitCodeArg LOAD_CLOSURE index
_other -> error $ name ++ " closure free variable not cell or free var in outer context: " ++ var
emitCodeArg BUILD_TUPLE $ fromIntegral numFreeVars
compileConstantEmit obj
compileConstantEmit $ Unicode name
emitCodeArg MAKE_CLOSURE numDefaults
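-- For example (illustrative Python, not taken from any test program):
--
--    def f(x):
--        def g(y): return x + y    # 'x' is free in g and a cell variable of f
--        return g
--
-- 'g' has a free variable, so its closure is built with LOAD_CLOSURE,
-- BUILD_TUPLE and MAKE_CLOSURE, whereas a definition with no free variables
-- is built with MAKE_FUNCTION alone.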
-- Compile default parameters and return how many there are
compileDefaultParams :: [ParameterSpan] -> Compile Word16
compileDefaultParams = foldM compileParam 0
where
compileParam :: Word16 -> ParameterSpan -> Compile Word16
compileParam count (Param {..}) = do
case param_default of
Nothing -> return count
Just expr -> do
compile expr
return $ count + 1
compileParam count _other = return count
-- Compile a 'from module import'.
compileFromModule :: ImportRelativeSpan -> Compile ()
-- XXX what to do about the initial dots?
compileFromModule (ImportRelative {..}) = do
let moduleName =
case import_relative_module of
Nothing -> ""
Just dottedNames ->
concat $ intersperse "." $ map ident_string dottedNames
index <- lookupNameVar moduleName
emitCodeArg IMPORT_NAME index
fromItemsIdentifiers :: FromItemsSpan -> [Identifier]
fromItemsIdentifiers (ImportEverything {}) = ["*"]
fromItemsIdentifiers (FromItems {..}) =
map fromItemIdentifier from_items_items
where
fromItemIdentifier :: FromItemSpan -> Identifier
fromItemIdentifier (FromItem {..}) = ident_string $ from_item_name
-- compile multiple possible assignments:
-- x = y = z = rhs
compileAssignments :: [ExprSpan] -> Compile ()
compileAssignments [] = return ()
compileAssignments [e] = compileAssignTo e
compileAssignments (e1:e2:rest) = do
emitCodeNoArg DUP_TOP
compileAssignTo e1
compileAssignments (e2:rest)
-- the lhs of an assignment statement
-- we can assume that the parser has only accepted the appropriate
-- subset of expression types
compileAssignTo :: ExprSpan -> Compile ()
compileAssignTo (Var {..}) =
emitWriteVar $ ident_string var_ident
compileAssignTo (Subscript {..}) =
compile subscriptee >>
compile subscript_expr >>
emitCodeNoArg STORE_SUBSCR
-- XXX this can be optimised in places where the rhs is a
-- manifest list or tuple, avoiding the building list/tuple
-- only to deconstruct again
compileAssignTo (AST.Tuple {..}) = do
emitCodeArg UNPACK_SEQUENCE $ fromIntegral $ length tuple_exprs
mapM_ compileAssignTo tuple_exprs
compileAssignTo (AST.List {..}) = do
emitCodeArg UNPACK_SEQUENCE $ fromIntegral $ length list_exprs
mapM_ compileAssignTo list_exprs
compileAssignTo (AST.Paren {..}) = compileAssignTo paren_expr
compileAssignTo expr@(Dot {..} ) = do
compile dot_expr
index <- lookupNameVar $ ident_string dot_attribute
emitCodeArg STORE_ATTR index
{-
compileAssignTo expr@(BinaryOp { operator = Dot {}, right_op_arg = Var {..}}) = do
compile $ left_op_arg expr
index <- lookupNameVar $ ident_string $ var_ident
emitCodeArg STORE_ATTR index
-}
compileAssignTo (SlicedExpr {..}) = do
compile slicee
compileSlices slices
emitCodeNoArg STORE_SUBSCR
compileAssignTo other = error $ "assignment to unexpected expression:\n" ++ prettyText other
compileDelete :: ExprSpan -> Compile ()
compileDelete (Var {..}) = do
emitDeleteVar $ ident_string var_ident
compileDelete (Subscript {..}) =
compile subscriptee >>
compile subscript_expr >>
emitCodeNoArg DELETE_SUBSCR
compileDelete (AST.Paren {..}) = compileDelete paren_expr
compileDelete (Dot {..}) = do
compile dot_expr
index <- lookupNameVar $ ident_string dot_attribute
emitCodeArg DELETE_ATTR index
{-
compileDelete (expr@(BinaryOp { operator = Dot {}, right_op_arg = Var {..}})) = do
compile $ left_op_arg expr
index <- lookupNameVar $ ident_string $ var_ident
emitCodeArg DELETE_ATTR index
-}
compileDelete (SlicedExpr {..}) = do
compile slicee
compileSlices slices
emitCodeNoArg DELETE_SUBSCR
compileDelete other = error $ "delete of unexpected expression:\n" ++ prettyText other
compileWith :: StatementSpan -> Compile ()
compileWith stmt@(With {..}) =
case with_context of
[(context, maybeAs)] -> do
blockLabel <- newLabel
finallyLabel <- newLabel
compile context
emitCodeArg SETUP_WITH finallyLabel
labelNextInstruction blockLabel
withFrameBlock FrameBlockFinallyTry $ do
case maybeAs of
-- Discard result from context.__enter__()
Nothing -> emitCodeNoArg POP_TOP
Just expr -> compileAssignTo expr
mapM_ compile with_body
emitCodeNoArg POP_BLOCK
_ <- compileConstantEmit Blip.None
labelNextInstruction finallyLabel
withFrameBlock FrameBlockFinallyEnd $ do
emitCodeNoArg WITH_CLEANUP
emitCodeNoArg END_FINALLY
_other -> error $ "compileWith applied to non desugared with statement: " ++ prettyText stmt
compileWith other = error $ "compileWith applied to non with statement: " ++ prettyText other
-- Check for a docstring in the first statement of a function body.
-- The first constant in the corresponding code object is inspected
-- by the interpreter for the docstring. If there is no docstring
-- then the first constant must be None
compileFunDocString :: [StatementSpan] -> Compile ()
compileFunDocString (firstStmt:_stmts)
| StmtExpr {..} <- firstStmt,
Strings {} <- stmt_expr
= compileConstant (constantToPyObject stmt_expr) >> return ()
| otherwise = compileConstant Blip.None >> return ()
compileFunDocString [] = compileConstant Blip.None >> return ()
compileClassModuleDocString :: [StatementSpan] -> Compile ()
compileClassModuleDocString (firstStmt:_stmts)
| StmtExpr {..} <- firstStmt,
Strings {} <- stmt_expr
-- XXX what if another __doc__ is in scope?
= do compileConstantEmit $ constantToPyObject stmt_expr
emitWriteVar "__doc__"
| otherwise = return ()
compileClassModuleDocString [] = return ()
-- Compile a conditional guard
compileGuard :: Word16 -> (ExprSpan, [StatementSpan]) -> Compile ()
compileGuard restLabel (expr, stmts) = do
compile expr
falseLabel <- newLabel
emitCodeArg POP_JUMP_IF_FALSE falseLabel
mapM_ compile stmts
emitCodeArg JUMP_FORWARD restLabel
labelNextInstruction falseLabel
-- Desugar the comprehension into a function whose body is a (possibly
-- nested) for loop, then call that function, passing any positional
-- parameters recorded in its local scope (typically the outermost iterable).
compileComprehension
:: Identifier
-> [StatementSpan]
-> (ComprehensionExprSpan -> StatementSpan)
-> [StatementSpan]
-> ComprehensionSpan
-> Compile ()
compileComprehension name initStmt updater returnStmt comprehension = do
let desugaredComp = desugarComprehension initStmt updater returnStmt comprehension
comprehensionSpan = comprehension_annot comprehension
funObj <- nestedBlock
FunctionContext
comprehensionSpan
(compile $ Body desugaredComp)
compileClosure name funObj 0
(_name, localScope) <- getLocalScope $ spanToScopeIdentifier comprehensionSpan
let parameterNames = parameterTypes_pos $ localScope_params localScope
mapM_ emitReadVar parameterNames
emitCodeArg CALL_FUNCTION $ fromIntegral $ length parameterNames
-- Convert a constant expression into the equivalent object. This
-- only works for expressions which have a counterpart in the object
-- representation used in .pyc files.
constantToPyObject :: ExprSpan -> PyObject
constantToPyObject (AST.Int {..})
| int_value > (fromIntegral max32BitSignedInt) ||
int_value < (fromIntegral min32BitSignedInt)
= Blip.Long int_value
| otherwise = Blip.Int $ fromIntegral int_value
where
max32BitSignedInt :: Int32
max32BitSignedInt = maxBound
min32BitSignedInt :: Int32
min32BitSignedInt = minBound
constantToPyObject (AST.Float {..}) = Blip.Float $ float_value
-- XXX we could optimise the case where we have 'float + imaginary j',
-- to generate a Complex number directly, rather than by doing
-- the addition operation.
constantToPyObject (AST.Imaginary {..}) =
Blip.Complex { real = 0.0, imaginary = imaginary_value }
constantToPyObject (AST.Bool { bool_value = True }) = Blip.TrueObj
constantToPyObject (AST.Bool { bool_value = False }) = Blip.FalseObj
constantToPyObject (AST.None {}) = Blip.None
constantToPyObject (AST.Ellipsis {}) = Blip.Ellipsis
-- assumes all the tuple elements are constant
constantToPyObject (AST.Tuple {..}) =
Blip.Tuple { elements = map constantToPyObject tuple_exprs }
constantToPyObject (AST.Strings {..}) =
Blip.Unicode { unicode = concat $ map normaliseString strings_strings }
constantToPyObject (AST.ByteStrings {..}) =
-- error $ show $ map normaliseString byte_string_strings
Blip.String { string = fromString $ concat $ map normaliseString byte_string_strings }
constantToPyObject other =
error $ "constantToPyObject applied to an unexpected expression: " ++ prettyText other
-- The strings in the AST retain their original quote marks, which
-- need to be removed; we have to handle both single and triple quotes.
-- We assume the parser has correctly matched the quotes.
-- Escaped characters such as \n and \t are parsed as multiple characters
-- and need to be converted back into single characters.
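-- Illustrative examples (not from the original source), assuming
-- unescapeString leaves ordinary characters unchanged:
--   removeQuotes on '''doc'''  yields doc   (triple quotes stripped)
--   removeQuotes on 'hi'       yields hi    (single quotes stripped)
--   normaliseString on r'a\nb' keeps the two characters \ and n (raw string)
--   normaliseString on 'a\nb'  decodes \n into a single newline character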
normaliseString :: String -> String
normaliseString ('r':'b':rest) = removeQuotes rest
normaliseString ('b':'r':rest) = removeQuotes rest
normaliseString ('b':rest) = unescapeString $ removeQuotes rest
normaliseString ('r':rest) = removeQuotes rest
normaliseString other = unescapeString $ removeQuotes other
removeQuotes :: String -> String
removeQuotes ('\'':'\'':'\'':rest) = take (length rest - 3) rest
removeQuotes ('"':'"':'"':rest) = take (length rest - 3) rest
removeQuotes ('\'':rest) = init rest
removeQuotes ('"':rest) = init rest
removeQuotes other = error $ "bad literal string: " ++ other
data CallArgs =
CallArgs
{ callArgs_pos :: !Word16
, callArgs_keyword :: !Word16
, callArgs_varPos :: !Bool
, callArgs_varKeyword :: !Bool
}
initCallArgs :: CallArgs
initCallArgs =
CallArgs
{ callArgs_pos = 0
, callArgs_keyword = 0
, callArgs_varPos = False
, callArgs_varKeyword = False
}
-- Compile the arguments to a call and
-- decide which particular CALL_FUNCTION bytecode to emit.
-- numExtraArgs counts any additional arguments the function
-- might have been applied to, which is necessary for classes
-- which get extra arguments beyond the ones mentioned in the
-- program source.
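-- For example (illustrative, not from the original source): compiling the
-- call f(a, b, key=c) with numExtraArgs = 0 gives callArgs_pos = 2 and
-- callArgs_keyword = 1, so opArg = 2 .|. (1 `shiftL` 8) = 0x0102, and with
-- no *args or **kwargs present we emit CALL_FUNCTION 0x0102.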
compileCall :: Word16 -> [ArgumentSpan] -> Compile ()
compileCall numExtraArgs args = do
CallArgs {..} <- compileCallArgs args
let opArg = (callArgs_pos + numExtraArgs) .|. callArgs_keyword `shiftL` 8
case (callArgs_varPos, callArgs_varKeyword) of
(False, False) -> emitCodeArg CALL_FUNCTION opArg
(True, False) -> emitCodeArg CALL_FUNCTION_VAR opArg
(False, True) -> emitCodeArg CALL_FUNCTION_KW opArg
(True, True) -> emitCodeArg CALL_FUNCTION_VAR_KW opArg
-- Compile the arguments to a function call and return the number of
-- positional and keyword arguments, together with flags recording whether
-- *args and **kwargs were supplied.
compileCallArgs :: [ArgumentSpan] -> Compile CallArgs
compileCallArgs = foldM compileArg initCallArgs
where
compileArg :: CallArgs -> ArgumentSpan -> Compile CallArgs
compileArg callArgs@(CallArgs {..}) (ArgExpr {..}) = do
compile arg_expr
return $ callArgs { callArgs_pos = callArgs_pos + 1 }
compileArg callArgs@(CallArgs {..}) (ArgKeyword {..}) = do
compileConstantEmit $ Unicode $ ident_string arg_keyword
compile arg_expr
return $ callArgs { callArgs_keyword = callArgs_keyword + 1 }
compileArg callArgs@(CallArgs {..}) (ArgVarArgsPos {..}) = do
compile arg_expr
return $ callArgs { callArgs_varPos = True }
compileArg callArgs@(CallArgs {..}) (ArgVarArgsKeyword {..}) = do
compile arg_expr
return $ callArgs { callArgs_varKeyword = True }
-- XXX need to handle extended slices, slice expressions and ellipsis
compileSlices :: [SliceSpan] -> Compile ()
compileSlices [SliceProper {..}] = do
case slice_lower of
Nothing -> compileConstantEmit Blip.None
Just expr -> compile expr
case slice_upper of
Nothing -> compileConstantEmit Blip.None
Just expr -> compile expr
case slice_stride of
Nothing -> emitCodeArg BUILD_SLICE 2
-- Not sure about this, maybe it is None
Just Nothing -> emitCodeArg BUILD_SLICE 2
Just (Just expr) -> do
compile expr
emitCodeArg BUILD_SLICE 3
compileSlices other = error $ "unsupported slice: " ++ show other
-- Return the opcode for a given assignment operator.
assignOpCode :: AssignOpSpan -> Opcode
assignOpCode assign =
case assign of
PlusAssign {} -> INPLACE_ADD
MinusAssign {} -> INPLACE_SUBTRACT
MultAssign {} -> INPLACE_MULTIPLY
DivAssign {} -> INPLACE_TRUE_DIVIDE
ModAssign {} -> INPLACE_MODULO
PowAssign {} -> INPLACE_POWER
BinAndAssign {} -> INPLACE_AND
BinOrAssign {} -> INPLACE_OR
BinXorAssign {} -> INPLACE_XOR
LeftShiftAssign {} -> INPLACE_LSHIFT
RightShiftAssign {} -> INPLACE_RSHIFT
FloorDivAssign {} -> INPLACE_FLOOR_DIVIDE
{-
isDot :: OpSpan -> Bool
isDot (Dot {}) = True
isDot _other = False
-}
isBoolean :: OpSpan -> Bool
isBoolean (And {}) = True
isBoolean (Or {}) = True
isBoolean _other = False
isComparison :: OpSpan -> Bool
isComparison (LessThan {}) = True
isComparison (GreaterThan {}) = True
isComparison (Equality {}) = True
isComparison (GreaterThanEquals {}) = True
isComparison (LessThanEquals {}) = True
isComparison (NotEquals {}) = True
isComparison (In {}) = True
isComparison (NotIn {}) = True
isComparison (IsNot {}) = True
isComparison (Is {}) = True
isComparison _other = False
{-
compileDot :: ExprSpan -> Compile ()
compileDot (BinaryOp {..}) = do
compile left_op_arg
case right_op_arg of
Var {..} -> do
-- the right argument should be treated like name variable
varInfo <- lookupNameVar $ ident_string var_ident
emitCodeArg LOAD_ATTR varInfo
other -> error $ "right argument of dot operator not a variable:\n" ++ prettyText other
compileDot other =
error $ "compileDot applied to an unexpected expression: " ++ prettyText other
-}
compileBoolOpExpr :: ExprSpan -> Compile ()
compileBoolOpExpr (BinaryOp {..}) = do
endLabel <- newLabel
compile left_op_arg
case operator of
And {..} -> emitCodeArg JUMP_IF_FALSE_OR_POP endLabel
Or {..} -> emitCodeArg JUMP_IF_TRUE_OR_POP endLabel
other -> error $ "Unexpected boolean operator:\n" ++ prettyText other
compile right_op_arg
labelNextInstruction endLabel
compileBoolOpExpr other =
error $ "compileBoolOpExpr applied to an unexpected expression: " ++ prettyText other
compileOp :: OpSpan -> Compile ()
compileOp operator =
emitCodeNoArg $ case operator of
BinaryOr {} -> BINARY_OR
Xor {} -> BINARY_XOR
BinaryAnd {} -> BINARY_AND
ShiftLeft {} -> BINARY_LSHIFT
ShiftRight {} -> BINARY_RSHIFT
Exponent {} -> BINARY_POWER
Multiply {} -> BINARY_MULTIPLY
Plus {} -> BINARY_ADD
Minus {} -> BINARY_SUBTRACT
Divide {} -> BINARY_TRUE_DIVIDE
FloorDivide {} -> BINARY_FLOOR_DIVIDE
Modulo {} -> BINARY_MODULO
_other -> error $ "Unexpected operator:\n" ++ prettyText operator
compileUnaryOp :: OpSpan -> Compile ()
compileUnaryOp operator =
emitCodeNoArg $ case operator of
Minus {} -> UNARY_NEGATIVE
Plus {} -> UNARY_POSITIVE
Not {} -> UNARY_NOT
Invert {} -> UNARY_INVERT
other -> error $ "Unexpected unary operator: " ++ prettyText other
{-
from object.h
#define Py_LT 0
#define Py_LE 1
#define Py_EQ 2
#define Py_NE 3
#define Py_GT 4
#define Py_GE 5
and from opcode.h
enum cmp_op {PyCmp_LT=Py_LT, PyCmp_LE=Py_LE, PyCmp_EQ=Py_EQ, PyCmp_NE=Py_NE, PyCmp_GT=Py_GT, PyCmp_GE=Py_GE,
PyCmp_IN, PyCmp_NOT_IN, PyCmp_IS, PyCmp_IS_NOT, PyCmp_EXC_MATCH, PyCmp_BAD};
-}
{- Operator chaining:
The parser treats comparison operators as left associative.
So: w < x < y < z is parsed as
(((w < x) < y) < z)
We want to compile this to:
[w]
[x]
DUP_TOP # make a copy of the result of x
ROT_THREE # put the copy of [x] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[y]
DUP_TOP # make a copy of [y]
ROT_THREE # put the copy of [y] to the bottom
<
JUMP_IF_FALSE_OR_POP cleanup
[z]
<
JUMP_FORWARD end
cleanup:
ROT_TWO # put the result of the last comparison on the bottom
# and put the duplicated [y] on the top
POP_TOP # remove the duplicated [y] from the top
end:
# whatever code follows
-}
compileCompareOpExpr :: ExprSpan -> Compile ()
compileCompareOpExpr expr@(BinaryOp {}) =
compileChain numOps chain
where
chain :: [ChainItem]
chain = flattenComparisonChain [] expr
numOps :: Int
numOps = length chain `div` 2
compileChain :: Int -> [ChainItem] -> Compile ()
compileChain numOps (Comparator e1 : internal@(Operator op : Comparator e2 : _rest)) = do
compile e1
if numOps == 1
then do
compile e2
emitCodeArg COMPARE_OP $ comparisonOpCode op
else do
cleanup <- newLabel
(lastOp, lastArg) <- compileChainInternal cleanup internal
compile lastArg
emitCodeArg COMPARE_OP $ comparisonOpCode lastOp
end <- newLabel
emitCodeArg JUMP_FORWARD end
labelNextInstruction cleanup
emitCodeNoArg ROT_TWO
emitCodeNoArg POP_TOP
labelNextInstruction end
compileChain _numOps _items = error $ "bad operator chain: " ++ prettyText expr
compileChainInternal :: Word16 -> [ChainItem] -> Compile (OpSpan, ExprSpan)
compileChainInternal _cleanup [Operator op, Comparator exp] = return (op, exp)
compileChainInternal cleanup (Operator op : Comparator e : rest) = do
compile e
emitCodeNoArg DUP_TOP
emitCodeNoArg ROT_THREE
emitCodeArg COMPARE_OP $ comparisonOpCode op
emitCodeArg JUMP_IF_FALSE_OR_POP cleanup
compileChainInternal cleanup rest
compileChainInternal _cleanup _other = error $ "bad comparison chain: " ++ prettyText expr
comparisonOpCode :: OpSpan -> Word16
comparisonOpCode (LessThan {}) = 0
comparisonOpCode (LessThanEquals {}) = 1
comparisonOpCode (Equality {}) = 2
comparisonOpCode (NotEquals {}) = 3
comparisonOpCode (GreaterThan {}) = 4
comparisonOpCode (GreaterThanEquals {}) = 5
comparisonOpCode (In {}) = 6
comparisonOpCode (NotIn {}) = 7
comparisonOpCode (Is {}) = 8
comparisonOpCode (IsNot {}) = 9
-- XXX we don't appear to have an exact match operator in the AST
comparisonOpCode operator = error $ "Unexpected comparison operator:\n" ++ prettyText operator
compileCompareOpExpr other = error $ "Unexpected comparison operator:\n" ++ prettyText other
data ChainItem = Comparator ExprSpan | Operator OpSpan
flattenComparisonChain :: [ChainItem] -> ExprSpan -> [ChainItem]
flattenComparisonChain acc opExpr@(BinaryOp {..})
| isComparison operator
= flattenComparisonChain newAcc left_op_arg
| otherwise = [Comparator opExpr] ++ acc
where
newAcc = [Operator operator, Comparator right_op_arg] ++ acc
flattenComparisonChain acc other = [Comparator other] ++ acc
-- Emit code that returns the None constant.
returnNone :: Compile ()
returnNone = compileConstantEmit Blip.None >> emitCodeNoArg RETURN_VALUE
-- Print out the variable scope of the module if requested on the command line.
maybeDumpScope :: Compile ()
maybeDumpScope =
ifDump DumpScope $ do
nestedScope <- getNestedScope
liftIO $ putStrLn $ renderScope nestedScope
-- Print out the AST of the module if requested on the command line.
maybeDumpAST :: ModuleSpan -> Compile ()
maybeDumpAST ast = do
ifDump DumpAST $ do
liftIO $ putStrLn "Abstract Syntax Tree:"
liftIO $ putStrLn $ show ast
{-
From Cpython: Objects/lnotab_notes.txt
Code objects store a field named co_lnotab. This is an array of unsigned bytes
disguised as a Python string. It is used to map bytecode offsets to source code
line #s for tracebacks and to identify line number boundaries for line tracing.
The array is conceptually a compressed list of
(bytecode offset increment, line number increment)
pairs. The details are important and delicate, best illustrated by example:
byte code offset source code line number
0 1
6 2
50 7
350 307
361 308
Instead of storing these numbers literally, we compress the list by storing only
the increments from one row to the next. Conceptually, the stored list might
look like:
0, 1, 6, 1, 44, 5, 300, 300, 11, 1
The above doesn't really work, but it's a start. Note that an unsigned byte
can't hold negative values, or values larger than 255, and the above example
contains two such values. So we make two tweaks:
(a) there's a deep assumption that byte code offsets and their corresponding
line #s both increase monotonically, and
(b) if at least one column jumps by more than 255 from one row to the next,
more than one pair is written to the table. In case #b, there's no way to know
from looking at the table later how many were written. That's the delicate
part. A user of co_lnotab desiring to find the source line number
corresponding to a bytecode address A should do something like this
lineno = addr = 0
for addr_incr, line_incr in co_lnotab:
addr += addr_incr
if addr > A:
return lineno
lineno += line_incr
(In C, this is implemented by PyCode_Addr2Line().) In order for this to work,
when the addr field increments by more than 255, the line # increment in each
pair generated must be 0 until the remaining addr increment is < 256. So, in
the example above, assemble_lnotab in compile.c should not (as was actually done
until 2.2) expand 300, 300 to
255, 255, 45, 45,
but to
255, 0, 45, 255, 0, 45.
-}
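-- For example (illustrative): the chunkDeltas helper below expands the
-- (300, 300) increment from the table above into
-- [(255, 0), (45, 255), (0, 45)], i.e. the byte sequence 255 0 45 255 0 45
-- described in the CPython notes.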
-- Returns the bytestring representation of the compressed line number table
compileLineNumberTable :: Word32 -> Compile PyObject
compileLineNumberTable firstLineNumber = do
offsetToLine <- reverse `fmap` getBlockState state_lineNumberTable
let compressedTable = compress (0, firstLineNumber) offsetToLine
bs = B.pack $ concat
[ [fromIntegral offset, fromIntegral line] |
(offset, line) <- compressedTable ]
return Blip.String { string = bs }
where
compress :: (Word16, Word32) -> [(Word16, Word32)] -> [(Word16, Word32)]
compress _prev [] = []
compress (prevOffset, prevLine) (next@(nextOffset, nextLine):rest)
-- make sure all increments are non-negative,
-- skipping any entries which are less than their predecessor
| nextLine < prevLine || nextOffset < prevOffset =
compress (prevOffset, prevLine) rest
| otherwise = chunkDeltas (offsetDelta, lineDelta) ++ compress next rest
where
offsetDelta = nextOffset - prevOffset
lineDelta = nextLine - prevLine
-- both offsetDelta and lineDelta must be non-negative
chunkDeltas :: (Word16, Word32) -> [(Word16, Word32)]
chunkDeltas (offsetDelta, lineDelta)
| offsetDelta < 256 =
if lineDelta < 256
then [(offsetDelta, lineDelta)]
else (offsetDelta, 255) : chunkDeltas (0, lineDelta - 255)
-- we must wait until offsetDelta is less than 256 before reducing lineDelta
| otherwise = (255, 0) : chunkDeltas (offsetDelta - 255, lineDelta)
|
bjpop/blip
|
blipcompiler/src/Blip/Compiler/Compile.hs
|
bsd-3-clause
| 60,092
| 19
| 24
| 14,052
| 12,651
| 6,320
| 6,331
| 1,060
| 17
|
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
module Ivory.OS.FreeRTOS.Tower.Time
( time_module
) where
import Ivory.Language
import Ivory.Tower
import qualified Ivory.OS.FreeRTOS.Time as T
time_module :: Module
time_module = package "tower_time" $ do
T.moddef
incl getTimeProc
where
getTimeProc :: Def('[]':->ITime)
getTimeProc = proc "tower_get_time" $ body $ do
ticks <- call T.getTickCount
ratems <- call T.getTickRateMilliseconds
let t_us :: Sint64
t_us = ((safeCast ticks) * 1000) `iDiv` (safeCast ratems)
ret (fromIMicroseconds t_us)
|
GaloisInc/ivory-tower-stm32
|
tower-freertos-stm32/src/Ivory/OS/FreeRTOS/Tower/Time.hs
|
bsd-3-clause
| 595
| 0
| 17
| 113
| 178
| 95
| 83
| 18
| 1
|
{-# LANGUAGE NoImplicitPrelude #-}
-- |
-- Module: $HEADER$
-- Description: Type restricted variants of functions from Data.Function
-- module, and more.
-- Copyright: (c) 2015, Peter Trško
-- License: BSD3
--
-- Maintainer: peter.trsko@gmail.com
-- Stability: experimental
-- Portability: NoImplicitPrelude
--
-- Type restricted variants of functions from "Data.Function" module, and more.
module Data.Proxy.Function
(
-- * Type Proxies For Functions
aFunction
, aFunction2
, aFunction3
-- * Restrict Functions Using Type Proxies
, resultOf
, hasResultOf
, argumentOf
, hasArgumentOf
-- * Restricted Versions of Standard Functions
, idOf
, forget
, thatForgotten
)
where
import Data.Function (const, flip, id)
import Data.Proxy (Proxy(Proxy))
-- | Type proxy for a unary function. Note that @b@ may itself be a function.
-- All this type proxy says is that it is at least a unary function.
aFunction :: Proxy (a -> b)
aFunction = Proxy
{-# INLINE aFunction #-}
-- | Type proxy for a binary function. Note that @c@ may itself be a function.
-- All this type proxy says is that it is at least a binary function.
aFunction2 :: Proxy (a -> b -> c)
aFunction2 = Proxy
{-# INLINE aFunction2 #-}
-- | Type proxy for a ternary function. Note that @d@ may itself be a function.
-- All this type proxy says is that it is at least a ternary function.
aFunction3 :: Proxy (a -> b -> c -> d)
aFunction3 = Proxy
{-# INLINE aFunction3 #-}
-- | Restrict type of result of a function. Flipped version of 'resultOf'.
--
-- @
-- \\f -> f `hasResultOf` 'Data.Proxy.Either.anEitherOf' 'Data.Proxy.String.string' 'Data.Proxy.Int.int'
-- :: (a -> Either String Word64) -> a -> Either String Int
-- @
--
-- @
-- 'Data.Typeable.cast' `hasResultOf` 'Data.Proxy.Maybe.aMaybeOf' 'Data.Proxy.Int.int'
-- :: Typeable a => a -> Maybe Int
-- @
hasResultOf :: (a -> b) -> Proxy b -> a -> b
hasResultOf = const
{-# INLINE hasResultOf #-}
-- | Restrict type of result of a function. Flipped version of 'hasResultOf'.
--
-- @
-- 'resultOf' ('Data.Proxy.Either.anEitherOf' 'Data.Proxy.String.string' 'Data.Proxy.Int.int')
-- :: (a -> Either String Word64) -> a -> Either String Int
-- @
--
-- @
-- 'resultOf' ('Data.Proxy.Maybe.aMaybeOf' 'Data.Proxy.Int.int') 'Data.Typeable.cast'
-- :: Typeable a => a -> Maybe Int
-- @
resultOf :: Proxy b -> (a -> b) -> a -> b
resultOf Proxy = id
{-# INLINE resultOf #-}
-- | Restrict type of an argument of a function. Flipped variant of
-- 'hasArgumentOf'.
argumentOf :: Proxy a -> (a -> b) -> a -> b
argumentOf Proxy = id
{-# INLINE argumentOf #-}
-- | Restrict type of an argument of a function. Flipped variant of
-- 'argumentOf'.
hasArgumentOf :: (a -> b) -> Proxy a -> a -> b
hasArgumentOf = const
{-# INLINE hasArgumentOf #-}
-- | Type restricted identity function 'id' defined as:
--
-- @
-- 'idOf' = 'flip' 'asProxyTypeOf'
-- @
--
-- Examples:
--
-- @
-- 'idOf' 'Data.Proxy.Word.word16' :: 'Data.Word.Word16' -> 'Data.Word.Word16'
-- @
idOf :: Proxy a -> a -> a
idOf Proxy = id
{-# INLINE idOf #-}
-- | Type restricted variant of 'const'.
--
-- @
-- 'forget' 'Data.Proxy.Exception.ioException' (return Nothing)
-- :: 'Control.Exception.IOException' -> IO (Maybe a)
-- @
forget :: Proxy b -> a -> b -> a
forget Proxy = const -- \p -> argumentOf p . const
{-# INLINE forget #-}
-- | Type restricted version of @'flip' 'const'@.
--
-- @
-- 'Data.Proxy.Exception.ioException' `thatForgotten` return Nothing
-- :: 'Control.Exception.IOException' -> IO (Maybe a)
-- @
thatForgotten :: Proxy a -> a -> b -> b
thatForgotten Proxy = flip const -- \p -> `argumentOf` flip const
{-# INLINE thatForgotten #-}
|
trskop/type-proxies
|
src/Data/Proxy/Function.hs
|
bsd-3-clause
| 3,724
| 0
| 9
| 744
| 441
| 278
| 163
| 45
| 1
|
{-|
Module : Data.Boltzmann.Internal.Logging
Description : Basic logging utilities.
Copyright : (c) Maciej Bendkowski, 2017-2021
License : BSD3
Maintainer : maciej.bendkowski@tcs.uj.edu.pl
Stability : experimental
General logging utilities.
-}
module Data.Boltzmann.Internal.Logging
( Log(..)
, info
, warn
, warn'
, hint
, hint'
, fail
, fail'
) where
import Prelude hiding (log, fail)
import System.IO
import System.Exit
import System.Console.Pretty
import Data.Boltzmann.Internal.Utils
data Level = Info
| Warning
| Hint
| Error
instance Show Level where
show Info = "INF"
show Warning = "WAR"
show Hint = "TIP"
show Error = "ERR"
lvlColor :: Level -> Color
lvlColor Info = Blue
lvlColor Warning = Yellow
lvlColor Hint = Green
lvlColor Error = Red
brackets :: IO a -> IO ()
brackets m = hPutStr stderr "[" >> m >> hPutStr stderr "] " -- note the trailing space
parens :: IO a -> IO ()
parens m = hPutStr stderr "(" >> m >> hPutStr stderr ") " -- note the trailing space
data Log = Log { lvl :: Level -- ^ Logging level.
, msg :: String -- ^ Logging message.
}
printLabel :: Log -> IO ()
printLabel log = do
inColor <- supportsPretty
let label = show (lvl log)
let format = style Bold . color (lvlColor $ lvl log)
let x = if inColor then format label
else label
hPutStr stderr x
printTime :: String -> IO ()
printTime time = do
inColor <- supportsPretty
let format = style Italic
let x = if inColor then format time
else time
hPutStr stderr x
-- | Reports a logging message.
report :: Log -> IO ()
report log = do
time <- getTime
brackets $ printLabel log
parens $ printTime time
hPutStrLn stderr $ msg log
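-- Example output of 'report' on stderr (illustrative; the exact time format
-- comes from 'getTime' in Data.Boltzmann.Internal.Utils):
--   [INF] (12:34:56) Some informational message.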
-- | Logs an INFO message.
info :: String -> IO ()
info s = report Log { lvl = Info, msg = s }
-- | Logs a WARNING message.
warn :: String -> IO ()
warn s = report Log { lvl = Warning, msg = s }
-- | Logs a HINT message.
hint :: String -> IO ()
hint s = report Log { lvl = Hint , msg = s }
-- | Logs a HINT message and terminates.
hint' :: String -> IO a
hint' s = do
report Log { lvl = Hint , msg = s }
exitWith (ExitFailure 1)
-- | Logs a WARNING message and terminates.
warn' :: String -> IO a
warn' s = do
report Log { lvl = Warning, msg = s }
exitWith (ExitFailure 1)
-- | Logs an ERROR message.
fail :: String -> IO ()
fail s = report Log { lvl = Error, msg = s }
-- | Logs an ERROR message and terminates.
fail' :: String -> IO a
fail' s = do
report Log { lvl = Error, msg = s }
exitWith (ExitFailure 1)
|
maciej-bendkowski/boltzmann-brain
|
Data/Boltzmann/Internal/Logging.hs
|
bsd-3-clause
| 2,721
| 0
| 14
| 785
| 837
| 435
| 402
| 75
| 2
|
{-# LANGUAGE LambdaCase, OverloadedStrings #-}
module Transformations.EffectMap
( effectMap
) where
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid
import Data.Functor.Foldable as Foldable
import qualified Data.Foldable
import Lens.Micro
import Data.List
import Data.Maybe
import Debug.Trace
import Grin.Grin
import Grin.TypeEnv
import Grin.EffectMap
import Transformations.Util
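-- | Build the effect map of a program: for every function, record the
-- store/update/primop effects it may perform, either directly or
-- transitively through the functions it calls, and record each effectful
-- external under its own name.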
effectMap :: (TypeEnv, Exp) -> EffectMap
effectMap (te, e) = EffectMap $ withEffectfulExternals $ effectfulFunctions $ unMMap $ snd $ para buildEffectMap e where
withEffectfulExternals :: Map Name Effects -> Map Name Effects
withEffectfulExternals
| Program exts _ <- e
= Map.union $ Map.fromSet (\ext -> Effects (Set.singleton ext) mempty mempty) effectfulExternals
| otherwise = id
effectfulExternals :: Set Name
effectfulExternals = case e of
Program es _ -> Set.fromList $ map eName $ filter eEffectful es
_ -> Set.empty
buildEffectMap :: ExpF (Exp, (Set EffectWithCalls, MonoidMap Name (Set EffectWithCalls))) -> (Set EffectWithCalls, MonoidMap Name (Set EffectWithCalls))
buildEffectMap = \case
DefF name _ (_,(effs, _)) -> (mempty, MMap $ Map.singleton name effs)
EBindF (SStore _,lhs) (Var v) (_,rhs)
| Just locs <- te ^? variable . at v . _Just . _T_SimpleType . _T_Location
-> let storeEff = (Set.singleton $ Effect $ storesEff locs, mempty)
in lhs <> rhs <> storeEff
SAppF name _
| Set.member name effectfulExternals -> (Set.singleton (Effect $ primopEff name), mempty)
| otherwise -> (Set.singleton (Call name), mempty)
SUpdateF name _
| Just locs <- te ^? variable . at name . _Just . _T_SimpleType . _T_Location
-> (Set.singleton $ Effect $ updatesEff locs, mempty)
rest -> Data.Foldable.fold . fmap snd $ rest
data EffectWithCalls
= Effect { toEffects :: Effects }
| Call { callsFunction :: Name }
deriving (Eq, Show, Ord)
-- Removes the call information and collects all the transitive effects.
-- Returns a Map that contains only the effectful functions.
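-- For example (illustrative): if f only calls g, and g stores to some
-- location, the fixed point below propagates g's store effect to f and then
-- drops the Call entries; functions with no remaining effects disappear
-- from the resulting map.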
effectfulFunctions :: Map Name (Set EffectWithCalls) -> Map Name Effects
effectfulFunctions em = removeCalls $ go em where
removeCalls = Map.map (foldMap toEffects) . Map.filter (not . Set.null) . Map.map (Set.filter (not . isCall))
go em0 = let em1 = flip Map.map em0 $ \es ->
let (calls, rest) = Set.partition isCall es
in Set.unions [calls, rest, mconcat $ map (fromMaybe mempty . flip Map.lookup em . callsFunction) $ Set.toList calls]
in if em0 == em1 then em0 else go em1
isCall = \case
Call _ -> True
_ -> False
-- MonoidMap
newtype MonoidMap k m = MMap { unMMap :: Map k m }
deriving Show
instance (Ord k, Semigroup m) => Semigroup (MonoidMap k m) where
(MMap m1) <> (MMap m2) = MMap (Map.unionWith (<>) m1 m2)
instance (Ord k, Monoid m) => Monoid (MonoidMap k m) where
mempty = MMap mempty
mappend (MMap m1) (MMap m2) = MMap (Map.unionWith mappend m1 m2)
|
andorp/grin
|
grin/src/Transformations/EffectMap.hs
|
bsd-3-clause
| 3,131
| 0
| 24
| 686
| 1,135
| 588
| 547
| -1
| -1
|
{-# LANGUAGE FlexibleInstances #-}
module Language.C.Analysis.TypeCheck where
import Control.Monad
import Data.Either
import Data.Maybe
import Language.C.Data.Ident
import Language.C.Data.Node
import Language.C.Data.Position
import Language.C.Pretty
import Language.C.Syntax.AST
import Language.C.Syntax.Constants
import Language.C.Syntax.Ops
import Language.C.Analysis.Debug
import Language.C.Analysis.DefTable
import Language.C.Analysis.SemRep
import Language.C.Analysis.TravMonad
import Language.C.Analysis.TypeConversions
import Language.C.Analysis.TypeUtils
import Text.PrettyPrint.HughesPJ
-- This is the standard Monad instance for Either String, with 'fail'
-- reporting the error message (used here in place of MonadError).
-- /FIXME/: Is this exported outside Language.C ?
instance Monad (Either String) where
return = Right
Left l >>= _ = Left l
Right r >>= k = k r
fail msg = Left msg
pType :: Type -> String
pType = render . pretty
typeErrorOnLeft :: (MonadCError m) => NodeInfo -> Either String a -> m a
typeErrorOnLeft ni (Left err) = typeError ni err
typeErrorOnLeft _ (Right v) = return v
-- XXX: this should use a custom error type, but typeMismatch isn't always right
typeError :: MonadCError m => NodeInfo -> String -> m a
typeError = astError
notFound :: Ident -> Either String a
notFound i = fail $ "not found: " ++ identToString i
checkScalar' :: MonadCError m => NodeInfo -> Type -> m ()
checkScalar' ni = typeErrorOnLeft ni . checkScalar
checkIntegral' :: MonadCError m => NodeInfo -> Type -> m ()
checkIntegral' ni = typeErrorOnLeft ni . checkIntegral
assignCompatible' :: MonadCError m =>
NodeInfo -> CAssignOp -> Type -> Type -> m ()
assignCompatible' ni op t1 t2 = typeErrorOnLeft ni (assignCompatible op t1 t2)
binopType' :: MonadCError m =>
NodeInfo -> CBinaryOp -> Type -> Type -> m Type
binopType' ni op t1 t2 = typeErrorOnLeft ni (binopType op t1 t2)
conditionalType' :: MonadCError m => NodeInfo -> Type -> Type -> m Type
conditionalType' ni t1 t2 = typeErrorOnLeft ni $ conditionalType t1 t2
checkScalar :: Type -> Either String ()
checkScalar t =
case canonicalType t of
DirectType _ _ _ -> return ()
PtrType _ _ _ -> return ()
ArrayType _ _ _ _ -> return () -- because it's just a pointer
t' -> fail $
"expected scalar type, got: "
++ pType t ++ " (" ++ pType t' ++ ")"
checkIntegral :: Type -> Either String ()
checkIntegral t | isIntegralType (canonicalType t) = return ()
| otherwise = fail $
"expected integral type, got: " ++
pType t ++ " (" ++
pType (canonicalType t) ++ ")"
-- | Determine the type of a constant.
constType :: (MonadCError m, MonadName m) => CConst -> m Type
constType (CIntConst (CInteger _ _ flags) _) =
return $ DirectType (TyIntegral (getIntType flags)) noTypeQuals noAttributes
constType (CCharConst (CChar _ True) _) =
return $ DirectType (TyIntegral TyInt) noTypeQuals noAttributes
constType (CCharConst (CChar _ False) _) =
return $ DirectType (TyIntegral TyChar) noTypeQuals noAttributes
constType (CCharConst (CChars _ _) _) =
return $ DirectType (TyIntegral TyInt) noTypeQuals noAttributes -- XXX
constType (CFloatConst (CFloat fs) _) =
return $ DirectType (TyFloating (getFloatType fs)) noTypeQuals noAttributes
-- XXX: should strings have any type qualifiers or attributes?
constType (CStrConst (CString chars wide) ni) =
do n <- genName
let charType | wide = TyInt -- XXX: this isn't universal
| otherwise = TyChar
ni' = mkNodeInfo (posOf ni) n
arraySize = ArraySize
True -- XXX: is it static?
(CConst
(CIntConst
(cInteger (toInteger (length chars))) ni'))
return $ ArrayType (DirectType (TyIntegral charType) noTypeQuals noAttributes)
arraySize noTypeQuals []
-- | Determine whether two types are compatible.
compatible :: Type -> Type -> Either String ()
compatible t1 t2 = compositeType t1 t2 >> return ()
-- | Determine the composite type of two compatible types.
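-- Illustrative examples (assumptions about the helper conversions, not taken
-- from this source):
--   compositeType of an integral type and an enum yields the integral type
--   compositeType of a pointer type and an integral type yields the pointer
--   compositeType of an array of unknown size and one of known size keeps
--   the known size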
compositeType :: Type -> Type -> Either String Type
compositeType t1 (DirectType (TyBuiltin TyAny) _ _) = return t1
compositeType (DirectType (TyBuiltin TyAny) _ _) t2 = return t2
compositeType t1@(DirectType tn1 q1 a1) t2@(DirectType tn2 q2 a2) =
do tn <- case (tn1, tn2) of
(TyVoid, TyVoid) -> return TyVoid
(TyIntegral _, TyEnum _) -> return tn1
(TyEnum _, TyIntegral _) -> return tn2
(TyIntegral i1, TyIntegral i2) ->
return $ TyIntegral (intConversion i1 i2)
(TyFloating f1, TyFloating f2) ->
return $ TyFloating (floatConversion f1 f2)
(TyComplex f1, TyComplex f2) ->
return $ TyComplex (floatConversion f1 f2)
(TyComp c1, TyComp c2) ->
do when (sueRef c1 /= sueRef c2) $
fail $ "incompatible composite types: "
++ pType t1 ++ ", " ++ pType t2
return tn1
(TyEnum e1, TyEnum e2) ->
do when (sueRef e1 /= sueRef e2) $
fail $ "incompatible enumeration types: "
++ pType t1 ++ ", " ++ pType t2
return $ TyEnum e1
(TyBuiltin TyVaList, TyBuiltin TyVaList) ->
return $ TyBuiltin TyVaList
(TyBuiltin _, TyBuiltin _) ->
fail $ "incompatible builtin types: "
++ pType t1 ++ ", " ++ pType t2
(_, _) -> fail $ "incompatible direct types: "
++ pType t1 ++ ", " ++ pType t2
return $ DirectType tn (mergeTypeQuals q1 q2) (mergeAttributes a1 a2)
compositeType (PtrType t1 q1 a1) t2 | isIntegralType t2 =
return $ PtrType t1 (mergeTypeQuals q1 (typeQuals t2)) a1
compositeType t1 (PtrType t2 q2 a2) | isIntegralType t1 =
return $ PtrType t2 (mergeTypeQuals (typeQuals t1) q2) a2
compositeType (ArrayType t1 s1 q1 a1) (ArrayType t2 s2 q2 a2) =
do t <- compositeType t1 t2
s <- compositeSize s1 s2
let quals = mergeTypeQuals q1 q2
attrs = mergeAttrs a1 a2
return (ArrayType t s quals attrs)
compositeType t1 t2 | isPointerType t1 && isPointerType t2 =
do t <- compositeType (baseType t1) (baseType t2)
let quals = mergeTypeQuals (typeQuals t1) (typeQuals t2)
attrs = mergeAttrs (typeAttrs t1) (typeAttrs t2)
return (PtrType t quals attrs)
compositeType (TypeDefType tdr1 q1 a1) (TypeDefType tdr2 q2 a2) =
case (tdr1, tdr2) of
(TypeDefRef i1 Nothing _, TypeDefRef i2 _ _) -> doTypeDef i1 i2 tdr1
(TypeDefRef i1 _ _, TypeDefRef i2 Nothing _) -> doTypeDef i1 i2 tdr2
(TypeDefRef _ (Just t1) _, TypeDefRef _ (Just t2) _) ->
compositeType t1 t2
where doTypeDef i1 i2 tdr =
do when (i1 /= i2) $ fail $ "incompatible typedef types: "
++ identToString i1 ++ ", " ++ identToString i2
return (TypeDefType tdr (mergeTypeQuals q1 q2) (mergeAttributes a1 a2))
compositeType (FunctionType ft1 attrs1) (FunctionType ft2 attrs2) =
case (ft1, ft2) of
(FunType rt1 args1 varargs1, FunType rt2 args2 varargs2) ->
do when (length args1 /= length args2) $
fail "different numbers of arguments in function types"
args <- mapM (uncurry compositeParamDecl) (zip args1 args2)
when (varargs1 /= varargs2) $
fail "incompatible varargs declarations"
doFunType rt1 rt2 args varargs1
(FunType rt1 args1 varargs1, FunTypeIncomplete rt2) ->
doFunType rt1 rt2 args1 varargs1
(FunTypeIncomplete rt1, FunType rt2 args2 varargs2) ->
doFunType rt1 rt2 args2 varargs2
(FunTypeIncomplete rt1, FunTypeIncomplete rt2) ->
do rt <- compositeType rt1 rt2
return (FunctionType (FunTypeIncomplete rt) (mergeAttrs attrs1 attrs2))
where doFunType rt1 rt2 args varargs =
do rt <- compositeType rt1 rt2
return (FunctionType
(FunType rt args varargs)
(mergeAttrs attrs1 attrs2))
compositeType t1 t2 = fail $ "incompatible types: "
++ pType t1 ++ ", " ++ pType t2
-- XXX: this may not be correct
compositeSize :: ArraySize -> ArraySize -> Either String ArraySize
compositeSize (UnknownArraySize _) s2 = return s2
compositeSize s1 (UnknownArraySize _) = return s1
compositeSize (ArraySize s1 e1) (ArraySize s2 e2)
| s1 == s2 && sizeEqual e1 e2 = return $ ArraySize s1 e1
| otherwise =
fail $ "incompatible array sizes: "
++ (render . pretty) e1 ++ ", " ++ (render . pretty) e2
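-- Two array size expressions are considered equal if they are the same
-- integer constant; otherwise we fall back to comparing their source nodes.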
sizeEqual :: CExpr -> CExpr -> Bool
sizeEqual (CConst (CIntConst i1 _)) (CConst (CIntConst i2 _)) = i1 == i2
sizeEqual e1 e2 = nodeInfo e1 == nodeInfo e2
mergeAttrs :: Attributes -> Attributes -> Attributes
mergeAttrs = (++) -- XXX: ultimately this should be smarter
compositeParamDecl :: ParamDecl -> ParamDecl -> Either String ParamDecl
compositeParamDecl (ParamDecl vd1 ni1) (ParamDecl vd2 _) =
compositeParamDecl' ParamDecl vd1 vd2 ni1
compositeParamDecl (AbstractParamDecl vd1 _) (ParamDecl vd2 ni2) =
compositeParamDecl' ParamDecl vd1 vd2 ni2
compositeParamDecl (ParamDecl vd1 ni1) (AbstractParamDecl vd2 _) =
compositeParamDecl' ParamDecl vd1 vd2 ni1
compositeParamDecl (AbstractParamDecl vd1 ni1) (AbstractParamDecl vd2 _) =
compositeParamDecl' AbstractParamDecl vd1 vd2 ni1
compositeParamDecl' :: (VarDecl -> NodeInfo -> ParamDecl)
-> VarDecl
-> VarDecl
-> NodeInfo
-> Either String ParamDecl
compositeParamDecl' f (VarDecl n1 attrs1 t1) (VarDecl n2 attrs2 t2) dni =
do vd <- compositeVarDecl (VarDecl n1 attrs1 t1') (VarDecl n2 attrs2 t2')
return $ f vd dni
where t1' = canonicalType t1
t2' = canonicalType t2
compositeVarDecl :: VarDecl -> VarDecl -> Either String VarDecl
compositeVarDecl (VarDecl n1 attrs1 t1) (VarDecl _ attrs2 t2) =
do t <- compositeType t1 t2
return (VarDecl n1 (compositeDeclAttrs attrs1 attrs2) t)
-- XXX: bad treatment of inline and storage
compositeDeclAttrs :: DeclAttrs -> DeclAttrs -> DeclAttrs
compositeDeclAttrs (DeclAttrs inl stor attrs1) (DeclAttrs _ _ attrs2) =
DeclAttrs inl stor (mergeAttrs attrs1 attrs2)
castCompatible :: Type -> Type -> Either String ()
castCompatible t1 t2 =
case (canonicalType t1, canonicalType t2) of
(DirectType TyVoid _ _, _) -> return ()
(_, _) -> checkScalar t1 >> checkScalar t2
-- | Determine whether two types are compatible in an assignment expression.
assignCompatible :: CAssignOp -> Type -> Type -> Either String ()
assignCompatible CAssignOp t1 t2 =
case (canonicalType t1, canonicalType t2) of
(DirectType (TyBuiltin TyAny) _ _, _) -> return ()
(_, DirectType (TyBuiltin TyAny) _ _) -> return ()
-- XXX: check qualifiers
(PtrType (DirectType TyVoid _ _) _ _, t2') | isPointerType t2' -> return ()
-- XXX: check qualifiers
(t1', PtrType (DirectType TyVoid _ _) _ _) | isPointerType t1' -> return ()
(PtrType _ _ _, t2') | isIntegralType t2' -> return ()
(t1', t2') | isPointerType t1' && isPointerType t2' ->
do compatible (baseType t1') (baseType t2')
--unless (typeQuals t2 <= typeQuals t1) $
-- fail $
-- "incompatible qualifiers in pointer assignment: "
-- ++ pType t1 ++ ", " ++ pType t2
(DirectType (TyComp c1) _ _, DirectType (TyComp c2) _ _)
| sueRef c1 == sueRef c2 -> return ()
| otherwise -> fail $
"incompatible compound types in assignment: "
++ pType t1 ++ ", " ++ pType t2
(DirectType (TyBuiltin TyVaList) _ _, DirectType (TyBuiltin TyVaList) _ _) ->
return ()
(DirectType tn1 _ _, DirectType tn2 _ _)
| isJust (arithmeticConversion tn1 tn2) -> return ()
| otherwise -> fail $ "incompatible direct types in assignment: "
++ pType t1 ++ ", " ++ pType t2
(t1', t2') -> compatible t1' t2'
assignCompatible op t1 t2 = binopType (assignBinop op) t1 t2 >> return ()
-- | Determine the type of a binary operation.
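-- Illustrative examples (assumptions, not from this source): a comparison
-- such as p == q yields boolType, pointer subtraction yields ptrDiffType,
-- and pointer + integral yields the pointer operand's type.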
binopType :: CBinaryOp -> Type -> Type -> Either String Type
binopType op t1 t2 =
case (op, canonicalType t1, canonicalType t2) of
(_, t1', t2')
| isLogicOp op ->
checkScalar t1' >> checkScalar t2' >> return boolType
| isCmpOp op ->
case (t1', t2') of
(DirectType tn1 _ _, DirectType tn2 _ _) ->
case arithmeticConversion tn1 tn2 of
Just _ -> return boolType
Nothing -> fail
"incompatible arithmetic types in comparison"
(PtrType (DirectType TyVoid _ _) _ _, _)
| isPointerType t2' -> return boolType
(_, PtrType (DirectType TyVoid _ _) _ _)
| isPointerType t1' -> return boolType
(_, _)
| isPointerType t1' && isIntegralType t2' -> return boolType
| isIntegralType t1' && isPointerType t2' -> return boolType
| isPointerType t1' && isPointerType t2' ->
compatible t1' t2' >> return boolType
(_, _) -> fail "incompatible types in comparison"
(CSubOp, ArrayType t1' _ _ _, ArrayType t2' _ _ _) ->
compatible t1' t2' >> return ptrDiffType
(CSubOp, ArrayType t1' _ _ _, PtrType t2' _ _) ->
compatible t1' t2' >> return ptrDiffType
(CSubOp, PtrType t1' _ _, ArrayType t2' _ _ _) ->
compatible t1' t2' >> return ptrDiffType
(CSubOp, PtrType t1' _ _, PtrType t2' _ _) ->
compatible t1' t2' >> return ptrDiffType
(_, PtrType _ _ _, t2')
| isPtrOp op && isIntegralType t2' -> return t1
| otherwise -> fail $ "invalid pointer operation: " ++ show op
(CAddOp, t1', PtrType _ _ _) | isIntegralType t1' -> return t2
(_, ArrayType _ _ _ _, t2')
| isPtrOp op && isIntegralType t2' -> return t1
| otherwise -> fail $ "invalid pointer operation: " ++ show op
(CAddOp, t1', ArrayType _ _ _ _) | isIntegralType t1' -> return t2
(_, DirectType tn1 q1 a1, DirectType tn2 q2 a2) ->
do when (isBitOp op) (checkIntegral t1 >> checkIntegral t2)
case arithmeticConversion tn1 tn2 of
Just tn -> return $ DirectType tn (mergeTypeQuals q1 q2) (mergeAttributes a1 a2)
Nothing -> fail $ "invalid binary operation: " ++
show op ++ ", " ++ pType t1 ++ ", " ++ pType t2
(_, _, _) -> fail $ "unhandled binary operation: "
++ pType t1 ++ show op ++ pType t2
-- | Determine the type of a conditional expression.
conditionalType :: Type -> Type -> Either String Type
conditionalType t1 t2 =
case (canonicalType t1, canonicalType t2) of
(PtrType (DirectType TyVoid _ _) _ _, t2') | isPointerType t2' -> return t2
(t1', PtrType (DirectType TyVoid _ _) _ _) | isPointerType t1' -> return t1
(ArrayType t1' _ q1 a1, ArrayType t2' _ q2 a2) ->
do t <- compositeType t1' t2'
return $ ArrayType t (UnknownArraySize False)
(mergeTypeQuals q1 q2) (mergeAttrs a1 a2)
(t1'@(DirectType tn1 q1 a1), t2'@(DirectType tn2 q2 a2)) ->
case arithmeticConversion tn1 tn2 of
Just tn -> return $ DirectType tn (mergeTypeQuals q1 q2) (mergeAttributes a1 a2)
Nothing -> compositeType t1' t2'
(t1', t2') -> compositeType t1' t2'
derefType :: Type -> Either String Type
derefType (PtrType t _ _) = return t
derefType (ArrayType t _ _ _) = return t
derefType t =
-- XXX: is it good to use canonicalType here?
case canonicalType t of
PtrType t' _ _ -> return t'
ArrayType t' _ _ _ -> return t'
_ -> fail $ "dereferencing non-pointer: " ++ pType t
varAddrType :: IdentDecl -> Either String Type
varAddrType d =
do case declStorage d of
Auto True -> fail "address of register variable"
_ -> return ()
case t of
ArrayType _ _ q a -> return $ PtrType t q a
_ -> return $ simplePtr t
where t = declType d
-- | Get the type of field @m@ of type @t@
fieldType :: (MonadCError m, MonadSymtab m) => NodeInfo -> Ident -> Type -> m Type
fieldType ni m t =
case canonicalType t of
DirectType (TyComp ctr) _ _ ->
do td <- lookupSUE ni (sueRef ctr)
ms <- tagMembers ni td
case lookup m ms of
Just ft -> return ft
Nothing -> typeError ni $ "field not found: " ++ identToString m
_t' -> astError ni $
"field of non-composite type: " ++ identToString m
++ ", " ++ pType t
-- | Get all members of a struct, union, or enum, with their
-- types. Collapse fields of anonymous members.
tagMembers :: (MonadCError m, MonadSymtab m) =>
NodeInfo -> TagDef -> m [(Ident, Type)]
tagMembers ni td =
case td of
CompDef (CompType _ _ ms _ _) -> getMembers ms
EnumDef (EnumType _ es _ _) -> getMembers es
where getMembers ds =
do let ts = map declType ds
ns = map declName ds
concat `liftM` mapM (expandAnonymous ni) (zip ns ts)
-- | Expand an anonymous composite type into a list of member names
-- and their associated types.
expandAnonymous :: (MonadCError m, MonadSymtab m) =>
NodeInfo -> (VarName, Type)
-> m [(Ident, Type)]
expandAnonymous ni (NoName, DirectType (TyComp ctr) _ _) =
lookupSUE ni (sueRef ctr) >>= tagMembers ni
expandAnonymous _ (NoName, _) = return []
expandAnonymous _ (VarName n _, t) = return [(n, t)]
lookupSUE :: (MonadCError m, MonadSymtab m) =>
NodeInfo -> SUERef -> m TagDef
lookupSUE ni sue =
do dt <- getDefTable
case lookupTag sue dt of
Just (Right td) -> return td
_ ->
typeError ni $ "unknown composite type: " ++ (render . pretty) sue
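-- | Collect all attributes of a type, looking through pointers, arrays,
-- function result types and typedefs, and including attributes attached to
-- the definitions of struct/union/enum types.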
deepTypeAttrs :: (MonadCError m, MonadSymtab m) =>
Type -> m Attributes
deepTypeAttrs (DirectType (TyComp (CompTypeRef sue _ ni)) _ attrs) =
(attrs ++) `liftM` sueAttrs ni sue
deepTypeAttrs (DirectType (TyEnum (EnumTypeRef sue ni)) _ attrs) =
(attrs ++) `liftM` sueAttrs ni sue
deepTypeAttrs (DirectType _ _ attrs) = return attrs
deepTypeAttrs (PtrType t _ attrs) = (attrs ++) `liftM` deepTypeAttrs t
deepTypeAttrs (ArrayType t _ _ attrs) = (attrs ++) `liftM` deepTypeAttrs t
deepTypeAttrs (FunctionType (FunType t _ _) attrs) =
(attrs ++) `liftM` deepTypeAttrs t
deepTypeAttrs (FunctionType (FunTypeIncomplete t) attrs) =
(attrs ++) `liftM` deepTypeAttrs t
deepTypeAttrs (TypeDefType (TypeDefRef i _ ni) _ attrs) =
(attrs ++) `liftM` typeDefAttrs ni i
typeDefAttrs :: (MonadCError m, MonadSymtab m) =>
NodeInfo -> Ident -> m Attributes
typeDefAttrs ni i =
do dt <- getDefTable
case lookupIdent i dt of
Nothing -> astError ni $ "can't find typedef name: " ++ identToString i
Just (Left (TypeDef _ t attrs _)) -> (attrs ++) `liftM` deepTypeAttrs t
Just (Right _) -> astError ni $ "not a typedef name: " ++ identToString i
sueAttrs :: (MonadCError m, MonadSymtab m) =>
NodeInfo -> SUERef -> m Attributes
sueAttrs ni sue =
do dt <- getDefTable
case lookupTag sue dt of
Nothing -> astError ni $ "SUE not found: " ++ render (pretty sue)
Just (Left _) -> return []
Just (Right (CompDef (CompType _ _ _ attrs _))) -> return attrs
Just (Right (EnumDef (EnumType _ _ attrs _))) -> return attrs
|
jthornber/language-c-ejt
|
src/Language/C/Analysis/TypeCheck.hs
|
bsd-3-clause
| 19,649
| 0
| 21
| 5,388
| 6,745
| 3,287
| 3,458
| 379
| 17
|
module Widgets.Settings where
data Setting = Cast | Reflection
deriving (Enum, Ord, Eq, Show)
|
Zielon/Bounce
|
src/Widgets/Settings.hs
|
bsd-3-clause
| 98
| 0
| 6
| 18
| 35
| 20
| 15
| 3
| 0
|
-- for TokParsing, MonadicParsing
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE CPP #-}
-- {-# OPTIONS_GHC -Wall #-}
-- {-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- {-# OPTIONS_GHC -fno-warn-orphans #-}
-- {-# OPTIONS_GHC -fno-warn-missing-signatures #-}
-- {-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-uni-patterns #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <rx@a-rx.info>
-- Stability : experimental
-- Portability: non-portable
--
-- This module is part of Pire's parser.
--------------------------------------------------------------------
module Pire.Parser.Telescope
(
module Pire.Parser.Telescope
)
where
import Pire.Syntax.Telescope
import Pire.Syntax.Nm
import Pire.Syntax.Eps
-- import Pire.Syntax.Token
-- import Pire.Syntax.Constructor
-- import Pire.Syntax.Decl
-- import Pire.Syntax.Modules
-- import Pire.Syntax.ConstructorNames
import Pire.Syntax.Wildcard
import Pire.Syntax.Expr
-- import Pire.Syntax.MkVisible
import Pire.Syntax.Binder
-- import Pire.Utils
import Pire.Parser.Parser
import Pire.Parser.Basic
import Pire.Parser.Token
import Pire.Parser.VarWildcard
import Pire.Parser.Expr
-- import Pire.Parser.Layout
import Control.Applicative
-- MonadPlus
-- import Control.Monad.Error
#ifdef MIN_VERSION_GLASGOW_HASKELL
#if MIN_VERSION_GLASGOW_HASKELL(7,10,3,0)
-- ghc >= 7.10.3
-- import Control.Monad.Except
#else
-- older ghc versions, but MIN_VERSION_GLASGOW_HASKELL defined
#endif
#else
-- MIN_VERSION_GLASGOW_HASKELL not even defined yet (ghc <= 7.8.x)
-- import Control.Monad.Error
-- import Control.Monad
#endif
-- import Control.Monad.State
import Control.Monad.State.Strict
import Text.Parser.LookAhead
-- import Text.PrettyPrint (Doc)
-- import Text.PrettyPrint.HughesPJ (Doc)
-- import Text.PrettyPrint.ANSI.Leijen (Doc)
-- import qualified Data.Set as S
-- import qualified Data.Text as T (Text, pack)
import qualified Data.Text as T (Text)
-- examples, cf tests dir, lecture 4(?):
-- (m1:Nat)(m2:Nat)(Beautiful m1)(Beautiful m2)[n = plus m1 m2]
-- handled below as annot, (imp,) equal
telescope :: (TokenParsing m
-- , Fresh m
, LookAheadParsing m
, DeltaParsing m
, MonadState PiState m
) => m (Telescope T.Text T.Text)
telescope = do
bindings <- telebindings
return $ foldr id EmptyTele bindings where
telescope_ :: (TokenParsing m
, LookAheadParsing m
, DeltaParsing m
, MonadState PiState m
) => m (Telescope T.Text T.Text)
-- -- for starters...
-- telescope_ = return EmptyTele
telescope_ = do
-- bindings <- telebindings'
-- let bindings' = [b | (o, b, c) <- bindings]
-- return $ foldr id Empty' bindings' where
bindings <- telebindings_
-- let bindings' = [b | (o, b, c) <- bindings]
return $ foldr id EmptyTele bindings where
telebindings :: (TokenParsing m
, LookAheadParsing m
, DeltaParsing m
, MonadState PiState m)
=> m [Telescope T.Text T.Text -> Telescope T.Text T.Text]
telebindings = many teleBinding
where
annot = do
(x,ty) <- try ((,) <$> varOrWildcard <*> (colon >> expr))
<|> ((,) <$> fresh wildcardName <*> expr)
return (Cons RuntimeP x ty)
imp = do
v <- varOrWildcard
_ <- colon
t <- expr
return (Cons ErasedP v t)
equal = do
v <- ide
reservedOp "="
t <- expr
return (Constraint (V v) t)
teleBinding =
( parens annot
<|> try (brackets imp)
<|> brackets equal) <?> "binding"
-- " (m1{-m-}:{-d-}Nat{-N-})"
annot_ :: (TokenParsing m
-- , Fresh m
, LookAheadParsing m
, DeltaParsing m
, MonadState PiState m
)=> m (Telescope T.Text T.Text -> Telescope T.Text T.Text)
annot_ = do
-- ( x : T )
-- ( _ : T )
try (do
po <- parenOpen_
-- v <- varOrWildcard_
(v, ws) <- varOrWildcard2_
cln <- colon_
ty <- expr_
pc <- parenClose_
return $ ConsInParens_ RuntimeP po (Nm1_ v ws) cln ty pc
)
<|>
-- ( T ) -> ( _ [:] T )
-- (Beautiful m2)
(do
po <- parenOpen_
-- v <- fresh wildcardName
i <- freshIdx
-- let v = mkVisible wildcardName i
-- PiP_ RuntimeP (Ann_ $ InferredAnnBnd_ (InvisibleBinder i) tyA) arr $ abstract1 nm $ tyB
ty <- expr_
pc <- parenClose_
-- no colon, thus SimpleConsInParens_
-- return $ ConsWildInParens_ RuntimeP po v ty pc
return $ ConsWildInParens_ RuntimeP po (InvisibleBinder i) ty pc
)
-- " [m1{-m-}:{-d-}Nat{-N-}]"
imp_ :: (TokenParsing m
-- , Fresh m
, LookAheadParsing m
, DeltaParsing m
, MonadState PiState m
)=> m (Telescope T.Text T.Text -> Telescope T.Text T.Text)
imp_ = do
bo <- bracketOpen_
-- v <- varOrWildcard_
(v, ws) <- varOrWildcard2_
c <- colon_
ty <- expr_
bc <- bracketClose_
return $ ConsInBrackets_ ErasedP bo (Nm1_ v ws) c ty bc
-- [n = plus m1 m2]
equal_ :: (TokenParsing m
-- , Fresh m
, LookAheadParsing m
, DeltaParsing m
, MonadState PiState m
) => m (Telescope T.Text T.Text -> Telescope T.Text T.Text)
equal_ = do
bo <- bracketOpen_
v <- var_
e <- eq_
t <- expr_
bc <- bracketClose_
return $ Constraint_ bo v e t bc
teleBinding_ :: (TokenParsing m
-- , Fresh m
, LookAheadParsing m
, DeltaParsing m
, MonadState PiState m
) => m (Telescope T.Text T.Text -> Telescope T.Text T.Text)
teleBinding_ =
annot_
<|> try imp_
<|> equal_ <?> "binding"
telebindings_ :: (TokenParsing m
-- , Fresh m
, LookAheadParsing m
, DeltaParsing m
, MonadState PiState m)
=> m [Telescope T.Text T.Text -> Telescope T.Text T.Text]
telebindings_ = many teleBinding_
|
reuleaux/pire
|
src/Pire/Parser/Telescope.hs
|
bsd-3-clause
| 6,543
| 0
| 15
| 1,949
| 1,224
| 655
| 569
| 120
| 1
|
module ImportDict2 where
data A a = MkA a deriving Show
y = x
x = 3
|
roberth/uu-helium
|
test/correct/ImportDict2.hs
|
gpl-3.0
| 69
| 0
| 6
| 18
| 27
| 17
| 10
| 4
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hr-HR">
<title>Support for the Open API Specification | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/openapi/src/main/javahelp/org/zaproxy/zap/extension/openapi/resources/help_hr_HR/helpset_hr_HR.hs
|
apache-2.0
| 1,000
| 80
| 66
| 164
| 423
| 214
| 209
| -1
| -1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns#-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -ddump-splices #-}
import Test.Hspec
import Test.HUnit ((@?=))
import Data.Text (Text, pack, unpack, singleton)
import Yesod.Routes.Class hiding (Route)
import qualified Yesod.Routes.Class as YRC
import Yesod.Routes.Parse (parseRoutesNoCheck, parseTypeTree, TypeTree (..))
import Yesod.Routes.Overlap (findOverlapNames)
import Yesod.Routes.TH hiding (Dispatch)
import Language.Haskell.TH.Syntax
import Hierarchy
import qualified Data.ByteString.Char8 as S8
import qualified Data.Set as Set
data MyApp = MyApp
data MySub = MySub
instance RenderRoute MySub where
data
Route
MySub = MySubRoute ([Text], [(Text, Text)])
deriving (Show, Eq, Read)
renderRoute (MySubRoute x) = x
instance ParseRoute MySub where
parseRoute = Just . MySubRoute
getMySub :: MyApp -> MySub
getMySub MyApp = MySub
data MySubParam = MySubParam Int
instance RenderRoute MySubParam where
data
Route
MySubParam = ParamRoute Char
deriving (Show, Eq, Read)
renderRoute (ParamRoute x) = ([singleton x], [])
instance ParseRoute MySubParam where
parseRoute ([unpack -> [x]], _) = Just $ ParamRoute x
parseRoute _ = Nothing
getMySubParam :: MyApp -> Int -> MySubParam
getMySubParam _ = MySubParam
do
texts <- [t|[Text]|]
let resLeaves = map ResourceLeaf
[ Resource "RootR" [] (Methods Nothing ["GET"]) ["foo", "bar"] True
, Resource "BlogPostR" [Static "blog", Dynamic $ ConT ''Text] (Methods Nothing ["GET", "POST"]) [] True
, Resource "WikiR" [Static "wiki"] (Methods (Just texts) []) [] True
, Resource "SubsiteR" [Static "subsite"] (Subsite (ConT ''MySub) "getMySub") [] True
, Resource "SubparamR" [Static "subparam", Dynamic $ ConT ''Int] (Subsite (ConT ''MySubParam) "getMySubParam") [] True
]
resParent = ResourceParent
"ParentR"
True
[ Static "foo"
, Dynamic $ ConT ''Text
]
[ ResourceLeaf $ Resource "ChildR" [] (Methods Nothing ["GET"]) ["child"] True
]
ress = resParent : resLeaves
rrinst <- mkRenderRouteInstance [] (ConT ''MyApp) ress
rainst <- mkRouteAttrsInstance [] (ConT ''MyApp) ress
prinst <- mkParseRouteInstance [] (ConT ''MyApp) ress
dispatch <- mkDispatchClause MkDispatchSettings
{ mdsRunHandler = [|runHandler|]
, mdsSubDispatcher = [|subDispatch dispatcher|]
, mdsGetPathInfo = [|fst|]
, mdsMethod = [|snd|]
, mdsSetPathInfo = [|\p (_, m) -> (p, m)|]
, mds404 = [|pack "404"|]
, mds405 = [|pack "405"|]
, mdsGetHandler = defaultGetHandler
, mdsUnwrapper = return
} ress
return
#if MIN_VERSION_template_haskell(2,11,0)
$ InstanceD Nothing
#else
$ InstanceD
#endif
[]
(ConT ''Dispatcher
`AppT` ConT ''MyApp
`AppT` ConT ''MyApp)
[FunD (mkName "dispatcher") [dispatch]]
: prinst
: rainst
: rrinst
instance Dispatcher MySub master where
dispatcher env (pieces, _method) =
( pack $ "subsite: " ++ show pieces
, Just $ envToMaster env route
)
where
route = MySubRoute (pieces, [])
instance Dispatcher MySubParam master where
dispatcher env (pieces, _method) =
case map unpack pieces of
[[c]] ->
let route = ParamRoute c
toMaster = envToMaster env
MySubParam i = envSub env
in ( pack $ "subparam " ++ show i ++ ' ' : [c]
, Just $ toMaster route
)
_ -> (pack "404", Nothing)
{-
thDispatchAlias
:: (master ~ MyApp, sub ~ MyApp, handler ~ String, app ~ (String, Maybe (YRC.Route MyApp)))
=> master
-> sub
-> (YRC.Route sub -> YRC.Route master)
-> app -- ^ 404 page
-> handler -- ^ 405 page
-> Text -- ^ method
-> [Text]
-> app
--thDispatchAlias = thDispatch
thDispatchAlias master sub toMaster app404 handler405 method0 pieces0 =
case dispatch pieces0 of
Just f -> f master sub toMaster app404 handler405 method0
Nothing -> app404
where
dispatch = toDispatch
[ Route [] False $ \pieces ->
case pieces of
[] -> do
Just $ \master' sub' toMaster' _app404' handler405' method ->
let handler =
case Map.lookup method methodsRootR of
Just f -> f
Nothing -> handler405'
in runHandler handler master' sub' RootR toMaster'
_ -> error "Invariant violated"
, Route [D.Static "blog", D.Dynamic] False $ \pieces ->
case pieces of
[_, x2] -> do
y2 <- fromPathPiece x2
Just $ \master' sub' toMaster' _app404' handler405' method ->
let handler =
case Map.lookup method methodsBlogPostR of
Just f -> f y2
Nothing -> handler405'
in runHandler handler master' sub' (BlogPostR y2) toMaster'
_ -> error "Invariant violated"
, Route [D.Static "wiki"] True $ \pieces ->
case pieces of
_:x2 -> do
y2 <- fromPathMultiPiece x2
Just $ \master' sub' toMaster' _app404' _handler405' _method ->
let handler = handleWikiR y2
in runHandler handler master' sub' (WikiR y2) toMaster'
_ -> error "Invariant violated"
, Route [D.Static "subsite"] True $ \pieces ->
case pieces of
_:x2 -> do
Just $ \master' sub' toMaster' app404' handler405' method ->
dispatcher master' (getMySub sub') (toMaster' . SubsiteR) app404' handler405' method x2
_ -> error "Invariant violated"
, Route [D.Static "subparam", D.Dynamic] True $ \pieces ->
case pieces of
_:x2:x3 -> do
y2 <- fromPathPiece x2
Just $ \master' sub' toMaster' app404' handler405' method ->
dispatcher master' (getMySubParam sub' y2) (toMaster' . SubparamR y2) app404' handler405' method x3
_ -> error "Invariant violated"
]
methodsRootR = Map.fromList [("GET", getRootR)]
methodsBlogPostR = Map.fromList [("GET", getBlogPostR), ("POST", postBlogPostR)]
-}
main :: IO ()
main = hspec $ do
describe "RenderRoute instance" $ do
it "renders root correctly" $ renderRoute RootR @?= ([], [])
it "renders blog post correctly" $ renderRoute (BlogPostR $ pack "foo") @?= (map pack ["blog", "foo"], [])
it "renders wiki correctly" $ renderRoute (WikiR $ map pack ["foo", "bar"]) @?= (map pack ["wiki", "foo", "bar"], [])
it "renders subsite correctly" $ renderRoute (SubsiteR $ MySubRoute (map pack ["foo", "bar"], [(pack "baz", pack "bin")]))
@?= (map pack ["subsite", "foo", "bar"], [(pack "baz", pack "bin")])
it "renders subsite param correctly" $ renderRoute (SubparamR 6 $ ParamRoute 'c')
@?= (map pack ["subparam", "6", "c"], [])
describe "thDispatch" $ do
let disp m ps = dispatcher
(Env
{ envToMaster = id
, envMaster = MyApp
, envSub = MyApp
})
(map pack ps, S8.pack m)
it "routes to root" $ disp "GET" [] @?= (pack "this is the root", Just RootR)
it "POST root is 405" $ disp "POST" [] @?= (pack "405", Just RootR)
it "invalid page is a 404" $ disp "GET" ["not-found"] @?= (pack "404", Nothing :: Maybe (YRC.Route MyApp))
it "routes to blog post" $ disp "GET" ["blog", "somepost"]
@?= (pack "some blog post: somepost", Just $ BlogPostR $ pack "somepost")
it "routes to blog post, POST method" $ disp "POST" ["blog", "somepost2"]
@?= (pack "POST some blog post: somepost2", Just $ BlogPostR $ pack "somepost2")
it "routes to wiki" $ disp "DELETE" ["wiki", "foo", "bar"]
@?= (pack "the wiki: [\"foo\",\"bar\"]", Just $ WikiR $ map pack ["foo", "bar"])
it "routes to subsite" $ disp "PUT" ["subsite", "baz"]
@?= (pack "subsite: [\"baz\"]", Just $ SubsiteR $ MySubRoute ([pack "baz"], []))
it "routes to subparam" $ disp "PUT" ["subparam", "6", "q"]
@?= (pack "subparam 6 q", Just $ SubparamR 6 $ ParamRoute 'q')
describe "parsing" $ do
it "subsites work" $ do
parseRoute ([pack "subsite", pack "foo"], [(pack "bar", pack "baz")]) @?=
Just (SubsiteR $ MySubRoute ([pack "foo"], [(pack "bar", pack "baz")]))
describe "overlap checking" $ do
it "catches overlapping statics" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/foo Foo1
/foo Foo2
|]
findOverlapNames routes @?= [("Foo1", "Foo2")]
it "catches overlapping dynamics" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/#Int Foo1
/#String Foo2
|]
findOverlapNames routes @?= [("Foo1", "Foo2")]
it "catches overlapping statics and dynamics" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/foo Foo1
/#String Foo2
|]
findOverlapNames routes @?= [("Foo1", "Foo2")]
it "catches overlapping multi" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/foo Foo1
/##*Strings Foo2
|]
findOverlapNames routes @?= [("Foo1", "Foo2")]
it "catches overlapping subsite" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/foo Foo1
/foo Foo2 Subsite getSubsite
|]
findOverlapNames routes @?= [("Foo1", "Foo2")]
it "no false positives" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/foo Foo1
/bar/#String Foo2
|]
findOverlapNames routes @?= []
it "obeys ignore rules" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/foo Foo1
/#!String Foo2
/!foo Foo3
|]
findOverlapNames routes @?= []
it "obeys multipiece ignore rules #779" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/foo Foo1
/+![String] Foo2
|]
findOverlapNames routes @?= []
it "ignore rules for entire route #779" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/foo Foo1
!/+[String] Foo2
!/#String Foo3
!/foo Foo4
|]
findOverlapNames routes @?= []
it "ignore rules for hierarchy" $ do
let routes :: [ResourceTree String]
routes = [parseRoutesNoCheck|
/+[String] Foo1
!/foo Foo2:
/foo Foo3
/foo Foo4:
/!#foo Foo5
|]
findOverlapNames routes @?= []
it "proper boolean logic" $ do
let routes = [parseRoutesNoCheck|
/foo/bar Foo1
/foo/baz Foo2
/bar/baz Foo3
|]
findOverlapNames routes @?= []
describe "routeAttrs" $ do
it "works" $ do
routeAttrs RootR @?= Set.fromList [pack "foo", pack "bar"]
it "hierarchy" $ do
routeAttrs (ParentR (pack "ignored") ChildR) @?= Set.singleton (pack "child")
hierarchy
describe "parseRouteType" $ do
let success s t = it s $ parseTypeTree s @?= Just t
failure s = it s $ parseTypeTree s @?= Nothing
success "Int" $ TTTerm "Int"
success "(Int)" $ TTTerm "Int"
failure "(Int"
failure "(Int))"
failure "[Int"
failure "[Int]]"
success "[Int]" $ TTList $ TTTerm "Int"
success "Foo-Bar" $ TTApp (TTTerm "Foo") (TTTerm "Bar")
success "Foo-Bar-Baz" $ TTApp (TTTerm "Foo") (TTTerm "Bar") `TTApp` TTTerm "Baz"
success "Foo Bar" $ TTApp (TTTerm "Foo") (TTTerm "Bar")
success "Foo Bar Baz" $ TTApp (TTTerm "Foo") (TTTerm "Bar") `TTApp` TTTerm "Baz"
getRootR :: Text
getRootR = pack "this is the root"
getBlogPostR :: Text -> String
getBlogPostR t = "some blog post: " ++ unpack t
postBlogPostR :: Text -> Text
postBlogPostR t = pack $ "POST some blog post: " ++ unpack t
handleWikiR :: [Text] -> String
handleWikiR ts = "the wiki: " ++ show ts
getChildR :: Text -> Text
getChildR = id
|
s9gf4ult/yesod
|
yesod-core/test/RouteSpec.hs
|
mit
| 13,258
| 0
| 22
| 4,237
| 2,993
| 1,543
| 1,450
| 212
| 1
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[StgSyn]{Shared term graph (STG) syntax for spineless-tagless code generation}
This data type represents programs just before code generation (conversion to
@Cmm@): basically, what we have is a stylised form of @CoreSyntax@, the style
being one that happens to be ideally suited to spineless tagless code
generation.
-}
{-# LANGUAGE CPP #-}
module StgSyn (
GenStgArg(..),
GenStgBinding(..), GenStgExpr(..), GenStgRhs(..),
GenStgAlt, AltType(..),
UpdateFlag(..), isUpdatable,
StgBinderInfo,
noBinderInfo, stgSatOcc, stgUnsatOcc, satCallsOnly,
combineStgBinderInfo,
-- a set of synonyms for the most common (only :-) parameterisation
StgArg,
StgBinding, StgExpr, StgRhs, StgAlt,
-- StgOp
StgOp(..),
-- utils
topStgBindHasCafRefs, stgArgHasCafRefs, stgRhsArity,
isDllConApp,
stgArgType,
stripStgTicksTop,
pprStgBinding, pprStgBindings
) where
#include "HsVersions.h"
import CoreSyn ( AltCon, Tickish )
import CostCentre ( CostCentreStack )
import Data.List ( intersperse )
import DataCon
import DynFlags
import FastString
import ForeignCall ( ForeignCall )
import Id
import IdInfo ( mayHaveCafRefs )
import Literal ( Literal, literalType )
import Module ( Module )
import Outputable
import Packages ( isDllName )
import Platform
import PprCore ( {- instances -} )
import PrimOp ( PrimOp, PrimCall )
import TyCon ( PrimRep(..), TyCon )
import Type ( Type )
import RepType ( typePrimRep )
import Unique ( Unique )
import Util
{-
************************************************************************
* *
\subsection{@GenStgBinding@}
* *
************************************************************************
As usual, expressions are interesting; other things are boring. Here
are the boring things [except note the @GenStgRhs@], parameterised
with respect to binder and occurrence information (just as in
@CoreSyn@):
-}
data GenStgBinding bndr occ
= StgNonRec bndr (GenStgRhs bndr occ)
| StgRec [(bndr, GenStgRhs bndr occ)]
{-
************************************************************************
* *
\subsection{@GenStgArg@}
* *
************************************************************************
-}
data GenStgArg occ
= StgVarArg occ
| StgLitArg Literal
-- | Does this constructor application refer to
-- anything in a different *Windows* DLL?
-- If so, we can't allocate it statically
isDllConApp :: DynFlags -> Module -> DataCon -> [StgArg] -> Bool
isDllConApp dflags this_mod con args
| platformOS (targetPlatform dflags) == OSMinGW32
= isDllName dflags this_pkg this_mod (dataConName con) || any is_dll_arg args
| otherwise = False
where
-- NB: typePrimRep is legit because any free variables won't have
-- unlifted type (there are no unlifted things at top level)
is_dll_arg :: StgArg -> Bool
is_dll_arg (StgVarArg v) = isAddrRep (typePrimRep (idType v))
&& isDllName dflags this_pkg this_mod (idName v)
is_dll_arg _ = False
this_pkg = thisPackage dflags
-- True of machine addresses; these are the things that don't
-- work across DLLs. The key point here is that VoidRep comes
-- out False, so that a top level nullary GADT constructor is
-- False for isDllConApp
-- data T a where
-- T1 :: T Int
-- gives
-- T1 :: forall a. (a~Int) -> T a
-- and hence the top-level binding
-- $WT1 :: T Int
-- $WT1 = T1 Int (Coercion (Refl Int))
-- The coercion argument here gets VoidRep
isAddrRep :: PrimRep -> Bool
isAddrRep AddrRep = True
isAddrRep PtrRep = True
isAddrRep _ = False
-- | Type of an @StgArg@
--
-- Very half baked because we have lost the type arguments.
stgArgType :: StgArg -> Type
stgArgType (StgVarArg v) = idType v
stgArgType (StgLitArg lit) = literalType lit
-- | Strip ticks of a given type from an STG expression
stripStgTicksTop :: (Tickish Id -> Bool) -> StgExpr -> ([Tickish Id], StgExpr)
stripStgTicksTop p = go []
where go ts (StgTick t e) | p t = go (t:ts) e
go ts other = (reverse ts, other)
{-
************************************************************************
* *
\subsection{STG expressions}
* *
************************************************************************
The @GenStgExpr@ data type is parameterised on binder and occurrence
info, as before.
************************************************************************
* *
\subsubsection{@GenStgExpr@ application}
* *
************************************************************************
An application is of a function to a list of atoms [not expressions].
Operationally, we want to push the arguments on the stack and call the
function. (If the arguments were expressions, we would have to build
their closures first.)
There is no constructor for a lone variable; it would appear as
@StgApp var []@.
-}
data GenStgExpr bndr occ
= StgApp
occ -- function
[GenStgArg occ] -- arguments; may be empty
{-
************************************************************************
* *
\subsubsection{@StgConApp@ and @StgPrimApp@---saturated applications}
* *
************************************************************************
There are specialised forms of application, for constructors,
primitives, and literals.
-}
| StgLit Literal
-- StgConApp is vital for returning unboxed tuples or sums
-- which can't be let-bound first
| StgConApp DataCon
[GenStgArg occ] -- Saturated
[Type] -- See Note [Types in StgConApp] in UnariseStg
| StgOpApp StgOp -- Primitive op or foreign call
[GenStgArg occ] -- Saturated.
Type -- Result type
-- We need to know this so that we can
-- assign result registers
{-
************************************************************************
* *
\subsubsection{@StgLam@}
* *
************************************************************************
StgLam is used *only* during CoreToStg's work. Before CoreToStg has
finished it encodes (\x -> e) as (let f = \x -> e in f)
-}
| StgLam
[bndr]
StgExpr -- Body of lambda
{-
************************************************************************
* *
\subsubsection{@GenStgExpr@: case-expressions}
* *
************************************************************************
This has the same boxed/unboxed business as Core case expressions.
-}
| StgCase
(GenStgExpr bndr occ)
-- the thing to examine
bndr -- binds the result of evaluating the scrutinee
AltType
[GenStgAlt bndr occ]
-- The DEFAULT case is always *first*
-- if it is there at all
{-
************************************************************************
* *
\subsubsection{@GenStgExpr@: @let(rec)@-expressions}
* *
************************************************************************
The various forms of let(rec)-expression encode most of the
interesting things we want to do.
\begin{enumerate}
\item
\begin{verbatim}
let-closure x = [free-vars] [args] expr
in e
\end{verbatim}
is equivalent to
\begin{verbatim}
let x = (\free-vars -> \args -> expr) free-vars
\end{verbatim}
\tr{args} may be empty (and is for most closures). It isn't under
circumstances like this:
\begin{verbatim}
let x = (\y -> y+z)
\end{verbatim}
This gets mangled to
\begin{verbatim}
let-closure x = [z] [y] (y+z)
\end{verbatim}
The idea is that we compile code for @(y+z)@ in an environment in which
@z@ is bound to an offset from \tr{Node}, and @y@ is bound to an
offset from the stack pointer.
(A let-closure is an @StgLet@ with a @StgRhsClosure@ RHS.)
\item
\begin{verbatim}
let-constructor x = Constructor [args]
in e
\end{verbatim}
(A let-constructor is an @StgLet@ with a @StgRhsCon@ RHS.)
\item
Letrec-expressions are essentially the same deal as
let-closure/let-constructor, so we use a common structure and
distinguish between them with an @is_recursive@ boolean flag.
\item
\begin{verbatim}
let-unboxed u = an arbitrary arithmetic expression in unboxed values
in e
\end{verbatim}
All the stuff on the RHS must be fully evaluated.
No function calls either!
(We've backed away from this toward case-expressions with
suitably-magical alts ...)
\item
~[Advanced stuff here! Not to start with, but makes pattern matching
generate more efficient code.]
\begin{verbatim}
let-escapes-not fail = expr
in e'
\end{verbatim}
Here the idea is that @e'@ guarantees not to put @fail@ in a data structure,
or pass it to another function. All @e'@ will ever do is tail-call @fail@.
Rather than build a closure for @fail@, all we need do is to record the stack
level at the moment of the @let-escapes-not@; then entering @fail@ is just
a matter of adjusting the stack pointer back down to that point and entering
the code for it.
Another example:
\begin{verbatim}
f x y = let z = huge-expression in
if y==1 then z else
if y==2 then z else
1
\end{verbatim}
(A let-escapes-not is an @StgLetNoEscape@.)
\item
We may eventually want:
\begin{verbatim}
let-literal x = Literal
in e
\end{verbatim}
\end{enumerate}
And so the code for let(rec)-things:
-}
| StgLet
(GenStgBinding bndr occ) -- right hand sides (see below)
(GenStgExpr bndr occ) -- body
| StgLetNoEscape
(GenStgBinding bndr occ) -- right hand sides (see below)
(GenStgExpr bndr occ) -- body
{-
%************************************************************************
%* *
\subsubsection{@GenStgExpr@: @hpc@, @scc@ and other debug annotations}
%* *
%************************************************************************
Finally for @hpc@ expressions we introduce a new STG construct.
-}
| StgTick
(Tickish bndr)
(GenStgExpr bndr occ) -- sub expression
-- END of GenStgExpr
{-
************************************************************************
* *
\subsection{STG right-hand sides}
* *
************************************************************************
Here's the rest of the interesting stuff for @StgLet@s; the first
flavour is for closures:
-}
data GenStgRhs bndr occ
= StgRhsClosure
CostCentreStack -- CCS to be attached (default is CurrentCCS)
StgBinderInfo -- Info about how this binder is used (see below)
[occ] -- non-global free vars; a list, rather than
-- a set, because order is important
!UpdateFlag -- ReEntrant | Updatable | SingleEntry
[bndr] -- arguments; if empty, then not a function;
-- as above, order is important.
(GenStgExpr bndr occ) -- body
{-
An example may be in order. Consider:
\begin{verbatim}
let t = \x -> \y -> ... x ... y ... p ... q in e
\end{verbatim}
Pulling out the free vars and stylising somewhat, we get the equivalent:
\begin{verbatim}
let t = (\[p,q] -> \[x,y] -> ... x ... y ... p ...q) p q
\end{verbatim}
Stg-operationally, the @[x,y]@ are on the stack, the @[p,q]@ are
offsets from @Node@ into the closure, and the code ptr for the closure
will be exactly that in parentheses above.
The second flavour of right-hand-side is for constructors (simple but important):
-}
| StgRhsCon
CostCentreStack -- CCS to be attached (default is CurrentCCS).
-- Top-level (static) ones will end up with
-- DontCareCCS, because we don't count static
-- data in heap profiles, and we don't set CCCS
-- from static closure.
DataCon -- Constructor. Never an unboxed tuple or sum, as those
-- are not allocated.
[GenStgArg occ] -- Args
stgRhsArity :: StgRhs -> Int
stgRhsArity (StgRhsClosure _ _ _ _ bndrs _)
= ASSERT( all isId bndrs ) length bndrs
-- The arity never includes type parameters, but they should have gone by now
stgRhsArity (StgRhsCon _ _ _) = 0
-- Note [CAF consistency]
-- ~~~~~~~~~~~~~~~~~~~~~~
--
-- `topStgBindHasCafRefs` is only used by an assert (`consistentCafInfo` in
-- `CoreToStg`) to make sure CAF-ness predicted by `TidyPgm` is consistent with
-- reality.
--
-- Specifically, if the RHS mentions any Id that itself is marked
-- `MayHaveCafRefs`; or if the binding is a top-level updateable thunk; then the
-- `Id` for the binding should be marked `MayHaveCafRefs`. The potential trouble
-- is that `TidyPgm` computed the CAF info on the `Id` but some transformations
-- have taken place since then.
topStgBindHasCafRefs :: GenStgBinding bndr Id -> Bool
topStgBindHasCafRefs (StgNonRec _ rhs)
= topRhsHasCafRefs rhs
topStgBindHasCafRefs (StgRec binds)
= any topRhsHasCafRefs (map snd binds)
topRhsHasCafRefs :: GenStgRhs bndr Id -> Bool
topRhsHasCafRefs (StgRhsClosure _ _ _ upd _ body)
= -- See Note [CAF consistency]
isUpdatable upd || exprHasCafRefs body
topRhsHasCafRefs (StgRhsCon _ _ args)
= any stgArgHasCafRefs args
exprHasCafRefs :: GenStgExpr bndr Id -> Bool
exprHasCafRefs (StgApp f args)
= stgIdHasCafRefs f || any stgArgHasCafRefs args
exprHasCafRefs StgLit{}
= False
exprHasCafRefs (StgConApp _ args _)
= any stgArgHasCafRefs args
exprHasCafRefs (StgOpApp _ args _)
= any stgArgHasCafRefs args
exprHasCafRefs (StgLam _ body)
= exprHasCafRefs body
exprHasCafRefs (StgCase scrt _ _ alts)
= exprHasCafRefs scrt || any altHasCafRefs alts
exprHasCafRefs (StgLet bind body)
= bindHasCafRefs bind || exprHasCafRefs body
exprHasCafRefs (StgLetNoEscape bind body)
= bindHasCafRefs bind || exprHasCafRefs body
exprHasCafRefs (StgTick _ expr)
= exprHasCafRefs expr
bindHasCafRefs :: GenStgBinding bndr Id -> Bool
bindHasCafRefs (StgNonRec _ rhs)
= rhsHasCafRefs rhs
bindHasCafRefs (StgRec binds)
= any rhsHasCafRefs (map snd binds)
rhsHasCafRefs :: GenStgRhs bndr Id -> Bool
rhsHasCafRefs (StgRhsClosure _ _ _ _ _ body)
= exprHasCafRefs body
rhsHasCafRefs (StgRhsCon _ _ args)
= any stgArgHasCafRefs args
altHasCafRefs :: GenStgAlt bndr Id -> Bool
altHasCafRefs (_, _, rhs) = exprHasCafRefs rhs
stgArgHasCafRefs :: GenStgArg Id -> Bool
stgArgHasCafRefs (StgVarArg id)
= stgIdHasCafRefs id
stgArgHasCafRefs _
= False
stgIdHasCafRefs :: Id -> Bool
stgIdHasCafRefs id =
-- We are looking for occurrences of an Id that is bound at top level, and may
-- have CAF refs. At this point (after TidyPgm) top-level Ids (whether
-- imported or defined in this module) are GlobalIds, so the test is easy.
isGlobalId id && mayHaveCafRefs (idCafInfo id)
-- Here's the @StgBinderInfo@ type, and its combining op:
data StgBinderInfo
= NoStgBinderInfo
| SatCallsOnly -- All occurrences are *saturated* *function* calls
-- This means we don't need to build an info table and
-- slow entry code for the thing
-- Thunks never get this value
noBinderInfo, stgUnsatOcc, stgSatOcc :: StgBinderInfo
noBinderInfo = NoStgBinderInfo
stgUnsatOcc = NoStgBinderInfo
stgSatOcc = SatCallsOnly
satCallsOnly :: StgBinderInfo -> Bool
satCallsOnly SatCallsOnly = True
satCallsOnly NoStgBinderInfo = False
combineStgBinderInfo :: StgBinderInfo -> StgBinderInfo -> StgBinderInfo
combineStgBinderInfo SatCallsOnly SatCallsOnly = SatCallsOnly
combineStgBinderInfo _ _ = NoStgBinderInfo
--------------
pp_binder_info :: StgBinderInfo -> SDoc
pp_binder_info NoStgBinderInfo = empty
pp_binder_info SatCallsOnly = text "sat-only"
{-
************************************************************************
* *
\subsection[Stg-case-alternatives]{STG case alternatives}
* *
************************************************************************
Very like in @CoreSyntax@ (except no type-world stuff).
The type constructor is guaranteed not to be abstract; that is, we can
see its representation. This is important because the code generator
uses it to determine return conventions etc. But it's not trivial
where there's a module loop involved, because some versions of a type
constructor might not have all the constructors visible. So
mkStgAlgAlts (in CoreToStg) ensures that it gets the TyCon from the
constructors or literals (which are guaranteed to have the Real McCoy)
rather than from the scrutinee type.
-}
type GenStgAlt bndr occ
= (AltCon, -- alts: data constructor,
[bndr], -- constructor's parameters,
GenStgExpr bndr occ) -- ...right-hand side.
data AltType
= PolyAlt -- Polymorphic (a type variable)
| MultiValAlt Int -- Multi value of this arity (unboxed tuple or sum)
| AlgAlt TyCon -- Algebraic data type; the AltCons will be DataAlts
| PrimAlt TyCon -- Primitive data type; the AltCons will be LitAlts
{-
************************************************************************
* *
\subsection[Stg]{The Plain STG parameterisation}
* *
************************************************************************
This happens to be the only one we use at the moment.
-}
type StgBinding = GenStgBinding Id Id
type StgArg = GenStgArg Id
type StgExpr = GenStgExpr Id Id
type StgRhs = GenStgRhs Id Id
type StgAlt = GenStgAlt Id Id
{-
************************************************************************
* *
\subsubsection[UpdateFlag-datatype]{@UpdateFlag@}
* *
************************************************************************
This is also used in @LambdaFormInfo@ in the @ClosureInfo@ module.
A @ReEntrant@ closure may be entered multiple times, but should not be
updated or blackholed. An @Updatable@ closure should be updated after
evaluation (and may be blackholed during evaluation). A @SingleEntry@
closure will only be entered once, and so need not be updated but may
safely be blackholed.
-}
data UpdateFlag = ReEntrant | Updatable | SingleEntry
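-- For intuition, an informal example (the binder names below are illustrative
-- only, not taken from real code):
--
-- >  let f = \x -> x + y            -- f's closure is ReEntrant
-- >      t = some_big_computation y -- t's closure is Updatable
--
-- the function closure may be entered many times without being updated, while
-- the thunk is overwritten (updated) with its value after its first evaluation.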
instance Outputable UpdateFlag where
ppr u = char $ case u of
ReEntrant -> 'r'
Updatable -> 'u'
SingleEntry -> 's'
isUpdatable :: UpdateFlag -> Bool
isUpdatable ReEntrant = False
isUpdatable SingleEntry = False
isUpdatable Updatable = True
{-
************************************************************************
* *
\subsubsection{StgOp}
* *
************************************************************************
An StgOp allows us to group together PrimOps and ForeignCalls.
It's quite useful to move these around together, notably
in StgOpApp and COpStmt.
-}
data StgOp
= StgPrimOp PrimOp
| StgPrimCallOp PrimCall
| StgFCallOp ForeignCall Unique
-- The Unique is occasionally needed by the C pretty-printer
-- (which lacks a unique supply), notably when generating a
-- typedef for foreign-export-dynamic
{-
************************************************************************
* *
\subsection[Stg-pretty-printing]{Pretty-printing}
* *
************************************************************************
Robin Popplestone asked for semi-colon separators on STG binds; here's
hoping he likes terminators instead... Ditto for case alternatives.
-}
pprGenStgBinding :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> GenStgBinding bndr bdee -> SDoc
pprGenStgBinding (StgNonRec bndr rhs)
= hang (hsep [pprBndr LetBind bndr, equals])
4 (ppr rhs <> semi)
pprGenStgBinding (StgRec pairs)
= vcat $ ifPprDebug (text "{- StgRec (begin) -}") :
map (ppr_bind) pairs ++ [ifPprDebug (text "{- StgRec (end) -}")]
where
ppr_bind (bndr, expr)
= hang (hsep [pprBndr LetBind bndr, equals])
4 (ppr expr <> semi)
pprStgBinding :: StgBinding -> SDoc
pprStgBinding bind = pprGenStgBinding bind
pprStgBindings :: [StgBinding] -> SDoc
pprStgBindings binds = vcat $ intersperse blankLine (map pprGenStgBinding binds)
instance (Outputable bdee) => Outputable (GenStgArg bdee) where
ppr = pprStgArg
instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> Outputable (GenStgBinding bndr bdee) where
ppr = pprGenStgBinding
instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> Outputable (GenStgExpr bndr bdee) where
ppr = pprStgExpr
instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> Outputable (GenStgRhs bndr bdee) where
ppr rhs = pprStgRhs rhs
pprStgArg :: (Outputable bdee) => GenStgArg bdee -> SDoc
pprStgArg (StgVarArg var) = ppr var
pprStgArg (StgLitArg con) = ppr con
pprStgExpr :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> GenStgExpr bndr bdee -> SDoc
-- special case
pprStgExpr (StgLit lit) = ppr lit
-- general case
pprStgExpr (StgApp func args)
= hang (ppr func) 4 (sep (map (ppr) args))
pprStgExpr (StgConApp con args _)
= hsep [ ppr con, brackets (interppSP args) ]
pprStgExpr (StgOpApp op args _)
= hsep [ pprStgOp op, brackets (interppSP args)]
pprStgExpr (StgLam bndrs body)
= sep [ char '\\' <+> ppr_list (map (pprBndr LambdaBind) bndrs)
<+> text "->",
pprStgExpr body ]
where ppr_list = brackets . fsep . punctuate comma
-- special case: let v = <very specific thing>
-- in
-- let ...
-- in
-- ...
--
-- Very special! Suspicious! (SLPJ)
{-
pprStgExpr (StgLet srt (StgNonRec bndr (StgRhsClosure cc bi free_vars upd_flag args rhs))
expr@(StgLet _ _))
= ($$)
(hang (hcat [text "let { ", ppr bndr, ptext (sLit " = "),
ppr cc,
pp_binder_info bi,
text " [", ifPprDebug (interppSP free_vars), ptext (sLit "] \\"),
ppr upd_flag, text " [",
interppSP args, char ']'])
8 (sep [hsep [ppr rhs, text "} in"]]))
(ppr expr)
-}
-- special case: let ... in let ...
pprStgExpr (StgLet bind expr@(StgLet _ _))
= ($$)
(sep [hang (text "let {")
2 (hsep [pprGenStgBinding bind, text "} in"])])
(ppr expr)
-- general case
pprStgExpr (StgLet bind expr)
= sep [hang (text "let {") 2 (pprGenStgBinding bind),
hang (text "} in ") 2 (ppr expr)]
pprStgExpr (StgLetNoEscape bind expr)
= sep [hang (text "let-no-escape {")
2 (pprGenStgBinding bind),
hang (text "} in ")
2 (ppr expr)]
pprStgExpr (StgTick tickish expr)
= sdocWithDynFlags $ \dflags ->
if gopt Opt_PprShowTicks dflags
then sep [ ppr tickish, pprStgExpr expr ]
else pprStgExpr expr
pprStgExpr (StgCase expr bndr alt_type alts)
= sep [sep [text "case",
nest 4 (hsep [pprStgExpr expr,
ifPprDebug (dcolon <+> ppr alt_type)]),
text "of", pprBndr CaseBind bndr, char '{'],
nest 2 (vcat (map pprStgAlt alts)),
char '}']
pprStgAlt :: (OutputableBndr bndr, Outputable occ, Ord occ)
=> GenStgAlt bndr occ -> SDoc
pprStgAlt (con, params, expr)
= hang (hsep [ppr con, sep (map (pprBndr CasePatBind) params), text "->"])
4 (ppr expr <> semi)
pprStgOp :: StgOp -> SDoc
pprStgOp (StgPrimOp op) = ppr op
pprStgOp (StgPrimCallOp op)= ppr op
pprStgOp (StgFCallOp op _) = ppr op
instance Outputable AltType where
ppr PolyAlt = text "Polymorphic"
ppr (MultiValAlt n) = text "MultiAlt" <+> ppr n
ppr (AlgAlt tc) = text "Alg" <+> ppr tc
ppr (PrimAlt tc) = text "Prim" <+> ppr tc
pprStgRhs :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
=> GenStgRhs bndr bdee -> SDoc
-- special case
pprStgRhs (StgRhsClosure cc bi [free_var] upd_flag [{-no args-}] (StgApp func []))
= hsep [ ppr cc,
pp_binder_info bi,
brackets (ifPprDebug (ppr free_var)),
text " \\", ppr upd_flag, ptext (sLit " [] "), ppr func ]
-- general case
pprStgRhs (StgRhsClosure cc bi free_vars upd_flag args body)
= sdocWithDynFlags $ \dflags ->
hang (hsep [if gopt Opt_SccProfilingOn dflags then ppr cc else empty,
pp_binder_info bi,
ifPprDebug (brackets (interppSP free_vars)),
char '\\' <> ppr upd_flag, brackets (interppSP args)])
4 (ppr body)
pprStgRhs (StgRhsCon cc con args)
= hcat [ ppr cc,
space, ppr con, text "! ", brackets (interppSP args)]
|
mettekou/ghc
|
compiler/stgSyn/StgSyn.hs
|
bsd-3-clause
| 26,796
| 0
| 16
| 7,677
| 3,655
| 1,951
| 1,704
| 304
| 2
|
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
module DiskSync
( DiskSync
, ActiveTask (..)
, AppContext
, createAppContext
, getAppContext
, getActiveTasks
, newActiveTask
, addActiveTask
, rmActiveTask
, findActiveTask
, progressActiveTask
, resultActiveTask
, admctlActiveTask
, serverCksumActiveTask
, liftRpc
, runDiskSync
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import qualified Control.Exception as E
import Control.Concurrent
import Control.Monad.Trans
import Data.Maybe
import Rpc
import Errors
import Types
import VhdSync
data AppState = AppState
{ appActiveTasks :: [ActiveTask] }
type AppContext = MVar AppState
createAppContext :: IO AppContext
createAppContext = newMVar emptyAppState
data ActiveTask = ActiveTask {
atImage :: Image
, atDisk :: Disk
, atSync :: SyncProcess
, atProgress :: Maybe (SyncStage, SyncProgress)
, atResult :: Maybe (SyncResultDetails)
, atAdmctl :: Maybe (SyncAdmctlDetails)
, atSrvcksum :: Maybe (SyncSrvcksumDetails)
}
-- the primary application monad, on top of rpc one
newtype DiskSync a =
DiskSync (ReaderT AppContext (StateT AppState Rpc) a)
deriving (Monad,
MonadReader AppContext,
MonadState AppState,
MonadError DiskSyncError,
MonadIO)
runDiskSync :: AppContext -> DiskSync a -> Rpc a
runDiskSync ctx (DiskSync sm) =
do rpc_ctx <- rpcGetContext
       -- have to be careful not to deadlock on the MVar if an exception ensues, hence
-- the crazy usage of modifyMVar
-- FIXME: this is terrible though with the ugly tunneling of error
status <- liftIO . modifyMVar ctx $ \s0 -> do
status <- rpc rpc_ctx $ runStateT (runReaderT sm ctx) s0
case status of
Left err -> return (s0, Left err)
Right (r, s1) -> return (s1, Right r)
case status of
Left err -> throwError err
Right r -> return r
instance Functor DiskSync where
fmap = liftM
instance Applicative DiskSync where
pure = return
(<*>) = ap
liftRpc :: Rpc a -> DiskSync a
liftRpc = DiskSync . lift . lift
emptyAppState :: AppState
emptyAppState = AppState []
getAppContext :: DiskSync AppContext
getAppContext = ask
newActiveTask :: Image -> Disk -> SyncProcess -> ActiveTask
newActiveTask image disk sync = ActiveTask image disk sync Nothing Nothing Nothing Nothing
getActiveTasks :: DiskSync [ActiveTask]
getActiveTasks = appActiveTasks <$> get
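-- For example (a sketch; 'listTasks' is illustrative and not part of this
-- module), a query can be run from the Rpc layer like so:
--
-- > listTasks :: AppContext -> Rpc [ActiveTask]
-- > listTasks ctx = runDiskSync ctx getActiveTasks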
--- Does the new task have to be appended, or can we cons it t:s instead?
addActiveTask :: ActiveTask -> DiskSync ()
addActiveTask t = modify $ \s -> AppState (appActiveTasks s ++ [t])
findActiveTask :: DiskId -> DiskSync (Maybe ActiveTask)
findActiveTask disk_id =
fmap (listToMaybe . filter (equal disk_id) . appActiveTasks) get
equal disk_id at = diskId (atDisk at) == disk_id
modifyActiveTask :: DiskId -> (ActiveTask -> ActiveTask) -> DiskSync ()
modifyActiveTask disk_id f =
modify $ AppState . map mod . appActiveTasks
where
mod at | (diskId $ atDisk at) == disk_id = f at
| otherwise = at
progressActiveTask :: DiskId -> SyncStage -> SyncProgress -> DiskSync ()
progressActiveTask disk_id stage pr = modifyActiveTask disk_id $ \at -> at { atProgress = Just (stage,pr) }
resultActiveTask :: DiskId -> SyncResultDetails -> DiskSync ()
resultActiveTask disk_id result = modifyActiveTask disk_id $ \at -> at { atResult = Just result }
admctlActiveTask :: DiskId -> SyncAdmctlDetails -> DiskSync ()
admctlActiveTask disk_id admctl = modifyActiveTask disk_id $ \at -> at { atAdmctl = Just admctl }
serverCksumActiveTask :: DiskId -> SyncSrvcksumDetails -> DiskSync ()
serverCksumActiveTask disk_id srvcksum = modifyActiveTask disk_id $ \at -> at { atSrvcksum = Just srvcksum }
rmActiveTask :: DiskId -> DiskSync ()
rmActiveTask disk_id =
modify $ AppState . filter (not . equal disk_id) . appActiveTasks
|
jean-edouard/manager
|
disksync/DiskSync.hs
|
gpl-2.0
| 4,919
| 0
| 17
| 1,153
| 1,128
| 606
| 522
| 98
| 3
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
@DsMonad@: monadery used in desugaring
-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-} -- instance MonadThings is necessarily an orphan
module DsMonad (
DsM, mapM, mapAndUnzipM,
initDs, initDsTc, fixDs,
foldlM, foldrM, whenGOptM, unsetGOptM, unsetWOptM,
Applicative(..),(<$>),
newLocalName,
duplicateLocalDs, newSysLocalDs, newSysLocalsDs, newUniqueId,
newFailLocalDs, newPredVarDs,
getSrcSpanDs, putSrcSpanDs,
mkPrintUnqualifiedDs,
newUnique,
UniqSupply, newUniqueSupply,
getGhcModeDs, dsGetFamInstEnvs, dsGetStaticBindsVar,
dsLookupGlobal, dsLookupGlobalId, dsDPHBuiltin, dsLookupTyCon, dsLookupDataCon,
PArrBuiltin(..),
dsLookupDPHRdrEnv, dsLookupDPHRdrEnv_maybe,
dsInitPArrBuiltin,
DsMetaEnv, DsMetaVal(..), dsGetMetaEnv, dsLookupMetaEnv, dsExtendMetaEnv,
-- Warnings
DsWarning, warnDs, failWithDs, discardWarningsDs,
-- Data types
DsMatchContext(..),
EquationInfo(..), MatchResult(..), DsWrapper, idDsWrapper,
CanItFail(..), orFail
) where
import TcRnMonad
import FamInstEnv
import CoreSyn
import HsSyn
import TcIface
import LoadIface
import Finder
import PrelNames
import RdrName
import HscTypes
import Bag
import DataCon
import TyCon
import Id
import Module
import Outputable
import SrcLoc
import Type
import UniqSupply
import Name
import NameEnv
import DynFlags
import ErrUtils
import FastString
import Maybes
import GHC.Fingerprint
import Data.IORef
import Control.Monad
{-
************************************************************************
* *
Data types for the desugarer
* *
************************************************************************
-}
data DsMatchContext
= DsMatchContext (HsMatchContext Name) SrcSpan
deriving ()
data EquationInfo
= EqnInfo { eqn_pats :: [Pat Id], -- The patterns for an eqn
eqn_rhs :: MatchResult } -- What to do after match
instance Outputable EquationInfo where
ppr (EqnInfo pats _) = ppr pats
type DsWrapper = CoreExpr -> CoreExpr
idDsWrapper :: DsWrapper
idDsWrapper e = e
-- The semantics of (match vs (EqnInfo wrap pats rhs)) is the MatchResult
-- \fail. wrap (case vs of { pats -> rhs fail })
-- where vs are not bound by wrap
-- A MatchResult is an expression with a hole in it
data MatchResult
= MatchResult
CanItFail -- Tells whether the failure expression is used
(CoreExpr -> DsM CoreExpr)
-- Takes a expression to plug in at the
-- failure point(s). The expression should
-- be duplicatable!
data CanItFail = CanFail | CantFail
orFail :: CanItFail -> CanItFail -> CanItFail
orFail CantFail CantFail = CantFail
orFail _ _ = CanFail
{-
************************************************************************
* *
Monad functions
* *
************************************************************************
-}
-- Compatibility functions
fixDs :: (a -> DsM a) -> DsM a
fixDs = fixM
type DsWarning = (SrcSpan, SDoc)
-- Not quite the same as a WarnMsg, we have an SDoc here
-- and we'll do the print_unqual stuff later on to turn it
-- into a Doc.
initDs :: HscEnv
-> Module -> GlobalRdrEnv -> TypeEnv -> FamInstEnv
-> DsM a
-> IO (Messages, Maybe a)
-- Print errors and warnings, if any arise
initDs hsc_env mod rdr_env type_env fam_inst_env thing_inside
= do { msg_var <- newIORef (emptyBag, emptyBag)
; static_binds_var <- newIORef []
; let dflags = hsc_dflags hsc_env
(ds_gbl_env, ds_lcl_env) = mkDsEnvs dflags mod rdr_env type_env
fam_inst_env msg_var
static_binds_var
; either_res <- initTcRnIf 'd' hsc_env ds_gbl_env ds_lcl_env $
loadDAP $
initDPHBuiltins $
tryM thing_inside -- Catch exceptions (= errors during desugaring)
-- Display any errors and warnings
-- Note: if -Werror is used, we don't signal an error here.
; msgs <- readIORef msg_var
; let final_res | errorsFound dflags msgs = Nothing
| otherwise = case either_res of
Right res -> Just res
Left exn -> pprPanic "initDs" (text (show exn))
-- The (Left exn) case happens when the thing_inside throws
-- a UserError exception. Then it should have put an error
-- message in msg_var, so we just discard the exception
; return (msgs, final_res)
}
where
-- Extend the global environment with a 'GlobalRdrEnv' containing the exported entities of
-- * 'Data.Array.Parallel' iff '-XParallelArrays' specified (see also 'checkLoadDAP').
-- * 'Data.Array.Parallel.Prim' iff '-fvectorise' specified.
loadDAP thing_inside
= do { dapEnv <- loadOneModule dATA_ARRAY_PARALLEL_NAME checkLoadDAP paErr
; dappEnv <- loadOneModule dATA_ARRAY_PARALLEL_PRIM_NAME (goptM Opt_Vectorise) veErr
; updGblEnv (\env -> env {ds_dph_env = dapEnv `plusOccEnv` dappEnv }) thing_inside
}
where
loadOneModule :: ModuleName -- the module to load
-> DsM Bool -- under which condition
-> MsgDoc -- error message if module not found
-> DsM GlobalRdrEnv -- empty if condition 'False'
loadOneModule modname check err
= do { doLoad <- check
; if not doLoad
then return emptyGlobalRdrEnv
else do {
; result <- liftIO $ findImportedModule hsc_env modname Nothing
; case result of
FoundModule h -> loadModule err (fr_mod h)
_ -> pprPgmError "Unable to use Data Parallel Haskell (DPH):" err
} }
paErr = ptext (sLit "To use ParallelArrays,") <+> specBackend $$ hint1 $$ hint2
veErr = ptext (sLit "To use -fvectorise,") <+> specBackend $$ hint1 $$ hint2
specBackend = ptext (sLit "you must specify a DPH backend package")
hint1 = ptext (sLit "Look for packages named 'dph-lifted-*' with 'ghc-pkg'")
hint2 = ptext (sLit "You may need to install them with 'cabal install dph-examples'")
initDPHBuiltins thing_inside
= do { -- If '-XParallelArrays' given, we populate the builtin table for desugaring those
; doInitBuiltins <- checkLoadDAP
; if doInitBuiltins
then dsInitPArrBuiltin thing_inside
else thing_inside
}
checkLoadDAP = do { paEnabled <- xoptM Opt_ParallelArrays
; return $ paEnabled &&
mod /= gHC_PARR' &&
moduleName mod /= dATA_ARRAY_PARALLEL_NAME
}
-- do not load 'Data.Array.Parallel' iff compiling 'base:GHC.PArr' or a
-- module called 'dATA_ARRAY_PARALLEL_NAME'; see also the comments at the top
-- of 'base:GHC.PArr' and 'Data.Array.Parallel' in the DPH libraries
initDsTc :: DsM a -> TcM a
initDsTc thing_inside
= do { this_mod <- getModule
; tcg_env <- getGblEnv
; msg_var <- getErrsVar
; dflags <- getDynFlags
; static_binds_var <- liftIO $ newIORef []
; let type_env = tcg_type_env tcg_env
rdr_env = tcg_rdr_env tcg_env
fam_inst_env = tcg_fam_inst_env tcg_env
ds_envs = mkDsEnvs dflags this_mod rdr_env type_env fam_inst_env
msg_var static_binds_var
; setEnvs ds_envs thing_inside
}
mkDsEnvs :: DynFlags -> Module -> GlobalRdrEnv -> TypeEnv -> FamInstEnv
-> IORef Messages -> IORef [(Fingerprint, (Id, CoreExpr))]
-> (DsGblEnv, DsLclEnv)
mkDsEnvs dflags mod rdr_env type_env fam_inst_env msg_var static_binds_var
= let if_genv = IfGblEnv { if_rec_types = Just (mod, return type_env) }
if_lenv = mkIfLclEnv mod (ptext (sLit "GHC error in desugarer lookup in") <+> ppr mod)
gbl_env = DsGblEnv { ds_mod = mod
, ds_fam_inst_env = fam_inst_env
, ds_if_env = (if_genv, if_lenv)
, ds_unqual = mkPrintUnqualified dflags rdr_env
, ds_msgs = msg_var
, ds_dph_env = emptyGlobalRdrEnv
, ds_parr_bi = panic "DsMonad: uninitialised ds_parr_bi"
, ds_static_binds = static_binds_var
}
lcl_env = DsLclEnv { dsl_meta = emptyNameEnv
, dsl_loc = noSrcSpan
}
in (gbl_env, lcl_env)
-- Attempt to load the given module and return its exported entities if successful.
--
loadModule :: SDoc -> Module -> DsM GlobalRdrEnv
loadModule doc mod
= do { env <- getGblEnv
; setEnvs (ds_if_env env) $ do
{ iface <- loadInterface doc mod ImportBySystem
; case iface of
Failed err -> pprPanic "DsMonad.loadModule: failed to load" (err $$ doc)
Succeeded iface -> return $ mkGlobalRdrEnv . gresFromAvails prov . mi_exports $ iface
} }
where
prov = Just (ImpSpec { is_decl = imp_spec, is_item = ImpAll })
imp_spec = ImpDeclSpec { is_mod = name, is_qual = True,
is_dloc = wiredInSrcSpan, is_as = name }
name = moduleName mod
{-
************************************************************************
* *
Operations in the monad
* *
************************************************************************
And all this mysterious stuff is so we can occasionally reach out and
grab one or more names. @newLocalDs@ isn't exported---exported
functions are defined with it. The difference in name-strings makes
it easier to read debugging output.
-}
-- Make a new Id with the same print name, but different type, and new unique
newUniqueId :: Id -> Type -> DsM Id
newUniqueId id = mkSysLocalM (occNameFS (nameOccName (idName id)))
duplicateLocalDs :: Id -> DsM Id
duplicateLocalDs old_local
= do { uniq <- newUnique
; return (setIdUnique old_local uniq) }
newPredVarDs :: PredType -> DsM Var
newPredVarDs pred
= newSysLocalDs pred
newSysLocalDs, newFailLocalDs :: Type -> DsM Id
newSysLocalDs = mkSysLocalM (fsLit "ds")
newFailLocalDs = mkSysLocalM (fsLit "fail")
newSysLocalsDs :: [Type] -> DsM [Id]
newSysLocalsDs tys = mapM newSysLocalDs tys
{-
We can also reach out and either set/grab location information from
the @SrcSpan@ being carried around.
-}
getGhcModeDs :: DsM GhcMode
getGhcModeDs = getDynFlags >>= return . ghcMode
getSrcSpanDs :: DsM SrcSpan
getSrcSpanDs = do { env <- getLclEnv; return (dsl_loc env) }
putSrcSpanDs :: SrcSpan -> DsM a -> DsM a
putSrcSpanDs new_loc thing_inside = updLclEnv (\ env -> env {dsl_loc = new_loc}) thing_inside
warnDs :: SDoc -> DsM ()
warnDs warn = do { env <- getGblEnv
; loc <- getSrcSpanDs
; dflags <- getDynFlags
; let msg = mkWarnMsg dflags loc (ds_unqual env) warn
; updMutVar (ds_msgs env) (\ (w,e) -> (w `snocBag` msg, e)) }
failWithDs :: SDoc -> DsM a
failWithDs err
= do { env <- getGblEnv
; loc <- getSrcSpanDs
; dflags <- getDynFlags
; let msg = mkErrMsg dflags loc (ds_unqual env) err
; updMutVar (ds_msgs env) (\ (w,e) -> (w, e `snocBag` msg))
; failM }
mkPrintUnqualifiedDs :: DsM PrintUnqualified
mkPrintUnqualifiedDs = ds_unqual <$> getGblEnv
instance MonadThings (IOEnv (Env DsGblEnv DsLclEnv)) where
lookupThing = dsLookupGlobal
dsLookupGlobal :: Name -> DsM TyThing
-- Very like TcEnv.tcLookupGlobal
dsLookupGlobal name
= do { env <- getGblEnv
; setEnvs (ds_if_env env)
(tcIfaceGlobal name) }
dsLookupGlobalId :: Name -> DsM Id
dsLookupGlobalId name
= tyThingId <$> dsLookupGlobal name
-- |Get a name from "Data.Array.Parallel" for the desugarer, from the 'ds_parr_bi' component of the
-- global desugarer environment.
--
dsDPHBuiltin :: (PArrBuiltin -> a) -> DsM a
dsDPHBuiltin sel = (sel . ds_parr_bi) <$> getGblEnv
dsLookupTyCon :: Name -> DsM TyCon
dsLookupTyCon name
= tyThingTyCon <$> dsLookupGlobal name
dsLookupDataCon :: Name -> DsM DataCon
dsLookupDataCon name
= tyThingDataCon <$> dsLookupGlobal name
-- |Lookup a name exported by 'Data.Array.Parallel' or 'Data.Array.Parallel.Prim'.
-- Panic if there isn't one, or if it is defined multiple times.
dsLookupDPHRdrEnv :: OccName -> DsM Name
dsLookupDPHRdrEnv occ
= liftM (fromMaybe (pprPanic nameNotFound (ppr occ)))
$ dsLookupDPHRdrEnv_maybe occ
where nameNotFound = "Name not found in 'Data.Array.Parallel' or 'Data.Array.Parallel.Prim':"
-- |Lookup a name exported by 'Data.Array.Parallel' or 'Data.Array.Parallel.Prim',
-- returning `Nothing` if it's not defined. Panic if it's defined multiple times.
dsLookupDPHRdrEnv_maybe :: OccName -> DsM (Maybe Name)
dsLookupDPHRdrEnv_maybe occ
= do { env <- ds_dph_env <$> getGblEnv
; let gres = lookupGlobalRdrEnv env occ
; case gres of
[] -> return $ Nothing
[gre] -> return $ Just $ gre_name gre
_ -> pprPanic multipleNames (ppr occ)
}
where multipleNames = "Multiple definitions in 'Data.Array.Parallel' and 'Data.Array.Parallel.Prim':"
-- Populate 'ds_parr_bi' from 'ds_dph_env'.
--
dsInitPArrBuiltin :: DsM a -> DsM a
dsInitPArrBuiltin thing_inside
= do { lengthPVar <- externalVar (fsLit "lengthP")
; replicatePVar <- externalVar (fsLit "replicateP")
; singletonPVar <- externalVar (fsLit "singletonP")
; mapPVar <- externalVar (fsLit "mapP")
; filterPVar <- externalVar (fsLit "filterP")
; zipPVar <- externalVar (fsLit "zipP")
; crossMapPVar <- externalVar (fsLit "crossMapP")
; indexPVar <- externalVar (fsLit "!:")
; emptyPVar <- externalVar (fsLit "emptyP")
; appPVar <- externalVar (fsLit "+:+")
-- ; enumFromToPVar <- externalVar (fsLit "enumFromToP")
-- ; enumFromThenToPVar <- externalVar (fsLit "enumFromThenToP")
; enumFromToPVar <- return arithErr
; enumFromThenToPVar <- return arithErr
; updGblEnv (\env -> env {ds_parr_bi = PArrBuiltin
{ lengthPVar = lengthPVar
, replicatePVar = replicatePVar
, singletonPVar = singletonPVar
, mapPVar = mapPVar
, filterPVar = filterPVar
, zipPVar = zipPVar
, crossMapPVar = crossMapPVar
, indexPVar = indexPVar
, emptyPVar = emptyPVar
, appPVar = appPVar
, enumFromToPVar = enumFromToPVar
, enumFromThenToPVar = enumFromThenToPVar
} })
thing_inside
}
where
externalVar :: FastString -> DsM Var
externalVar fs = dsLookupDPHRdrEnv (mkVarOccFS fs) >>= dsLookupGlobalId
arithErr = panic "Arithmetic sequences have to wait until we support type classes"
dsGetFamInstEnvs :: DsM FamInstEnvs
-- Gets both the external-package inst-env
-- and the home-pkg inst env (includes module being compiled)
dsGetFamInstEnvs
= do { eps <- getEps; env <- getGblEnv
; return (eps_fam_inst_env eps, ds_fam_inst_env env) }
dsGetMetaEnv :: DsM (NameEnv DsMetaVal)
dsGetMetaEnv = do { env <- getLclEnv; return (dsl_meta env) }
dsLookupMetaEnv :: Name -> DsM (Maybe DsMetaVal)
dsLookupMetaEnv name = do { env <- getLclEnv; return (lookupNameEnv (dsl_meta env) name) }
dsExtendMetaEnv :: DsMetaEnv -> DsM a -> DsM a
dsExtendMetaEnv menv thing_inside
= updLclEnv (\env -> env { dsl_meta = dsl_meta env `plusNameEnv` menv }) thing_inside
-- | Gets a reference to the SPT entries created so far.
dsGetStaticBindsVar :: DsM (IORef [(Fingerprint, (Id,CoreExpr))])
dsGetStaticBindsVar = fmap ds_static_binds getGblEnv
discardWarningsDs :: DsM a -> DsM a
-- Ignore warnings inside the thing inside;
-- used to ignore inaccessible cases etc. inside generated code
discardWarningsDs thing_inside
= do { env <- getGblEnv
; old_msgs <- readTcRef (ds_msgs env)
; result <- thing_inside
-- Revert messages to old_msgs
; writeTcRef (ds_msgs env) old_msgs
; return result }
|
acowley/ghc
|
compiler/deSugar/DsMonad.hs
|
bsd-3-clause
| 17,977
| 2
| 19
| 5,874
| 3,332
| 1,793
| 1,539
| 288
| 5
|
module A4 where
import D4
main = (sumFun [1 .. 4]) + (sum (map (f 1) [1 .. 7]))
|
SAdams601/HaRe
|
old/testing/generaliseDef/A4_AstOut.hs
|
bsd-3-clause
| 81
| 0
| 11
| 20
| 54
| 31
| 23
| 3
| 1
|
{-# LANGUAGE Safe #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE AutoDeriveTypeable, StandaloneDeriving #-}
-------------------------------------------------------------------------------
-- |
-- Module : System.Timeout
-- Copyright : (c) The University of Glasgow 2007
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable
--
-- Attach a timeout event to arbitrary 'IO' computations.
--
-------------------------------------------------------------------------------
module System.Timeout ( timeout ) where
#ifndef mingw32_HOST_OS
import Control.Monad
import GHC.Event (getSystemTimerManager,
registerTimeout, unregisterTimeout)
#endif
import Control.Concurrent
import Control.Exception (Exception(..), handleJust, bracket,
uninterruptibleMask_,
asyncExceptionToException,
asyncExceptionFromException)
import Data.Typeable
import Data.Unique (Unique, newUnique)
-- An internal type that is thrown as a dynamic exception to
-- interrupt the running IO computation when the timeout has
-- expired.
newtype Timeout = Timeout Unique deriving (Eq, Typeable)
instance Show Timeout where
show _ = "<<timeout>>"
-- Timeout is a child of SomeAsyncException
instance Exception Timeout where
toException = asyncExceptionToException
fromException = asyncExceptionFromException
-- |Wrap an 'IO' computation to time out and return @Nothing@ in case no result
-- is available within @n@ microseconds (@1\/10^6@ seconds). In case a result
-- is available before the timeout expires, @Just a@ is returned. A negative
-- timeout interval means \"wait indefinitely\". When specifying long timeouts,
-- be careful not to exceed @maxBound :: Int@.
--
-- The design of this combinator was guided by the objective that @timeout n f@
-- should behave exactly the same as @f@ as long as @f@ doesn't time out. This
-- means that @f@ has the same 'myThreadId' it would have without the timeout
-- wrapper. Any exceptions @f@ might throw cancel the timeout and propagate
-- further up. It is also possible for @f@ to receive exceptions thrown to it by
-- another thread.
--
-- A tricky implementation detail is the question of how to abort an @IO@
-- computation. This combinator relies on asynchronous exceptions internally.
-- The technique works very well for computations executing inside of the
-- Haskell runtime system, but it doesn't work at all for non-Haskell code.
-- Foreign function calls, for example, cannot be timed out with this
-- combinator simply because an arbitrary C function cannot receive
-- asynchronous exceptions. When @timeout@ is used to wrap an FFI call that
-- blocks, no timeout event can be delivered until the FFI call returns, which
-- pretty much negates the purpose of the combinator. In practice, however,
-- this limitation is less severe than it may sound. Standard I\/O functions
-- like 'System.IO.hGetBuf', 'System.IO.hPutBuf', Network.Socket.accept, or
-- 'System.IO.hWaitForInput' appear to be blocking, but they really don't
-- because the runtime system uses scheduling mechanisms like @select(2)@ to
-- perform asynchronous I\/O, so it is possible to interrupt standard socket
-- I\/O or file I\/O using this combinator.
timeout :: Int -> IO a -> IO (Maybe a)
timeout n f
| n < 0 = fmap Just f
| n == 0 = return Nothing
#ifndef mingw32_HOST_OS
| rtsSupportsBoundThreads = do
-- In the threaded RTS, we use the Timer Manager to delay the
-- (fairly expensive) 'forkIO' call until the timeout has expired.
--
-- An additional thread is required for the actual delivery of
-- the Timeout exception because killThread (or another throwTo)
-- is the only way to reliably interrupt a throwTo in flight.
pid <- myThreadId
ex <- fmap Timeout newUnique
tm <- getSystemTimerManager
-- 'lock' synchronizes the timeout handler and the main thread:
-- * the main thread can disable the handler by writing to 'lock';
-- * the handler communicates the spawned thread's id through 'lock'.
-- These two cases are mutually exclusive.
lock <- newEmptyMVar
let handleTimeout = do
v <- isEmptyMVar lock
when v $ void $ forkIOWithUnmask $ \unmask -> unmask $ do
v2 <- tryPutMVar lock =<< myThreadId
when v2 $ throwTo pid ex
cleanupTimeout key = uninterruptibleMask_ $ do
v <- tryPutMVar lock undefined
if v then unregisterTimeout tm key
else takeMVar lock >>= killThread
handleJust (\e -> if e == ex then Just () else Nothing)
(\_ -> return Nothing)
(bracket (registerTimeout tm n handleTimeout)
cleanupTimeout
(\_ -> fmap Just f))
#endif
| otherwise = do
pid <- myThreadId
ex <- fmap Timeout newUnique
handleJust (\e -> if e == ex then Just () else Nothing)
(\_ -> return Nothing)
(bracket (forkIOWithUnmask $ \unmask ->
unmask $ threadDelay n >> throwTo pid ex)
(uninterruptibleMask_ . killThread)
(\_ -> fmap Just f))
-- #7719 explains why we need uninterruptibleMask_ above.
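-- A minimal usage sketch ('slowAction' is an illustrative placeholder, not
-- part of this module):
--
-- > main :: IO ()
-- > main = do
-- >   r <- timeout 1000000 slowAction  -- give up after one second
-- >   case r of
-- >     Nothing -> putStrLn "timed out"
-- >     Just x  -> print x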
|
jtojnar/haste-compiler
|
libraries/ghc-7.10/base/System/Timeout.hs
|
bsd-3-clause
| 5,589
| 0
| 20
| 1,456
| 641
| 355
| 286
| 52
| 4
|
import Data.Ratio -- 1.3
import Data.Array -- 1.3
infix 1 =:
(=:) a b = (a,b)
main = putStr (shows sub_b "\n")
where
sub_b :: Array Int Double
sub_b = ixmap (102, 113) id b
b :: Array Int Double
b = fmap ( \ r -> fromRational r / pi )
(ixmap (101,200) (\ i -> toInteger i - 100) a)
a :: Array Integer (Ratio Integer)
a = array (1,100) ((1 =: 1) : [i =: fromInteger i * a!(i-1)
| i <- [2..100]])
|
sdiehl/ghc
|
testsuite/tests/array/should_run/arr013.hs
|
bsd-3-clause
| 507
| 0
| 14
| 198
| 232
| 126
| 106
| 13
| 1
|
module T7145a ( Applicative(pure) ) where
|
urbanslug/ghc
|
testsuite/tests/rename/should_compile/T7145a.hs
|
bsd-3-clause
| 43
| 0
| 5
| 7
| 13
| 9
| 4
| 3
| 0
|
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.RTCRtpTransceiver
(setDirection, stop, getMid, getMidUnsafe, getMidUnchecked,
getSender, getReceiver, getStopped, getDirection,
RTCRtpTransceiver(..), gTypeRTCRtpTransceiver)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver.setDirection Mozilla RTCRtpTransceiver.setDirection documentation>
setDirection ::
(MonadDOM m) =>
RTCRtpTransceiver -> RTCRtpTransceiverDirection -> m ()
setDirection self direction
= liftDOM (void (self ^. jsf "setDirection" [toJSVal direction]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver.stop Mozilla RTCRtpTransceiver.stop documentation>
stop :: (MonadDOM m) => RTCRtpTransceiver -> m ()
stop self = liftDOM (void (self ^. jsf "stop" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver.mid Mozilla RTCRtpTransceiver.mid documentation>
getMid ::
(MonadDOM m, FromJSString result) =>
RTCRtpTransceiver -> m (Maybe result)
getMid self = liftDOM ((self ^. js "mid") >>= fromMaybeJSString)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver.mid Mozilla RTCRtpTransceiver.mid documentation>
getMidUnsafe ::
(MonadDOM m, HasCallStack, FromJSString result) =>
RTCRtpTransceiver -> m result
getMidUnsafe self
= liftDOM
(((self ^. js "mid") >>= fromMaybeJSString) >>=
maybe (Prelude.error "Nothing to return") return)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver.mid Mozilla RTCRtpTransceiver.mid documentation>
getMidUnchecked ::
(MonadDOM m, FromJSString result) => RTCRtpTransceiver -> m result
getMidUnchecked self
= liftDOM ((self ^. js "mid") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver.sender Mozilla RTCRtpTransceiver.sender documentation>
getSender :: (MonadDOM m) => RTCRtpTransceiver -> m RTCRtpSender
getSender self
= liftDOM ((self ^. js "sender") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver.receiver Mozilla RTCRtpTransceiver.receiver documentation>
getReceiver ::
(MonadDOM m) => RTCRtpTransceiver -> m RTCRtpReceiver
getReceiver self
= liftDOM ((self ^. js "receiver") >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver.stopped Mozilla RTCRtpTransceiver.stopped documentation>
getStopped :: (MonadDOM m) => RTCRtpTransceiver -> m Bool
getStopped self = liftDOM ((self ^. js "stopped") >>= valToBool)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpTransceiver.direction Mozilla RTCRtpTransceiver.direction documentation>
getDirection ::
(MonadDOM m) => RTCRtpTransceiver -> m RTCRtpTransceiverDirection
getDirection self
= liftDOM ((self ^. js "direction") >>= fromJSValUnchecked)
|
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/RTCRtpTransceiver.hs
|
mit
| 3,842
| 0
| 12
| 542
| 837
| 481
| 356
| -1
| -1
|
import Prelude hiding (and, concat, elem)
and :: [Bool] -> Bool
--ad [] = True
--ad (b : bs) = b && ad bs
--and [] = True
--and (b : bs)
-- | b = and bs
-- | otherwise = False
--and [] = False
--and (b : bs) = b && and bs
--and [] = False
--and (b : bs) = b && and bs
--and [] = True
--and (b : bs)
-- | b == False = False
-- | otherwise = and bs
--and [] = True
--and (b : bs) = b || and bs
--and [] = True
--and (b : bs) = and bs && b
and [] = True
and (b : bs)
  | b = and bs
  | otherwise = False
test_and = do
print("True", and [True, True, True])
print("False", and [True, False, False])
-- Ex. 5: Concat
concat :: [[a]] -> [a]
--concat [] = []
--concat (xs : xss) = xs : concat xss
--concat [] = []
--concat (xs :xss) = xs ++ concat xss
--concat [] = [[]]
--concat (xs :xss) = xs ++ concat xss
concat [] = []
concat (xs :xss) = xs ++ concat xss
test_concat = do
print(concat [[1,2,3], [4,5,6], [7,8,9]])
-- Ex. 6:
rep :: Int -> a -> [a]
--rep 1 x = x
--rep n x = x : rep (n - 1) x
--rep 0 _ = []
--rep n x = x : rep (n - 1) x : x
--rep 1 _ = []
--rep n x = rep (n - 1) x ++ [x]
rep 0 _ = []
rep n x = x: rep (n - 1) x
test_rep = do
print("[4,4,4]", rep 4 4)
print("[]", rep 0 4)
print("[2]", rep 1 2)
print("aaa", rep 3 'a')
-- Ex. 8:
elem :: Eq a => a -> [a] -> Bool
elem _ [] = False
elem x (y : ys)
| x == y = True
| otherwise = elem x ys
test_elem = do
print("True", elem 2 [1..10])
print("False", elem 2 [3..10])
-- Ex. 9: Merge
merge :: Ord a => [a] -> [a] -> [a]
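-- One possible implementation of merging two sorted lists (a sketch):
merge [] ys = ys
merge xs [] = xs
merge (x : xs) (y : ys)
  | x <= y = x : merge xs (y : ys)
  | otherwise = y : merge (x : xs) ys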
test_merge = do
print("[1,2,3,4,5,6]", merge [2,5,6] [1,3,4])
|
supermitch/learn-haskell
|
edx-fp101x/5_hw.hs
|
mit
| 1,625
| 0
| 11
| 464
| 587
| 326
| 261
| 33
| 1
|
module BACnet.TagSpec where
import BACnet.Tag.Core
import BACnet.Tag.Reader
import Test.Hspec
import BACnet.Reader
-- TODO: Remove the readAPTag tests. Remove the function as well.
spec :: Spec
spec =
describe "readNullAPTag" $
it "returns () for input [0x00]" $
run readNullAPTag [0x00] `shouldBe` NullAP
|
michaelgwelch/bacnet
|
test/BACnet/TagSpec.hs
|
mit
| 320
| 0
| 8
| 56
| 67
| 39
| 28
| 10
| 1
|
module Location where
import Control.Applicative
import Data.List ( sortBy )
import Data.Map ( Map )
import Data.Maybe ( mapMaybe, isNothing )
import Data.Set ( Set )
import Data.Tree ( Tree(Node), Forest )
import Text.JSON
import qualified Data.Graph as Graph
import qualified Data.Map as Map
import qualified Data.Set as Set
-- Generate an edge, which contains the note, the uid of the note, and the uids of its children
edge :: (Ord n) => Map n (Maybe n) -> n -> (n, n, [n])
edge dict n = (,,) n n (children dict n)
-- Edges usable by Data.Graph
edges :: (Ord n) => Map n (Maybe n) -> [(n, n, [n])]
edges = map <$> edge <*> Map.keys
-- Generate a tree from a map!
-- Technically, we generate a forest, one tree from each root
forest :: (Ord n) => Map n (Maybe n) -> Forest n
forest dict = map (fmap vertToA) (Graph.dfs g vs) where
(g, vertToEdge, idToVert) = Graph.graphFromEdges $ edges dict
vs = mapMaybe idToVert . Set.toList $ roots dict
vertToA = (\(n, _, _) -> n) . vertToEdge
-- Find all children of a note
children :: Ord o => Map o (Maybe o) -> o -> [o]
children dict n = Map.findWithDefault [] n $ invert dict
-- Flip a Child -> Parent mapping into a Parent -> [Children] mapping
invert :: Ord o => Map o (Maybe o) -> Map o [o]
invert = Map.foldWithKey inv Map.empty where
inv k (Just v) = Map.insertWith (++) v [k]
inv _ Nothing = id
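-- Illustrative sketch (hypothetical values): given
--   dict = Map.fromList [("b", Just "a"), ("c", Just "a"), ("d", Nothing)]
-- 'invert dict' groups children under their parent, roughly
--   Map.fromList [("a", ["b", "c"])]
-- while the parentless "d" is only picked up by 'roots' below.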
-- Find elements in a map that have no parents
roots :: Ord o => Map o (Maybe o) -> Set o
roots = Set.fromList . Map.keys . Map.filter hasNoParent
where hasNoParent = isNothing
sorted :: (Ord a) => Tree a -> Tree a
sorted (Node n ts) = Node n (sortBy sortNode $ map sorted ts)
where sortNode (Node x _) (Node y _) = compare x y
unify :: a -> Forest a -> Tree a
unify _ [a] = a
unify a as = Node a as
class Bubble a where
label :: a -> String
amount :: a -> Rational
bubbleTree :: (Bubble a) => Tree a -> JSValue
bubbleTree (Node a as) = JSObject $ toJSObject
[ ("label", JSString $ toJSString $ label a)
, ("amount", JSRational True $ amount a)
, ("children", JSArray $ map bubbleTree as)
]
|
Soares/tagwiki
|
src/Location.hs
|
mit
| 2,077
| 0
| 10
| 462
| 850
| 453
| 397
| 43
| 2
|
module Rebase.Data.Word
(
module Data.Word
)
where
import Data.Word
|
nikita-volkov/rebase
|
library/Rebase/Data/Word.hs
|
mit
| 71
| 0
| 5
| 12
| 20
| 13
| 7
| 4
| 0
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
--------------------------------------------------------------------------------
-- |
-- Module : Network.MQTT.Topic
-- Copyright : (c) Lars Petersen 2016
-- License : MIT
--
-- Maintainer : info@lars-petersen.net
-- Stability : experimental
--------------------------------------------------------------------------------
module Network.MQTT.Message.Topic
( -- ** Topic
Topic ()
, topicLevels
, topicLength
, topicParser
, topicBuilder
-- ** Filter
, Filter (..)
, filterLevels
, filterLength
, filterParser
, filterBuilder
-- ** Level
, Level ()
, levelParser
, multiLevelWildcard
, singleLevelWildcard
, startsWithDollar
) where
import Control.Applicative
import Control.Monad (void)
import qualified Data.Attoparsec.ByteString as A
import qualified Data.Binary as B
import qualified Data.ByteString.Builder as BS
import qualified Data.ByteString.Short as BS
import Data.List
import Data.List.NonEmpty (NonEmpty (..))
import Data.Monoid ((<>))
import Data.String
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Encoding.Error as T
import Data.Word
-- | According to the MQTT specification a topic
--
-- * must not be empty
-- * must not contain @+@, @#@ or @\\NUL@ characters
newtype Topic = Topic (NonEmpty Level) deriving (Eq, Ord)
-- | According to the MQTT specification a filter
--
-- * must not be empty
-- * must not contain a @\\NUL@ character
newtype Filter = Filter (NonEmpty Level) deriving (Eq, Ord)
-- | A `Level` represents a single path element of a `Topic` or a `Filter`.
newtype Level = Level BS.ShortByteString deriving (Eq, Ord, B.Binary)
instance Show Topic where
show (Topic xs) = show (Filter xs)
instance Show Filter where
show (Filter (x:|xs)) = concat ["\"", intercalate "/" $ f x : map f xs, "\""]
where
f (Level l) = T.unpack $ T.decodeUtf8With T.lenientDecode $ BS.fromShort l
instance Show Level where
show (Level x) =
concat ["\"", T.unpack $ T.decodeUtf8With T.lenientDecode $ BS.fromShort x, "\""]
instance IsString Topic where
fromString s = case A.parseOnly topicParser (T.encodeUtf8 $ T.pack s) of
Left e -> error e
Right t -> t
instance IsString Filter where
fromString s = case A.parseOnly filterParser (T.encodeUtf8 $ T.pack s) of
Left e -> error e
Right t -> t
instance IsString Level where
fromString s = case A.parseOnly levelParser (T.encodeUtf8 $ T.pack s) of
Left e -> error e
Right t -> t
instance B.Binary Topic where
put (Topic (x:|xs)) =
B.put x >> B.put xs
get = do
x <- B.get
xs <- B.get
pure (Topic (x:|xs))
instance B.Binary Filter where
put (Filter (x:|xs)) =
B.put x >> B.put xs
get = do
x <- B.get
xs <- B.get
pure (Filter (x:|xs))
topicLevels :: Topic -> NonEmpty Level
topicLevels (Topic x) = x
{-# INLINE topicLevels #-}
filterLevels :: Filter -> NonEmpty Level
filterLevels (Filter x) = x
{-# INLINE filterLevels #-}
topicParser :: A.Parser Topic
topicParser = (<|> fail "invalid topic") $ Topic <$> do
void A.peekWord8'
level <- pLevel
levels <- A.many' (pSlash >> pLevel)
A.endOfInput
pure (level :| levels)
where
pSlash = void (A.word8 slash)
pLevel = Level . BS.toShort <$> A.takeWhile
(\w8-> w8 /= slash && w8 /= zero && w8 /= hash && w8 /= plus)
{-# INLINABLE topicParser #-}
topicBuilder :: Topic -> BS.Builder
topicBuilder (Topic (Level x:|xs)) =
foldl'
(\acc (Level l)-> acc <> BS.word8 slash <> BS.shortByteString l)
(BS.shortByteString x) xs
{-# INLINE topicBuilder #-}
filterBuilder :: Filter -> BS.Builder
filterBuilder (Filter (Level x:|xs)) =
foldl'
(\acc (Level l)-> acc <> BS.word8 slash <> BS.shortByteString l)
(BS.shortByteString x) xs
{-# INLINE filterBuilder #-}
topicLength :: Topic -> Int
topicLength (Topic (Level x:|xs)) =
BS.length x + len' xs 0
where
len' [] acc = acc
len' (Level z:zs) acc = len' zs $! acc + 1 + BS.length z
{-# INLINE topicLength #-}
filterLength :: Filter -> Int
filterLength (Filter (Level x:|xs)) =
BS.length x + len' xs 0
where
len' [] acc = acc
len' (Level z:zs) acc = len' zs $! acc + 1 + BS.length z
{-# INLINE filterLength #-}
filterParser :: A.Parser Filter
filterParser = (<|> fail "invalid filter") $ Filter <$> do
void A.peekWord8'
(x:xs) <- pLevels
pure (x:|xs)
where
pSlash = void (A.word8 slash)
pLevel = Level . BS.toShort <$> A.takeWhile
(\w8-> w8 /= slash && w8 /= zero && w8 /= hash && w8 /= plus)
pLevels
= (void (A.word8 hash) >> A.endOfInput >> pure [multiLevelWildcard])
<|> (void (A.word8 plus) >> ((A.endOfInput >> pure [singleLevelWildcard]) <|>
(pSlash >> (:) <$> pure singleLevelWildcard <*> pLevels)))
<|> (pLevel >>= \x-> (x:) <$> ((A.endOfInput >> pure []) <|> (pSlash >> pLevels)))
{-# INLINABLE filterParser #-}
levelParser :: A.Parser Level
levelParser = do
x <- A.takeWhile (\w8-> w8 /= slash && w8 /= zero)
A.endOfInput
pure (Level $ BS.toShort x)
{-# INLINE levelParser #-}
-- | The @#@ path element. It must only appear at the end of a `Filter`.
multiLevelWildcard :: Level
multiLevelWildcard = Level $ BS.pack $ pure hash
{-# INLINE multiLevelWildcard #-}
-- | The @+@ path element. It may appear anywhere within a `Filter`.
singleLevelWildcard :: Level
singleLevelWildcard = Level $ BS.pack $ pure plus
{-# INLINE singleLevelWildcard #-}
-- | Returns `True` iff the `Level` starts with @$@.
startsWithDollar :: Level -> Bool
startsWithDollar (Level bs) =
not (BS.null bs) && BS.index bs 0 == dollar
{-# INLINE startsWithDollar #-}
zero, plus, hash, slash, dollar :: Word8
zero = 0x00
plus = 0x2b
hash = 0x23
slash = 0x2f
dollar = 0x24
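-- A small, hypothetical usage sketch (relies on the OverloadedStrings
-- pragma above; '_topicExample' is an illustrative name, not part of the
-- public API):
_topicExample :: (Int, Int, Bool)
_topicExample =
  ( topicLength "a/b/c"     -- 5 bytes: three one-byte levels plus two separators
  , filterLength "a/+/#"    -- 5 bytes: the wildcards are ordinary one-byte levels
  , startsWithDollar "$SYS" -- True: the level begins with '$'
  )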
|
lpeterse/haskell-mqtt
|
src/Network/MQTT/Message/Topic.hs
|
mit
| 6,114
| 0
| 17
| 1,475
| 1,915
| 1,015
| 900
| 150
| 2
|
module Y2016.M11.D29.Solution where
import Control.Arrow ((&&&), second)
import Data.Time
-- the imports below are available at the 1HaskellADay git repository
import Control.List (weave)
import Control.Presentation (laxmi)
import Control.Scan.CSV (csv)
import Y2016.M11.D28.Solution
{--
One of the things about a time-series is that, well, it changes over time.
The time-series we are working with is monotonically increasing, but many
data-over-time series do not have that restriction.
Thought for later.
But, so this is increasing at some rate, yes? What rate? How do we model or
estimate the rate of change of data over time?
One way is to take the mean of all the gains. Voila! An estimate of your
progression.
A good one?
Let's see.
Today's Haskell exercise. Read in the updated time series from series.csv at
this directory, or at the URL:
https://raw.githubusercontent.com/geophf/1HaskellADay/master/exercises/HAD/Y2016/M11/D29/scores.csv
compute the daily gains, as you did yesterday, then find the mean gain
--}
readDailyScores :: FilePath -> IO [(Day, Score)]
readDailyScores =
fmap (map ((read . head &&& read . last) . csv) . tail . lines) . readFile
{--
*Y2016.M11.D29.Solution> readDailyScores "Y2016/M11/D29/scores.csv" ~> scores
[(2016-11-22,11289979),(2016-11-23,11422295),...]
--}
µgain :: (Num a, Fractional a, RealFrac a) => [a] -> a
µgain = uncurry (/) . (sum . gains &&& fromIntegral . length)
-- *Y2016.M11.D29.Solution> µgain (map (toRational . snd) it) ~>
-- 779172 % 7 or ~ 111310 for us mere humans
-- (side note/gripe: I wish we could control output à la Prolog so that
-- rational values had a yummier taste to my eyes, but That's Just Me (tm))
{-- BONUS -----------------------------------------------------------------
What does the mean gain give you? One way to look at the mean gain is the
slope of the linear fit of a data set. Okay, so you have the rise, you have the
run (increments of one day in the data), where is the origin?
Hm. That's another problem ... for another day. For now, let's use the first
datum as the origin.
So, with that, and using your favorite charting software, plot the (original)
data points along with the fitted curve using the µgain.
Good fit of data? Show your results.
--}
chartProgression :: Show a => FilePath -> [(a, Rational)] -> IO ()
chartProgression outputfile dailyscores = writeFile outputfile .
unlines . ("Date,Score,Fitted Score":) $ extension dailyscores
extension :: Show a => [(a, Rational)] -> [String]
extension scores@((_, score):_) =
ext scores (toRational score) (µgain (map (toRational . snd) scores))
ext :: Show a => [(a, Rational)] -> Rational -> Rational -> [String]
ext [] _ _ = []
ext ((d,s):rest) score add = weave [show d, lax s, lax score]
:ext rest (score + add) add
where lax = laxmi 2
{--
*Y2016.M11.D29.Solution> readDailyScores "Y2016/M11/D29/scores.csv" >>=
chartProgression "Y2016/M11/D29/progression.csv"
. map (second toRational)
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2016/M11/D29/Solution.hs
|
mit
| 3,038
| 0
| 16
| 552
| 484
| 270
| 214
| -1
| -1
|
-- -------------------------------------------------------------------------------------
-- Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
-- For email, run on linux (perl v5.8.5):
-- perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
-- -------------------------------------------------------------------------------------
--
-- This solution works, but it is EXTREMELY slow
-- This is because I am doing an exponential amount of recursion
-- I need to re-formulate this problem, in a non-exponential manner
--
-- Anyway, just including the solution for completeness. You can play around with maxLevel
-- to change the depth of recursion. Lower depth makes it quicker, but with lower accuracy,
-- and vice versa.
--
-- For sample
-- 1
-- abb
--
-- w/ maxLevel = 10 we get 2.68785
-- w/ maxLevel = 15 we get 2.94176
-- w/ maxLevel = 20 we get 2.99007
--
-- The correct answer is 3.0
--
import Control.Monad
import Data.List
import qualified Data.Map as M
import Data.Ratio
import Data.Maybe
-- calculate probabilities up to this level
maxLevel :: Int
maxLevel = 20
-- as per problem constraints, biggest number is 8
fact :: Int -> Int
fact 0 = 1
fact 1 = 1
fact 2 = 2
fact 3 = 6
fact 4 = 24
fact 5 = 120
fact 6 = 720
fact 7 = 5040
fact 8 = 40320
choose :: Int -> Int -> Int
choose n k = (fact n) `div` (fact k * fact (n-k))
isPalin :: String -> Bool
isPalin ss = ss == reverse ss
getCombos :: Int -> [(Int, Int)]
getCombos 2 = [(0, 1)]
getCombos n = [(p, n-1) | p <- [0..(n-2)]] ++ getCombos (n-1)
-- creates a new string by swapping the indices indicated
createString :: String -> (Int, Int) -> String
createString ss (p, n) = (take p ss) ++ [ss !! n] ++ (take (n-p-1) . drop (p+1) $ ss) ++ [ss !! p] ++ (drop (n+1) ss)
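-- e.g. createString "abcd" (1, 3) == "adcb"  (hypothetical illustration)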
-- how many combinations of 2swaps, from all possible 2swaps, can change this into a palindrome?
getProb :: [String] -> String -> (Double, [String])
getProb palinss str =
let numCombos = (length str) `choose` 2 --only 2 elems swapped at a time
allCombos = map (createString str) $ getCombos (length str)
numPalins = length . filter (`elem` palinss) $ allCombos
notPalins = filter (\x -> not $ x `elem` palinss) $ allCombos
in ((fromIntegral numPalins) / (fromIntegral numCombos), notPalins)
-- compute the expected amount of swaps, to change this string, at this level into a palindrome
-- if this level fails, recurse beyond it
probForLevel :: [String] -> M.Map String (Double, [String]) -> Int -> Double -> String -> Double
probForLevel palinss mmap swapNo probSoFar curStr =
let (succProb, failstrs) = fromMaybe (0, []) $ M.lookup curStr mmap --if this swap were to succeed
leafExpect = fromIntegral swapNo * succProb * probSoFar
numCombos = (length curStr) `choose` 2 --only 2 elems swapped at a time
multProb = probSoFar / (fromIntegral numCombos)
failExpect = sum $ map (probForLevel palinss mmap (swapNo + 1) multProb) failstrs
in if swapNo == maxLevel then 0 else leafExpect + failExpect
expectedSwaps :: String -> Double
expectedSwaps [] = 0 --empty string
expectedSwaps [_] = 0 --or one length string dont need any swaps
expectedSwaps str =
let allss = permutations str
palinss = filter isPalin allss
mymap = M.empty
finmap = foldl' (\accmap st -> M.insert st (getProb palinss st) accmap) mymap $ allss
in if isPalin str then 0 else probForLevel palinss finmap 1 1 str
main :: IO ()
main = do
ip <- getContents
let ss = tail . lines $ ip
let expects = map expectedSwaps ss
mapM_ (putStrLn) $ map show expects
|
cbrghostrider/Hacking
|
HackerRank/Mathematics/Probability/palindromeExpectation.hs
|
mit
| 3,645
| 0
| 15
| 754
| 1,006
| 549
| 457
| 56
| 2
|
module Tak.Range (makeRange, asTuple, shiftRange, startPos, endPos) where
import Tak.Types
import Tak.Util
makeRange :: Pos -> Pos -> Range
makeRange p0 p1 =
Range (min p0 p1) (max p0 p1)
asTuple :: Range -> (Pos, Pos)
asTuple r = let Range p0 p1 = r in (p0, p1)
shiftRange :: Range -> Pos -> Range
(Range p0 p1) `shiftRange` p = Range (p0 `shift` p) (p1 `shift` p)
startPos :: Range -> Pos
startPos (Range s e) = s
endPos :: Range -> Pos
endPos (Range s e) = e
|
sixohsix/tak
|
src/Tak/Range.hs
|
mit
| 476
| 0
| 9
| 104
| 230
| 125
| 105
| 14
| 1
|
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "Data.Functor.Compose.Compat"
-- from a globally unique namespace.
module Data.Functor.Compose.Compat.Repl (
module Data.Functor.Compose.Compat
) where
import "this" Data.Functor.Compose.Compat
|
haskell-compat/base-compat
|
base-compat/src/Data/Functor/Compose/Compat/Repl.hs
|
mit
| 312
| 0
| 5
| 31
| 31
| 24
| 7
| 5
| 0
|
{- |
Module: $Header$
Description: Testing the OpenTheory standard theory library
License: MIT
Maintainer: Joe Leslie-Hurd <joe@gilith.com>
Stability: provisional
Portability: portable
-}
module Main
( main )
where
import OpenTheory.Primitive.Natural
import OpenTheory.Primitive.Test
assertion0 :: Bool
assertion0 = True
assertion1 :: Bool
assertion1 = (2 :: Natural) + 2 == 4
proposition0 :: Bool -> Bool
proposition0 p = p || not p
proposition1 :: Natural -> Natural -> Bool
proposition1 m n = m + n == n + m
proposition2 :: Natural -> Natural -> Bool
proposition2 n k = shiftLeft n k == (2 ^ k) * n
proposition3 :: Natural -> Natural -> Bool
proposition3 n k = shiftRight n k == n `div` (2 ^ k)
main :: IO ()
main =
do assert "Assertion 0:\n T\n " assertion0
assert "Assertion 1:\n 2 + 2 = 4\n " assertion1
check "Proposition 0:\n !p. p \\/ ~p\n " proposition0
check "Proposition 1:\n !m n. m + n = n + m\n " proposition1
check "Proposition 2:\n !n k. shiftLeft n k = 2 ^ k * n\n " proposition2
check "Proposition 3:\n !n k. shiftRight n k = n div 2 ^ k\n " proposition3
return ()
|
gilith/opentheory
|
data/haskell/opentheory-primitive/src/Test.hs
|
mit
| 1,153
| 0
| 8
| 269
| 274
| 140
| 134
| 25
| 1
|
module AbstractInterpreter where
import LabeledAst
import Data.Set (Set, member, union, unions)
import qualified Data.Set as Set
import Data.Map (Map, singleton, (!), insert)
import qualified Data.Map as Map
import Control.Monad.State
import Parser
data Closure = Closure LAst ContextEnvironment
deriving (Eq, Ord)
data CFAState = CFAState LAst ContextEnvironment Store Context
deriving (Eq, Ord, Show)
type Context = [Label]
type Variable = String
type ContextEnvironment = Map Variable Context
type Bind = (Variable, Context)
type Store = Map Bind (Set Closure)
type Cache = Map Label (Map Context (Set Closure))
type AbstractInterpreter = State (Set CFAState, Cache)
k :: Int
k = 1
k_CFA :: String -> (LAst, Cache, Store)
k_CFA source = case jsparse source of
Right ts -> let (lAst, _) = convert ts
(states, cache) = execState (analysis Map.empty Map.empty [] lAst) (Set.empty, Map.empty)
stores = Set.foldl (\acc (CFAState _ _ s _) -> Set.insert s acc) Set.empty states
store = Map.unionsWith union $ Set.toList stores
in (lAst, cache, store)
Left e -> error $ show e
analysis :: ContextEnvironment -> -- ce
Store -> -- store
Context -> -- delta
LAst -> -- expr
AbstractInterpreter (Set Closure)
analysis ce store curr (Var x l) = let v = store ! (x, ce ! x)
in do updateCache l curr v
return v
analysis ce _ curr f@(Function _ _ l) =
let freeVars = fv f
ce' = Map.filterWithKey (\key _ -> key `member` freeVars) ce
closure = Set.singleton (Closure f ce')
in do updateCache l curr closure
return closure
analysis ce store curr call@(Application e1 e2 l) =
do seen <- getSeen
let theState = CFAState call ce store curr
if not $ theState `member` seen
then do addSeen theState
v1 <- analysis ce store curr e1
v2 <- analysis ce store curr e2
let next = nextContext curr l
each (Closure (Function x body _) env) =
let newCe = insert x next env
newStore = insert (x, next) v2 store
in analysis newCe newStore next body
vs' <- mapM each $ Set.toList v1
let v' = unions vs'
updateCache l curr v'
return v'
else return Set.empty
analysis ce store curr (IfExpr cond e1 e2 l) =
do analysis ce store curr cond
v1 <- analysis ce store curr e1
v2 <- analysis ce store curr e2
let v = v1 `union` v2
updateCache l curr v
return v
analysis ce store curr (LetRec bindings body l) =
do let vars = map fst bindings
newCeElems = map (\x -> (x,curr)) vars
newCePart = Map.fromList newCeElems
newCe = ce `Map.union` newCePart
newBinds = map (\x -> (x, curr)) vars
vs <- mapM (analysis newCe store curr) (map snd bindings)
let newStoreElems = zip newBinds vs
newStore = store `Map.union` Map.fromList newStoreElems
v <- analysis newCe newStore curr body
updateCache l curr v
return v
analysis ce store curr (BinaryExpr _ e1 e2 _) =
do analysis ce store curr e1
analysis ce store curr e2
return Set.empty
analysis _ _ _ _ = return Set.empty
updateCache :: Label -> Context -> Set Closure -> AbstractInterpreter ()
updateCache l curr value =
do cache <- getCache
let newC = cache `merge` (singleton l $ singleton curr value)
setCache newC
merge :: Cache -> Cache -> Cache
merge = Map.unionWith Map.union
getCache :: AbstractInterpreter Cache
getCache = do (_, c) <- get
return c
setCache :: Cache -> AbstractInterpreter ()
setCache newC = do (seen, _) <- get
put (seen, newC)
getSeen :: AbstractInterpreter (Set CFAState)
getSeen = do (s,_) <- get
return s
setSeen :: Set CFAState -> AbstractInterpreter ()
setSeen newSeen = do (_, c) <- get
put (newSeen, c)
addSeen :: CFAState -> AbstractInterpreter ()
addSeen new = do seen <- getSeen
setSeen (Set.insert new seen)
nextContext :: Context -> Label -> Context
nextContext callString l = take k $ l : callString
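-- Illustration (hypothetical labels): with k = 1 only the most recent
-- call site is kept, e.g. nextContext [l0] l1 == [l1].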
instance Show Closure where
show (Closure (Function x _ l) env) = "Closure function (" ++ x ++ ") ... @" ++ show l ++ " bind " ++ show (Map.toList env)
|
fiigii/AbstractInterpretation
|
AbstractInterpreter.hs
|
mit
| 4,550
| 0
| 18
| 1,427
| 1,677
| 844
| 833
| 110
| 2
|
module Examples.Combined where
import Control.Effects.Eff
import Control.Effects.Exception
import Control.Effects.Reader
import Control.Effects.Search
import Control.Monad
testPrg = do
v <- ask
x <- choose [1..(v :: Int)]
when (x < 5) $ searchFail (T :: T Int)
y <- choose [3,4]
let d = x - y
if d == 0
then throw $ "division by zero"
else return ((fromIntegral v) / (fromIntegral d))
testPrgRun n = runPure
. handle exceptionHandler
. handle (handleDFS :: Handler (Search Int) r a [a])
. handle (readerHandler n)
testPrgRes :: Int -> Either String [Float]
testPrgRes n = testPrgRun n testPrg
testPrgRun2 n = runPure
. handle (handleDFS :: Handler (Search Int) r a [a])
. handle exceptionHandler
. handle (readerHandler n)
testPrgRes2 :: Int -> [Either String Float]
testPrgRes2 n = testPrgRun2 n testPrg
|
edofic/effect-handlers
|
test/Examples/Combined.hs
|
mit
| 936
| 0
| 12
| 261
| 345
| 177
| 168
| 27
| 2
|
-- |
-- Module : $Header$
-- Description : The compiler pipeline
-- Copyright : (c) Sebastian Ertel and Justus Adam 2017. All Rights Reserved.
-- License : EPL-1.0
-- Maintainer : sebastian.ertel@gmail.com, dev@justus.science
-- Stability : experimental
-- Portability : portable
-- This source code is licensed under the terms described in the associated LICENSE.TXT file
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
module Ohua.Compile where
import Ohua.Prelude
import qualified Data.HashSet as HS
import Control.Lens (view)
import Ohua.ALang.Lang
import Ohua.ALang.Passes
import Ohua.ALang.Passes.SSA
import Ohua.Feature.TailRec (loadTailRecPasses)
import Ohua.ALang.Passes.Verify
import Ohua.ALang.Refs as Refs
import Ohua.Compile.Configuration
import Ohua.DFGraph
import Ohua.DFLang.PPrint ()
import Ohua.DFLang.Passes
import qualified Ohua.DFLang.Verify
import Ohua.Stage
import Ohua.Feature.TailRec.Passes.ALang (y)
forceLog :: (MonadLogger m, NFData a) => Text -> a -> m ()
forceLog msg a = a `deepseq` logDebugN msg
-- | The canonical order of transformations and lowerings performed in a full compilation.
pipeline :: CustomPasses env -> Expression -> OhuaM env OutGraph
pipeline CustomPasses {..} e = do
stage resolvedAlang e
ssaE <- performSSA e
stage ssaAlang ssaE
normalizedE <- normalize =<< passBeforeNormalize ssaE
stage normalizedAlang normalizedE
whenDebug $ do
checkProgramValidity normalizedE
checkHigherOrderFunctionSupport normalizedE
Ohua.ALang.Passes.SSA.checkSSA normalizedE
customAfterNorm <- passAfterNormalize normalizedE
stage customAlangPasses customAfterNorm
coreE <- Ohua.ALang.Passes.runCorePasses =<< normalize customAfterNorm
stage coreAlang coreE
whenDebug $ do
Ohua.ALang.Passes.SSA.checkSSA coreE
Ohua.ALang.Passes.Verify.checkInvariants coreE
dfE <- lowerALang =<< normalize coreE
stage initialDflang dfE
Ohua.DFLang.Verify.verify dfE
whenDebug $ Ohua.DFLang.Passes.checkSSAExpr dfE
dfAfterCustom <- passAfterDFLowering dfE
stage customDflang dfAfterCustom
coreDfE <- Ohua.DFLang.Passes.runCorePasses dfAfterCustom
stage coreDflang coreDfE
whenDebug $ Ohua.DFLang.Passes.checkSSAExpr coreDfE
pure $ toGraph coreDfE
-- | Run the pipeline in an arbitrary monad that supports error reporting.
compile ::
(MonadError Error m, MonadLoggerIO m)
=> Options
-> CustomPasses env
-> Expression
-> m OutGraph
compile opts passes exprs = do
logFn <- askLoggerIO
let passes' =
flip loadTailRecPasses passes $
view transformRecursiveFunctions opts
either throwError pure =<<
liftIO (runLoggingT (runFromExpr opts (pipeline passes') exprs) logFn)
stdHofNames :: HashSet QualifiedBinding
stdHofNames = HS.fromList [Refs.smap, Refs.ifThenElse, Refs.seq, Refs.recur, y]
-- | Verify that only higher order functions have lambdas as arguments
checkHigherOrderFunctionSupport :: MonadOhua m => Expression -> m ()
checkHigherOrderFunctionSupport (Let _ e rest) = do
void $ checkNestedExpr e
checkHigherOrderFunctionSupport rest
where
checkNestedExpr (Apply f arg) = do
supportsHOF <- checkNestedExpr f
when (isLambda arg && not supportsHOF) $
failWith $
"Lambdas may only be input to higher order functions, not " <>
show f
pure True
checkNestedExpr (PureFunction n _)
| HS.member n stdHofNames = pure True
| otherwise = HS.member n . (view $ options . higherOrderFunctions) <$> getEnvironment
checkNestedExpr (Var _) = pure False
checkNestedExpr (BindState _ m) = checkNestedExpr m
checkNestedExpr a = failWith $ "Expected var or apply expr, got " <> show a
isLambda (Lambda _ _) = True
isLambda _ = False
checkHigherOrderFunctionSupport (Var _) = pure ()
checkHigherOrderFunctionSupport a =
failWith $ "Expected let or var, got " <> show a
|
ohua-dev/ohua-core
|
core/src/Ohua/Compile.hs
|
epl-1.0
| 4,001
| 0
| 15
| 806
| 956
| 479
| 477
| -1
| -1
|
{-# LANGUAGE TemplateHaskell, StandaloneDeriving #-}
{-| Implementation of opcodes parameters.
These are defined in a separate module only due to TemplateHaskell
stage restrictions - expressions defined in the current module can't
be passed to splices. So we have to either parameters/repeat each
parameter definition multiple times, or separate them into this
module.
-}
{-
Copyright (C) 2012, 2014 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.OpParams
( ReplaceDisksMode(..)
, DiskIndex
, mkDiskIndex
, unDiskIndex
, DiskAccess(..)
, INicParams(..)
, IDiskParams(..)
, RecreateDisksInfo(..)
, DdmOldChanges(..)
, SetParamsMods(..)
, ExportTarget(..)
, pInstanceName
, pInstallImage
, pInstanceCommunication
, pOptInstanceCommunication
, pInstanceUuid
, pInstances
, pName
, pTagsList
, pTagsObject
, pTagsName
, pOutputFields
, pShutdownTimeout
, pShutdownTimeout'
, pShutdownInstance
, pForce
, pIgnoreOfflineNodes
, pNodeName
, pNodeUuid
, pNodeNames
, pNodeUuids
, pGroupName
, pMigrationMode
, pMigrationLive
, pMigrationCleanup
, pForceVariant
, pWaitForSync
, pWaitForSyncFalse
, pIgnoreConsistency
, pStorageName
, pUseLocking
, pOpportunisticLocking
, pNameCheck
, pNodeGroupAllocPolicy
, pGroupNodeParams
, pQueryWhat
, pEarlyRelease
, pIpCheck
, pIpConflictsCheck
, pNoRemember
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pMoveTargetNode
, pMoveTargetNodeUuid
, pMoveCompress
, pBackupCompress
, pStartupPaused
, pVerbose
, pDebugSimulateErrors
, pErrorCodes
, pSkipChecks
, pIgnoreErrors
, pOptGroupName
, pGroupDiskParams
, pHvState
, pDiskState
, pIgnoreIpolicy
, pHotplug
, pHotplugIfPossible
, pAllowRuntimeChgs
, pInstDisks
, pDiskTemplate
, pOptDiskTemplate
, pExtParams
, pFileDriver
, pFileStorageDir
, pClusterFileStorageDir
, pClusterSharedFileStorageDir
, pClusterGlusterStorageDir
, pInstanceCommunicationNetwork
, pZeroingImage
, pCompressionTools
, pVgName
, pEnabledHypervisors
, pHypervisor
, pClusterHvParams
, pInstHvParams
, pClusterBeParams
, pInstBeParams
, pResetDefaults
, pOsHvp
, pClusterOsParams
, pClusterOsParamsPrivate
, pInstOsParams
, pInstOsParamsPrivate
, pInstOsParamsSecret
, pCandidatePoolSize
, pMaxRunningJobs
, pMaxTrackedJobs
, pUidPool
, pAddUids
, pRemoveUids
, pMaintainNodeHealth
, pModifyEtcHosts
, pPreallocWipeDisks
, pNicParams
, pInstNics
, pNdParams
, pIpolicy
, pDrbdHelper
, pDefaultIAllocator
, pDefaultIAllocatorParams
, pMasterNetdev
, pMasterNetmask
, pReservedLvs
, pHiddenOs
, pBlacklistedOs
, pUseExternalMipScript
, pQueryFields
, pQueryFilter
, pQueryFieldsFields
, pOobCommand
, pOobTimeout
, pIgnoreStatus
, pPowerDelay
, pPrimaryIp
, pSecondaryIp
, pReadd
, pNodeGroup
, pMasterCapable
, pVmCapable
, pNames
, pNodes
, pRequiredNodes
, pRequiredNodeUuids
, pStorageType
, pOptStorageType
, pStorageChanges
, pMasterCandidate
, pOffline
, pDrained
, pAutoPromote
, pPowered
, pIallocator
, pRemoteNode
, pRemoteNodeUuid
, pEvacMode
, pInstCreateMode
, pNoInstall
, pInstOs
, pPrimaryNode
, pPrimaryNodeUuid
, pSecondaryNode
, pSecondaryNodeUuid
, pSourceHandshake
, pSourceInstance
, pSourceShutdownTimeout
, pSourceX509Ca
, pSrcNode
, pSrcNodeUuid
, pSrcPath
, pStartInstance
, pInstTags
, pMultiAllocInstances
, pTempOsParams
, pTempOsParamsPrivate
, pTempOsParamsSecret
, pTempHvParams
, pTempBeParams
, pIgnoreFailures
, pNewName
, pIgnoreSecondaries
, pRebootType
, pIgnoreDiskSize
, pRecreateDisksInfo
, pStatic
, pInstParamsNicChanges
, pInstParamsDiskChanges
, pRuntimeMem
, pOsNameChange
, pDiskIndex
, pDiskChgAmount
, pDiskChgAbsolute
, pTargetGroups
, pExportMode
, pExportTargetNode
, pExportTargetNodeUuid
, pRemoveInstance
, pIgnoreRemoveFailures
, pX509KeyName
, pX509DestCA
, pZeroFreeSpace
, pHelperStartupTimeout
, pHelperShutdownTimeout
, pZeroingTimeoutFixed
, pZeroingTimeoutPerMiB
, pTagSearchPattern
, pRestrictedCommand
, pReplaceDisksMode
, pReplaceDisksList
, pAllowFailover
, pForceFailover
, pDelayDuration
, pDelayOnMaster
, pDelayOnNodes
, pDelayOnNodeUuids
, pDelayRepeat
, pDelayInterruptible
, pDelayNoLocks
, pIAllocatorDirection
, pIAllocatorMode
, pIAllocatorReqName
, pIAllocatorNics
, pIAllocatorDisks
, pIAllocatorMemory
, pIAllocatorVCpus
, pIAllocatorOs
, pIAllocatorInstances
, pIAllocatorEvacMode
, pIAllocatorSpindleUse
, pIAllocatorCount
, pJQueueNotifyWaitLock
, pJQueueNotifyExec
, pJQueueLogMessages
, pJQueueFail
, pTestDummyResult
, pTestDummyMessages
, pTestDummyFail
, pTestDummySubmitJobs
, pNetworkName
, pNetworkAddress4
, pNetworkGateway4
, pNetworkAddress6
, pNetworkGateway6
, pNetworkMacPrefix
, pNetworkAddRsvdIps
, pNetworkRemoveRsvdIps
, pNetworkMode
, pNetworkLink
, pNetworkVlan
, pDryRun
, pDebugLevel
, pOpPriority
, pDependencies
, pComment
, pReason
, pSequential
, pEnabledDiskTemplates
, pEnabledUserShutdown
, pAdminStateSource
) where
import Control.Monad (liftM, mplus)
import Text.JSON (JSON, JSValue(..), JSObject (..), readJSON, showJSON,
fromJSString, toJSObject)
import qualified Text.JSON
import Text.JSON.Pretty (pp_value)
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import Ganeti.THH
import Ganeti.THH.Field
import Ganeti.Utils
import Ganeti.JSON
import Ganeti.Types
import qualified Ganeti.Query.Language as Qlang
-- * Helper functions and types
-- | Build a boolean field.
booleanField :: String -> Field
booleanField = flip simpleField [t| Bool |]
-- | Default a field to 'False'.
defaultFalse :: String -> Field
defaultFalse = defaultField [| False |] . booleanField
-- | Default a field to 'True'.
defaultTrue :: String -> Field
defaultTrue = defaultField [| True |] . booleanField
-- | An alias for a 'String' field.
stringField :: String -> Field
stringField = flip simpleField [t| String |]
-- | An alias for an optional string field.
optionalStringField :: String -> Field
optionalStringField = optionalField . stringField
-- | An alias for an optional non-empty string field.
optionalNEStringField :: String -> Field
optionalNEStringField = optionalField . flip simpleField [t| NonEmptyString |]
-- | Function to force a non-negative value, without returning via a
-- monad. This is needed for, and should be used /only/ in the case of
-- forcing constants. In case the constant is wrong (< 0), this will
-- become a runtime error.
forceNonNeg :: (Num a, Ord a, Show a) => a -> NonNegative a
forceNonNeg i = case mkNonNegative i of
Ok n -> n
Bad msg -> error msg
-- ** Disks
-- | Disk index type (embedding constraints on the index value via a
-- smart constructor).
newtype DiskIndex = DiskIndex { unDiskIndex :: Int }
deriving (Show, Eq, Ord)
-- | Smart constructor for 'DiskIndex'.
mkDiskIndex :: (Monad m) => Int -> m DiskIndex
mkDiskIndex i | i >= 0 && i < C.maxDisks = return (DiskIndex i)
| otherwise = fail $ "Invalid value for disk index '" ++
show i ++ "', required between 0 and " ++
show C.maxDisks
instance JSON DiskIndex where
readJSON v = readJSON v >>= mkDiskIndex
showJSON = showJSON . unDiskIndex
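-- Illustration (hypothetical values, assuming C.maxDisks > 3 and the
-- classic 'Monad' 'fail', e.g. in 'Maybe'):
--   mkDiskIndex 3    ~ Just (DiskIndex 3)
--   mkDiskIndex (-1) ~ Nothing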
-- ** I* param types
-- | Type holding disk access modes.
$(declareSADT "DiskAccess"
[ ("DiskReadOnly", 'C.diskRdonly)
, ("DiskReadWrite", 'C.diskRdwr)
])
$(makeJSONInstance ''DiskAccess)
-- | NIC modification definition.
$(buildObject "INicParams" "inic"
[ optionalField $ simpleField C.inicMac [t| NonEmptyString |]
, optionalField $ simpleField C.inicIp [t| String |]
, optionalField $ simpleField C.inicMode [t| NonEmptyString |]
, optionalField $ simpleField C.inicLink [t| NonEmptyString |]
, optionalField $ simpleField C.inicName [t| NonEmptyString |]
, optionalField $ simpleField C.inicVlan [t| String |]
, optionalField $ simpleField C.inicBridge [t| NonEmptyString |]
, optionalField $ simpleField C.inicNetwork [t| NonEmptyString |]
])
deriving instance Ord INicParams
-- | Disk modification definition.
$(buildObject "IDiskParams" "idisk"
[ specialNumericalField 'parseUnitAssumeBinary . optionalField
$ simpleField C.idiskSize [t| Int |]
, optionalField $ simpleField C.idiskMode [t| DiskAccess |]
, optionalField $ simpleField C.idiskAdopt [t| NonEmptyString |]
, optionalField $ simpleField C.idiskVg [t| NonEmptyString |]
, optionalField $ simpleField C.idiskMetavg [t| NonEmptyString |]
, optionalField $ simpleField C.idiskName [t| NonEmptyString |]
, optionalField $ simpleField C.idiskProvider [t| NonEmptyString |]
, optionalField $ simpleField C.idiskSpindles [t| Int |]
, optionalField $ simpleField C.idiskAccess [t| NonEmptyString |]
, andRestArguments "opaque"
])
deriving instance Ord IDiskParams
-- | Disk changes type for OpInstanceRecreateDisks. This is a bit
-- strange, because the type in Python is something like Either
-- [DiskIndex] [DiskChanges], but we can't represent the type of an
-- empty list in JSON, so we have to add a custom case for the empty
-- list.
data RecreateDisksInfo
= RecreateDisksAll
| RecreateDisksIndices (NonEmpty DiskIndex)
| RecreateDisksParams (NonEmpty (DiskIndex, IDiskParams))
deriving (Eq, Show, Ord)
readRecreateDisks :: JSValue -> Text.JSON.Result RecreateDisksInfo
readRecreateDisks (JSArray []) = return RecreateDisksAll
readRecreateDisks v =
case readJSON v::Text.JSON.Result [DiskIndex] of
Text.JSON.Ok indices -> liftM RecreateDisksIndices (mkNonEmpty indices)
_ -> case readJSON v::Text.JSON.Result [(DiskIndex, IDiskParams)] of
Text.JSON.Ok params -> liftM RecreateDisksParams (mkNonEmpty params)
_ -> fail $ "Can't parse disk information as either list of disk"
++ " indices or list of disk parameters; value received:"
++ show (pp_value v)
instance JSON RecreateDisksInfo where
readJSON = readRecreateDisks
showJSON RecreateDisksAll = showJSON ()
showJSON (RecreateDisksIndices idx) = showJSON idx
showJSON (RecreateDisksParams params) = showJSON params
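-- Sketch of the accepted encodings (hypothetical JSON values):
--   []                    ~> RecreateDisksAll
--   [0, 2]                ~> RecreateDisksIndices (disks 0 and 2)
--   [[0, {"size": 1024}]] ~> RecreateDisksParams (per-disk settings)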
-- | Simple type for old-style ddm changes.
data DdmOldChanges = DdmOldIndex (NonNegative Int)
| DdmOldMod DdmSimple
deriving (Eq, Show, Ord)
readDdmOldChanges :: JSValue -> Text.JSON.Result DdmOldChanges
readDdmOldChanges v =
case readJSON v::Text.JSON.Result (NonNegative Int) of
Text.JSON.Ok nn -> return $ DdmOldIndex nn
_ -> case readJSON v::Text.JSON.Result DdmSimple of
Text.JSON.Ok ddms -> return $ DdmOldMod ddms
_ -> fail $ "Can't parse value '" ++ show (pp_value v) ++ "' as"
++ " either index or modification"
instance JSON DdmOldChanges where
showJSON (DdmOldIndex i) = showJSON i
showJSON (DdmOldMod m) = showJSON m
readJSON = readDdmOldChanges
-- | Instance disk or nic modifications.
data SetParamsMods a
= SetParamsEmpty
| SetParamsDeprecated (NonEmpty (DdmOldChanges, a))
| SetParamsNew (NonEmpty (DdmFull, Int, a))
| SetParamsNewName (NonEmpty (DdmFull, String, a))
deriving (Eq, Show, Ord)
-- | Custom deserialiser for 'SetParamsMods'.
readSetParams :: (JSON a) => JSValue -> Text.JSON.Result (SetParamsMods a)
readSetParams (JSArray []) = return SetParamsEmpty
readSetParams v =
liftM SetParamsDeprecated (readJSON v)
`mplus` liftM SetParamsNew (readJSON v)
`mplus` liftM SetParamsNewName (readJSON v)
instance (JSON a) => JSON (SetParamsMods a) where
showJSON SetParamsEmpty = showJSON ()
showJSON (SetParamsDeprecated v) = showJSON v
showJSON (SetParamsNew v) = showJSON v
showJSON (SetParamsNewName v) = showJSON v
readJSON = readSetParams
-- | Custom type for target_node parameter of OpBackupExport, which
-- varies depending on mode. FIXME: this uses an [JSValue] since
-- we don't care about individual rows (just like the Python code
-- tests). But the proper type could be parsed if we wanted.
data ExportTarget = ExportTargetLocal NonEmptyString
| ExportTargetRemote [JSValue]
deriving (Eq, Show, Ord)
-- | Custom reader for 'ExportTarget'.
readExportTarget :: JSValue -> Text.JSON.Result ExportTarget
readExportTarget (JSString s) = liftM ExportTargetLocal $
mkNonEmpty (fromJSString s)
readExportTarget (JSArray arr) = return $ ExportTargetRemote arr
readExportTarget v = fail $ "Invalid value received for 'target_node': " ++
show (pp_value v)
instance JSON ExportTarget where
showJSON (ExportTargetLocal s) = showJSON s
showJSON (ExportTargetRemote l) = showJSON l
readJSON = readExportTarget
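-- Illustration (hypothetical values): a JSON string is a local target,
-- a JSON array a remote one:
--   "node1.example.com"  ~> ExportTargetLocal ...
--   [ <x509 rows> ]      ~> ExportTargetRemote ...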
-- * Common opcode parameters
pDryRun :: Field
pDryRun =
withDoc "Run checks only, don't execute" .
optionalField $ booleanField "dry_run"
pDebugLevel :: Field
pDebugLevel =
withDoc "Debug level" .
optionalField $ simpleField "debug_level" [t| NonNegative Int |]
pOpPriority :: Field
pOpPriority =
withDoc "Opcode priority. Note: python uses a separate constant,\
          \ we're using the actual value, which we know is the default" .
defaultField [| OpPrioNormal |] $
simpleField "priority" [t| OpSubmitPriority |]
pDependencies :: Field
pDependencies =
withDoc "Job dependencies" .
optionalNullSerField $ simpleField "depends" [t| [JobDependency] |]
pComment :: Field
pComment =
withDoc "Comment field" .
optionalNullSerField $ stringField "comment"
pReason :: Field
pReason =
withDoc "Reason trail field" $
simpleField C.opcodeReason [t| ReasonTrail |]
pSequential :: Field
pSequential =
withDoc "Sequential job execution" $
defaultFalse C.opcodeSequential
-- * Parameters
pDebugSimulateErrors :: Field
pDebugSimulateErrors =
withDoc "Whether to simulate errors (useful for debugging)" $
defaultFalse "debug_simulate_errors"
pErrorCodes :: Field
pErrorCodes =
withDoc "Error codes" $
defaultFalse "error_codes"
pSkipChecks :: Field
pSkipChecks =
withDoc "Which checks to skip" .
defaultField [| emptyListSet |] $
simpleField "skip_checks" [t| ListSet VerifyOptionalChecks |]
pIgnoreErrors :: Field
pIgnoreErrors =
withDoc "List of error codes that should be treated as warnings" .
defaultField [| emptyListSet |] $
simpleField "ignore_errors" [t| ListSet CVErrorCode |]
pVerbose :: Field
pVerbose =
withDoc "Verbose mode" $
defaultFalse "verbose"
pOptGroupName :: Field
pOptGroupName =
withDoc "Optional group name" .
renameField "OptGroupName" .
optionalField $ simpleField "group_name" [t| NonEmptyString |]
pGroupName :: Field
pGroupName =
withDoc "Group name" $
simpleField "group_name" [t| NonEmptyString |]
-- | Whether to hotplug device.
pHotplug :: Field
pHotplug = defaultFalse "hotplug"
pHotplugIfPossible :: Field
pHotplugIfPossible = defaultFalse "hotplug_if_possible"
pInstances :: Field
pInstances =
withDoc "List of instances" .
defaultField [| [] |] $
simpleField "instances" [t| [NonEmptyString] |]
pOutputFields :: Field
pOutputFields =
withDoc "Selected output fields" $
simpleField "output_fields" [t| [NonEmptyString] |]
pName :: Field
pName =
withDoc "A generic name" $
simpleField "name" [t| NonEmptyString |]
pForce :: Field
pForce =
withDoc "Whether to force the operation" $
defaultFalse "force"
pHvState :: Field
pHvState =
withDoc "Set hypervisor states" .
optionalField $ simpleField "hv_state" [t| JSObject JSValue |]
pDiskState :: Field
pDiskState =
withDoc "Set disk states" .
optionalField $ simpleField "disk_state" [t| JSObject JSValue |]
-- | Cluster-wide default directory for storing file-backed disks.
pClusterFileStorageDir :: Field
pClusterFileStorageDir =
renameField "ClusterFileStorageDir" $
optionalStringField "file_storage_dir"
-- | Cluster-wide default directory for storing shared-file-backed disks.
pClusterSharedFileStorageDir :: Field
pClusterSharedFileStorageDir =
renameField "ClusterSharedFileStorageDir" $
optionalStringField "shared_file_storage_dir"
-- | Cluster-wide default directory for storing Gluster-backed disks.
pClusterGlusterStorageDir :: Field
pClusterGlusterStorageDir =
renameField "ClusterGlusterStorageDir" $
optionalStringField "gluster_storage_dir"
pInstallImage :: Field
pInstallImage =
withDoc "OS image for running OS scripts in a safe environment" $
optionalStringField "install_image"
pInstanceCommunicationNetwork :: Field
pInstanceCommunicationNetwork =
optionalStringField "instance_communication_network"
-- | The OS to use when zeroing instance disks.
pZeroingImage :: Field
pZeroingImage =
optionalStringField "zeroing_image"
-- | The additional tools that can be used to compress data in transit
pCompressionTools :: Field
pCompressionTools =
withDoc "List of enabled compression tools" . optionalField $
simpleField "compression_tools" [t| [NonEmptyString] |]
-- | Volume group name.
pVgName :: Field
pVgName =
withDoc "Volume group name" $
optionalStringField "vg_name"
pEnabledHypervisors :: Field
pEnabledHypervisors =
withDoc "List of enabled hypervisors" .
optionalField $
simpleField "enabled_hypervisors" [t| [Hypervisor] |]
pClusterHvParams :: Field
pClusterHvParams =
withDoc "Cluster-wide hypervisor parameters, hypervisor-dependent" .
renameField "ClusterHvParams" .
optionalField $
simpleField "hvparams" [t| GenericContainer String (JSObject JSValue) |]
pClusterBeParams :: Field
pClusterBeParams =
withDoc "Cluster-wide backend parameter defaults" .
renameField "ClusterBeParams" .
optionalField $ simpleField "beparams" [t| JSObject JSValue |]
pOsHvp :: Field
pOsHvp =
withDoc "Cluster-wide per-OS hypervisor parameter defaults" .
optionalField $
simpleField "os_hvp" [t| GenericContainer String (JSObject JSValue) |]
pClusterOsParams :: Field
pClusterOsParams =
withDoc "Cluster-wide OS parameter defaults" .
renameField "ClusterOsParams" .
optionalField $
simpleField "osparams" [t| GenericContainer String (JSObject JSValue) |]
pClusterOsParamsPrivate :: Field
pClusterOsParamsPrivate =
withDoc "Cluster-wide private OS parameter defaults" .
renameField "ClusterOsParamsPrivate" .
optionalField $
-- This field needs an unique name to aid Python deserialization
simpleField "osparams_private_cluster"
[t| GenericContainer String (JSObject (Private JSValue)) |]
pGroupDiskParams :: Field
pGroupDiskParams =
withDoc "Disk templates' parameter defaults" .
optionalField $
simpleField "diskparams"
[t| GenericContainer DiskTemplate (JSObject JSValue) |]
pCandidatePoolSize :: Field
pCandidatePoolSize =
withDoc "Master candidate pool size" .
optionalField $ simpleField "candidate_pool_size" [t| Positive Int |]
pMaxRunningJobs :: Field
pMaxRunningJobs =
withDoc "Maximal number of jobs to run simultaneously" .
optionalField $ simpleField "max_running_jobs" [t| Positive Int |]
pMaxTrackedJobs :: Field
pMaxTrackedJobs =
withDoc "Maximal number of jobs tracked in the job queue" .
optionalField $ simpleField "max_tracked_jobs" [t| Positive Int |]
pUidPool :: Field
pUidPool =
withDoc "Set UID pool, must be list of lists describing UID ranges\
\ (two items, start and end inclusive)" .
optionalField $ simpleField "uid_pool" [t| [(Int, Int)] |]
pAddUids :: Field
pAddUids =
withDoc "Extend UID pool, must be list of lists describing UID\
\ ranges (two items, start and end inclusive)" .
optionalField $ simpleField "add_uids" [t| [(Int, Int)] |]
pRemoveUids :: Field
pRemoveUids =
withDoc "Shrink UID pool, must be list of lists describing UID\
\ ranges (two items, start and end inclusive) to be removed" .
optionalField $ simpleField "remove_uids" [t| [(Int, Int)] |]
pMaintainNodeHealth :: Field
pMaintainNodeHealth =
withDoc "Whether to automatically maintain node health" .
optionalField $ booleanField "maintain_node_health"
-- | Whether to modify and keep in sync the @/etc/hosts@ files of nodes.
pModifyEtcHosts :: Field
pModifyEtcHosts = optionalField $ booleanField "modify_etc_hosts"
-- | Whether to wipe disks before allocating them to instances.
pPreallocWipeDisks :: Field
pPreallocWipeDisks =
withDoc "Whether to wipe disks before allocating them to instances" .
optionalField $ booleanField "prealloc_wipe_disks"
pNicParams :: Field
pNicParams =
withDoc "Cluster-wide NIC parameter defaults" .
optionalField $ simpleField "nicparams" [t| INicParams |]
pIpolicy :: Field
pIpolicy =
withDoc "Ipolicy specs" .
optionalField $ simpleField "ipolicy" [t| JSObject JSValue |]
pDrbdHelper :: Field
pDrbdHelper =
withDoc "DRBD helper program" $
optionalStringField "drbd_helper"
pDefaultIAllocator :: Field
pDefaultIAllocator =
withDoc "Default iallocator for cluster" $
optionalStringField "default_iallocator"
pDefaultIAllocatorParams :: Field
pDefaultIAllocatorParams =
withDoc "Default iallocator parameters for cluster" . optionalField
$ simpleField "default_iallocator_params" [t| JSObject JSValue |]
pMasterNetdev :: Field
pMasterNetdev =
withDoc "Master network device" $
optionalStringField "master_netdev"
pMasterNetmask :: Field
pMasterNetmask =
withDoc "Netmask of the master IP" .
optionalField $ simpleField "master_netmask" [t| NonNegative Int |]
pReservedLvs :: Field
pReservedLvs =
withDoc "List of reserved LVs" .
optionalField $ simpleField "reserved_lvs" [t| [NonEmptyString] |]
pHiddenOs :: Field
pHiddenOs =
withDoc "Modify list of hidden operating systems: each modification\
\ must have two items, the operation and the OS name; the operation\
\ can be add or remove" .
optionalField $ simpleField "hidden_os" [t| [(DdmSimple, NonEmptyString)] |]
pBlacklistedOs :: Field
pBlacklistedOs =
withDoc "Modify list of blacklisted operating systems: each\
\ modification must have two items, the operation and the OS name;\
\ the operation can be add or remove" .
optionalField $
simpleField "blacklisted_os" [t| [(DdmSimple, NonEmptyString)] |]
pUseExternalMipScript :: Field
pUseExternalMipScript =
withDoc "Whether to use an external master IP address setup script" .
optionalField $ booleanField "use_external_mip_script"
pEnabledDiskTemplates :: Field
pEnabledDiskTemplates =
withDoc "List of enabled disk templates" .
optionalField $
simpleField "enabled_disk_templates" [t| [DiskTemplate] |]
pEnabledUserShutdown :: Field
pEnabledUserShutdown =
withDoc "Whether user shutdown is enabled cluster wide" .
optionalField $
simpleField "enabled_user_shutdown" [t| Bool |]
pQueryWhat :: Field
pQueryWhat =
withDoc "Resource(s) to query for" $
simpleField "what" [t| Qlang.QueryTypeOp |]
pUseLocking :: Field
pUseLocking =
withDoc "Whether to use synchronization" $
defaultFalse "use_locking"
pQueryFields :: Field
pQueryFields =
withDoc "Requested fields" $
simpleField "fields" [t| [NonEmptyString] |]
pQueryFilter :: Field
pQueryFilter =
withDoc "Query filter" .
optionalField $ simpleField "qfilter" [t| [JSValue] |]
pQueryFieldsFields :: Field
pQueryFieldsFields =
withDoc "Requested fields; if not given, all are returned" .
renameField "QueryFieldsFields" $
optionalField pQueryFields
pNodeNames :: Field
pNodeNames =
withDoc "List of node names to run the OOB command against" .
defaultField [| [] |] $ simpleField "node_names" [t| [NonEmptyString] |]
pNodeUuids :: Field
pNodeUuids =
withDoc "List of node UUIDs" .
optionalField $ simpleField "node_uuids" [t| [NonEmptyString] |]
pOobCommand :: Field
pOobCommand =
withDoc "OOB command to run" .
renameField "OobCommand" $ simpleField "command" [t| OobCommand |]
pOobTimeout :: Field
pOobTimeout =
withDoc "Timeout before the OOB helper will be terminated" .
defaultField [| C.oobTimeout |] .
renameField "OobTimeout" $ simpleField "timeout" [t| Int |]
pIgnoreStatus :: Field
pIgnoreStatus =
withDoc "Ignores the node offline status for power off" $
defaultFalse "ignore_status"
pPowerDelay :: Field
pPowerDelay =
-- FIXME: we can't use the proper type "NonNegative Double", since
-- the default constant is a plain Double, not a non-negative one.
-- And trying to fix the constant introduces a cyclic import.
withDoc "Time in seconds to wait between powering on nodes" .
defaultField [| C.oobPowerDelay |] $
simpleField "power_delay" [t| Double |]
pRequiredNodes :: Field
pRequiredNodes =
withDoc "Required list of node names" .
renameField "ReqNodes " $ simpleField "nodes" [t| [NonEmptyString] |]
pRequiredNodeUuids :: Field
pRequiredNodeUuids =
withDoc "Required list of node UUIDs" .
renameField "ReqNodeUuids " . optionalField $
simpleField "node_uuids" [t| [NonEmptyString] |]
pRestrictedCommand :: Field
pRestrictedCommand =
withDoc "Restricted command name" .
renameField "RestrictedCommand" $
simpleField "command" [t| NonEmptyString |]
pNodeName :: Field
pNodeName =
withDoc "A required node name (for single-node LUs)" $
simpleField "node_name" [t| NonEmptyString |]
pNodeUuid :: Field
pNodeUuid =
withDoc "A node UUID (for single-node LUs)" .
optionalField $ simpleField "node_uuid" [t| NonEmptyString |]
pPrimaryIp :: Field
pPrimaryIp =
withDoc "Primary IP address" .
optionalField $
simpleField "primary_ip" [t| NonEmptyString |]
pSecondaryIp :: Field
pSecondaryIp =
withDoc "Secondary IP address" $
optionalNEStringField "secondary_ip"
pReadd :: Field
pReadd =
withDoc "Whether node is re-added to cluster" $
defaultFalse "readd"
pNodeGroup :: Field
pNodeGroup =
withDoc "Initial node group" $
optionalNEStringField "group"
pMasterCapable :: Field
pMasterCapable =
withDoc "Whether node can become master or master candidate" .
optionalField $ booleanField "master_capable"
pVmCapable :: Field
pVmCapable =
withDoc "Whether node can host instances" .
optionalField $ booleanField "vm_capable"
pNdParams :: Field
pNdParams =
withDoc "Node parameters" .
renameField "genericNdParams" .
optionalField $ simpleField "ndparams" [t| JSObject JSValue |]
pNames :: Field
pNames =
withDoc "List of names" .
defaultField [| [] |] $ simpleField "names" [t| [NonEmptyString] |]
pNodes :: Field
pNodes =
withDoc "List of nodes" .
defaultField [| [] |] $ simpleField "nodes" [t| [NonEmptyString] |]
pStorageType :: Field
pStorageType =
withDoc "Storage type" $ simpleField "storage_type" [t| StorageType |]
pOptStorageType :: Field
pOptStorageType =
withDoc "Storage type" .
renameField "OptStorageType" .
optionalField $ simpleField "storage_type" [t| StorageType |]
pStorageName :: Field
pStorageName =
withDoc "Storage name" .
renameField "StorageName" .
optionalField $ simpleField "name" [t| NonEmptyString |]
pStorageChanges :: Field
pStorageChanges =
withDoc "Requested storage changes" $
simpleField "changes" [t| JSObject JSValue |]
pIgnoreConsistency :: Field
pIgnoreConsistency =
withDoc "Whether to ignore disk consistency" $
defaultFalse "ignore_consistency"
pMasterCandidate :: Field
pMasterCandidate =
withDoc "Whether the node should become a master candidate" .
optionalField $ booleanField "master_candidate"
pOffline :: Field
pOffline =
withDoc "Whether to mark the node or instance offline" .
optionalField $ booleanField "offline"
pDrained ::Field
pDrained =
withDoc "Whether to mark the node as drained" .
optionalField $ booleanField "drained"
pAutoPromote :: Field
pAutoPromote =
withDoc "Whether node(s) should be promoted to master candidate if\
\ necessary" $
defaultFalse "auto_promote"
pPowered :: Field
pPowered =
withDoc "Whether the node should be marked as powered" .
optionalField $ booleanField "powered"
pMigrationMode :: Field
pMigrationMode =
withDoc "Migration type (live/non-live)" .
renameField "MigrationMode" .
optionalField $
simpleField "mode" [t| MigrationMode |]
pMigrationLive :: Field
pMigrationLive =
withDoc "Obsolete \'live\' migration mode (do not use)" .
renameField "OldLiveMode" . optionalField $ booleanField "live"
pMigrationTargetNode :: Field
pMigrationTargetNode =
withDoc "Target node for instance migration/failover" $
optionalNEStringField "target_node"
pMigrationTargetNodeUuid :: Field
pMigrationTargetNodeUuid =
withDoc "Target node UUID for instance migration/failover" $
optionalNEStringField "target_node_uuid"
pAllowRuntimeChgs :: Field
pAllowRuntimeChgs =
withDoc "Whether to allow runtime changes while migrating" $
defaultTrue "allow_runtime_changes"
pIgnoreIpolicy :: Field
pIgnoreIpolicy =
withDoc "Whether to ignore ipolicy violations" $
defaultFalse "ignore_ipolicy"
pIallocator :: Field
pIallocator =
withDoc "Iallocator for deciding the target node for shared-storage\
\ instances" $
optionalNEStringField "iallocator"
pEarlyRelease :: Field
pEarlyRelease =
withDoc "Whether to release locks as soon as possible" $
defaultFalse "early_release"
pRemoteNode :: Field
pRemoteNode =
withDoc "New secondary node" $
optionalNEStringField "remote_node"
pRemoteNodeUuid :: Field
pRemoteNodeUuid =
withDoc "New secondary node UUID" $
optionalNEStringField "remote_node_uuid"
pEvacMode :: Field
pEvacMode =
withDoc "Node evacuation mode" .
renameField "EvacMode" $ simpleField "mode" [t| EvacMode |]
pInstanceName :: Field
pInstanceName =
withDoc "A required instance name (for single-instance LUs)" $
simpleField "instance_name" [t| String |]
pInstanceCommunication :: Field
pInstanceCommunication =
withDoc C.instanceCommunicationDoc $
defaultFalse "instance_communication"
pOptInstanceCommunication :: Field
pOptInstanceCommunication =
withDoc C.instanceCommunicationDoc .
renameField "OptInstanceCommunication" .
optionalField $
booleanField "instance_communication"
pForceVariant :: Field
pForceVariant =
withDoc "Whether to force an unknown OS variant" $
defaultFalse "force_variant"
pWaitForSync :: Field
pWaitForSync =
withDoc "Whether to wait for the disk to synchronize" $
defaultTrue "wait_for_sync"
pNameCheck :: Field
pNameCheck =
withDoc "Whether to check name" $
defaultTrue "name_check"
pInstBeParams :: Field
pInstBeParams =
withDoc "Backend parameters for instance" .
renameField "InstBeParams" .
defaultField [| toJSObject [] |] $
simpleField "beparams" [t| JSObject JSValue |]
pInstDisks :: Field
pInstDisks =
withDoc "List of instance disks" .
renameField "instDisks" $ simpleField "disks" [t| [IDiskParams] |]
pDiskTemplate :: Field
pDiskTemplate =
withDoc "Disk template" $
simpleField "disk_template" [t| DiskTemplate |]
pExtParams :: Field
pExtParams =
withDoc "List of ExtStorage parameters" .
renameField "InstExtParams" .
defaultField [| toJSObject [] |] $
simpleField "ext_params" [t| JSObject JSValue |]
pFileDriver :: Field
pFileDriver =
withDoc "Driver for file-backed disks" .
optionalField $ simpleField "file_driver" [t| FileDriver |]
pFileStorageDir :: Field
pFileStorageDir =
withDoc "Directory for storing file-backed disks" $
optionalNEStringField "file_storage_dir"
pInstHvParams :: Field
pInstHvParams =
withDoc "Hypervisor parameters for instance, hypervisor-dependent" .
renameField "InstHvParams" .
defaultField [| toJSObject [] |] $
simpleField "hvparams" [t| JSObject JSValue |]
pHypervisor :: Field
pHypervisor =
withDoc "Selected hypervisor for an instance" .
optionalField $
simpleField "hypervisor" [t| Hypervisor |]
pResetDefaults :: Field
pResetDefaults =
withDoc "Reset instance parameters to default if equal" $
defaultFalse "identify_defaults"
pIpCheck :: Field
pIpCheck =
withDoc "Whether to ensure instance's IP address is inactive" $
defaultTrue "ip_check"
pIpConflictsCheck :: Field
pIpConflictsCheck =
withDoc "Whether to check for conflicting IP addresses" $
defaultTrue "conflicts_check"
pInstCreateMode :: Field
pInstCreateMode =
withDoc "Instance creation mode" .
renameField "InstCreateMode" $ simpleField "mode" [t| InstCreateMode |]
pInstNics :: Field
pInstNics =
withDoc "List of NIC (network interface) definitions" $
simpleField "nics" [t| [INicParams] |]
pNoInstall :: Field
pNoInstall =
withDoc "Do not install the OS (will disable automatic start)" .
optionalField $ booleanField "no_install"
pInstOs :: Field
pInstOs =
withDoc "OS type for instance installation" $
optionalNEStringField "os_type"
pInstOsParams :: Field
pInstOsParams =
withDoc "OS parameters for instance" .
renameField "InstOsParams" .
defaultField [| toJSObject [] |] $
simpleField "osparams" [t| JSObject JSValue |]
pInstOsParamsPrivate :: Field
pInstOsParamsPrivate =
withDoc "Private OS parameters for instance" .
optionalField $
simpleField "osparams_private" [t| JSObject (Private JSValue) |]
pInstOsParamsSecret :: Field
pInstOsParamsSecret =
withDoc "Secret OS parameters for instance" .
optionalField $
simpleField "osparams_secret" [t| JSObject (Private JSValue) |]
pPrimaryNode :: Field
pPrimaryNode =
withDoc "Primary node for an instance" $
optionalNEStringField "pnode"
pPrimaryNodeUuid :: Field
pPrimaryNodeUuid =
withDoc "Primary node UUID for an instance" $
optionalNEStringField "pnode_uuid"
pSecondaryNode :: Field
pSecondaryNode =
withDoc "Secondary node for an instance" $
optionalNEStringField "snode"
pSecondaryNodeUuid :: Field
pSecondaryNodeUuid =
withDoc "Secondary node UUID for an instance" $
optionalNEStringField "snode_uuid"
pSourceHandshake :: Field
pSourceHandshake =
withDoc "Signed handshake from source (remote import only)" .
optionalField $ simpleField "source_handshake" [t| [JSValue] |]
pSourceInstance :: Field
pSourceInstance =
withDoc "Source instance name (remote import only)" $
optionalNEStringField "source_instance_name"
-- FIXME: non-negative int, whereas the constant is a plain int.
pSourceShutdownTimeout :: Field
pSourceShutdownTimeout =
withDoc "How long source instance was given to shut down (remote import\
\ only)" .
defaultField [| forceNonNeg C.defaultShutdownTimeout |] $
simpleField "source_shutdown_timeout" [t| NonNegative Int |]
pSourceX509Ca :: Field
pSourceX509Ca =
withDoc "Source X509 CA in PEM format (remote import only)" $
optionalNEStringField "source_x509_ca"
pSrcNode :: Field
pSrcNode =
withDoc "Source node for import" $
optionalNEStringField "src_node"
pSrcNodeUuid :: Field
pSrcNodeUuid =
withDoc "Source node UUID for import" $
optionalNEStringField "src_node_uuid"
pSrcPath :: Field
pSrcPath =
withDoc "Source directory for import" $
optionalNEStringField "src_path"
pStartInstance :: Field
pStartInstance =
withDoc "Whether to start instance after creation" $
defaultTrue "start"
-- FIXME: unify/simplify with pTags, once that migrates to NonEmpty String
pInstTags :: Field
pInstTags =
withDoc "Instance tags" .
renameField "InstTags" .
defaultField [| [] |] $
simpleField "tags" [t| [NonEmptyString] |]
pMultiAllocInstances :: Field
pMultiAllocInstances =
withDoc "List of instance create opcodes describing the instances to\
\ allocate" .
renameField "InstMultiAlloc" .
defaultField [| [] |] $
simpleField "instances"[t| [JSValue] |]
pOpportunisticLocking :: Field
pOpportunisticLocking =
withDoc "Whether to employ opportunistic locking for nodes, meaning\
\ nodes already locked by another opcode won't be considered for\
\ instance allocation (only when an iallocator is used)" $
defaultFalse "opportunistic_locking"
pInstanceUuid :: Field
pInstanceUuid =
withDoc "An instance UUID (for single-instance LUs)" .
optionalField $ simpleField "instance_uuid" [t| NonEmptyString |]
pTempOsParams :: Field
pTempOsParams =
withDoc "Temporary OS parameters (currently only in reinstall, might be\
\ added to install as well)" .
renameField "TempOsParams" .
optionalField $ simpleField "osparams" [t| JSObject JSValue |]
pTempOsParamsPrivate :: Field
pTempOsParamsPrivate =
withDoc "Private OS parameters for instance reinstalls" .
optionalField $
simpleField "osparams_private" [t| JSObject (Private JSValue) |]
pTempOsParamsSecret :: Field
pTempOsParamsSecret =
withDoc "Secret OS parameters for instance reinstalls" .
optionalField $
simpleField "osparams_secret" [t| JSObject (Private JSValue) |]
pShutdownTimeout :: Field
pShutdownTimeout =
withDoc "How long to wait for instance to shut down" .
defaultField [| forceNonNeg C.defaultShutdownTimeout |] $
simpleField "shutdown_timeout" [t| NonNegative Int |]
-- | Another name for the shutdown timeout, because we like to be
-- inconsistent.
pShutdownTimeout' :: Field
pShutdownTimeout' =
withDoc "How long to wait for instance to shut down" .
renameField "InstShutdownTimeout" .
defaultField [| forceNonNeg C.defaultShutdownTimeout |] $
simpleField "timeout" [t| NonNegative Int |]
pIgnoreFailures :: Field
pIgnoreFailures =
withDoc "Whether to ignore failures during removal" $
defaultFalse "ignore_failures"
pNewName :: Field
pNewName =
withDoc "New group or instance name" $
simpleField "new_name" [t| NonEmptyString |]
pIgnoreOfflineNodes :: Field
pIgnoreOfflineNodes =
withDoc "Whether to ignore offline nodes" $
defaultFalse "ignore_offline_nodes"
pTempHvParams :: Field
pTempHvParams =
withDoc "Temporary hypervisor parameters, hypervisor-dependent" .
renameField "TempHvParams" .
defaultField [| toJSObject [] |] $
simpleField "hvparams" [t| JSObject JSValue |]
pTempBeParams :: Field
pTempBeParams =
withDoc "Temporary backend parameters" .
renameField "TempBeParams" .
defaultField [| toJSObject [] |] $
simpleField "beparams" [t| JSObject JSValue |]
pNoRemember :: Field
pNoRemember =
withDoc "Do not remember instance state changes" $
defaultFalse "no_remember"
pStartupPaused :: Field
pStartupPaused =
withDoc "Pause instance at startup" $
defaultFalse "startup_paused"
pIgnoreSecondaries :: Field
pIgnoreSecondaries =
withDoc "Whether to start the instance even if secondary disks are failing" $
defaultFalse "ignore_secondaries"
pRebootType :: Field
pRebootType =
withDoc "How to reboot the instance" $
simpleField "reboot_type" [t| RebootType |]
pReplaceDisksMode :: Field
pReplaceDisksMode =
withDoc "Replacement mode" .
renameField "ReplaceDisksMode" $ simpleField "mode" [t| ReplaceDisksMode |]
pReplaceDisksList :: Field
pReplaceDisksList =
withDoc "List of disk indices" .
renameField "ReplaceDisksList" .
defaultField [| [] |] $
simpleField "disks" [t| [DiskIndex] |]
pMigrationCleanup :: Field
pMigrationCleanup =
withDoc "Whether a previously failed migration should be cleaned up" .
renameField "MigrationCleanup" $ defaultFalse "cleanup"
pAllowFailover :: Field
pAllowFailover =
withDoc "Whether we can fallback to failover if migration is not possible" $
defaultFalse "allow_failover"
pForceFailover :: Field
pForceFailover =
withDoc "Disallow migration moves and always use failovers" $
defaultFalse "force_failover"
pMoveTargetNode :: Field
pMoveTargetNode =
withDoc "Target node for instance move" .
renameField "MoveTargetNode" $
simpleField "target_node" [t| NonEmptyString |]
pMoveTargetNodeUuid :: Field
pMoveTargetNodeUuid =
withDoc "Target node UUID for instance move" .
renameField "MoveTargetNodeUuid" . optionalField $
simpleField "target_node_uuid" [t| NonEmptyString |]
pMoveCompress :: Field
pMoveCompress =
withDoc "Compression mode to use during instance moves" .
defaultField [| C.iecNone |] $
simpleField "compress" [t| String |]
pBackupCompress :: Field
pBackupCompress =
withDoc "Compression mode to use for moves during backups/imports" .
defaultField [| C.iecNone |] $
simpleField "compress" [t| String |]
pIgnoreDiskSize :: Field
pIgnoreDiskSize =
withDoc "Whether to ignore recorded disk size" $
defaultFalse "ignore_size"
pWaitForSyncFalse :: Field
pWaitForSyncFalse =
withDoc "Whether to wait for the disk to synchronize (defaults to false)" $
defaultField [| False |] pWaitForSync
pRecreateDisksInfo :: Field
pRecreateDisksInfo =
withDoc "Disk list for recreate disks" .
renameField "RecreateDisksInfo" .
defaultField [| RecreateDisksAll |] $
simpleField "disks" [t| RecreateDisksInfo |]
pStatic :: Field
pStatic =
withDoc "Whether to only return configuration data without querying nodes" $
defaultFalse "static"
pInstParamsNicChanges :: Field
pInstParamsNicChanges =
withDoc "List of NIC changes" .
renameField "InstNicChanges" .
defaultField [| SetParamsEmpty |] $
simpleField "nics" [t| SetParamsMods INicParams |]
pInstParamsDiskChanges :: Field
pInstParamsDiskChanges =
withDoc "List of disk changes" .
renameField "InstDiskChanges" .
defaultField [| SetParamsEmpty |] $
simpleField "disks" [t| SetParamsMods IDiskParams |]
pRuntimeMem :: Field
pRuntimeMem =
withDoc "New runtime memory" .
optionalField $ simpleField "runtime_mem" [t| Positive Int |]
pOptDiskTemplate :: Field
pOptDiskTemplate =
withDoc "Instance disk template" .
optionalField .
renameField "OptDiskTemplate" $
simpleField "disk_template" [t| DiskTemplate |]
pOsNameChange :: Field
pOsNameChange =
withDoc "Change the instance's OS without reinstalling the instance" $
optionalNEStringField "os_name"
pDiskIndex :: Field
pDiskIndex =
withDoc "Disk index for e.g. grow disk" .
renameField "DiskIndex " $ simpleField "disk" [t| DiskIndex |]
pDiskChgAmount :: Field
pDiskChgAmount =
withDoc "Disk amount to add or grow to" .
renameField "DiskChgAmount" $ simpleField "amount" [t| NonNegative Int |]
pDiskChgAbsolute :: Field
pDiskChgAbsolute =
withDoc
"Whether the amount parameter is an absolute target or a relative one" .
renameField "DiskChkAbsolute" $ defaultFalse "absolute"
pTargetGroups :: Field
pTargetGroups =
withDoc
"Destination group names or UUIDs (defaults to \"all but current group\")" .
optionalField $ simpleField "target_groups" [t| [NonEmptyString] |]
pNodeGroupAllocPolicy :: Field
pNodeGroupAllocPolicy =
withDoc "Instance allocation policy" .
optionalField $
simpleField "alloc_policy" [t| AllocPolicy |]
pGroupNodeParams :: Field
pGroupNodeParams =
withDoc "Default node parameters for group" .
optionalField $ simpleField "ndparams" [t| JSObject JSValue |]
pExportMode :: Field
pExportMode =
withDoc "Export mode" .
renameField "ExportMode" $ simpleField "mode" [t| ExportMode |]
-- FIXME: Rename target_node as it changes meaning for different
-- export modes (e.g. "destination")
pExportTargetNode :: Field
pExportTargetNode =
withDoc "Target node (depends on export mode)" .
renameField "ExportTarget" $
simpleField "target_node" [t| ExportTarget |]
pExportTargetNodeUuid :: Field
pExportTargetNodeUuid =
withDoc "Target node UUID (if local export)" .
renameField "ExportTargetNodeUuid" . optionalField $
simpleField "target_node_uuid" [t| NonEmptyString |]
pShutdownInstance :: Field
pShutdownInstance =
withDoc "Whether to shutdown the instance before export" $
defaultTrue "shutdown"
pRemoveInstance :: Field
pRemoveInstance =
withDoc "Whether to remove instance after export" $
defaultFalse "remove_instance"
pIgnoreRemoveFailures :: Field
pIgnoreRemoveFailures =
withDoc "Whether to ignore failures while removing instances" $
defaultFalse "ignore_remove_failures"
pX509KeyName :: Field
pX509KeyName =
withDoc "Name of X509 key (remote export only)" .
optionalField $ simpleField "x509_key_name" [t| [JSValue] |]
pX509DestCA :: Field
pX509DestCA =
withDoc "Destination X509 CA (remote export only)" $
optionalNEStringField "destination_x509_ca"
pZeroFreeSpace :: Field
pZeroFreeSpace =
withDoc "Whether to zero the free space on the disks of the instance" $
defaultFalse "zero_free_space"
pHelperStartupTimeout :: Field
pHelperStartupTimeout =
withDoc "Startup timeout for the helper VM" .
optionalField $ simpleField "helper_startup_timeout" [t| Int |]
pHelperShutdownTimeout :: Field
pHelperShutdownTimeout =
withDoc "Shutdown timeout for the helper VM" .
optionalField $ simpleField "helper_shutdown_timeout" [t| Int |]
pZeroingTimeoutFixed :: Field
pZeroingTimeoutFixed =
withDoc "The fixed part of time to wait before declaring the zeroing\
\ operation to have failed" .
optionalField $ simpleField "zeroing_timeout_fixed" [t| Int |]
pZeroingTimeoutPerMiB :: Field
pZeroingTimeoutPerMiB =
withDoc "The variable part of time to wait before declaring the zeroing\
\ operation to have failed, dependent on total size of disks" .
optionalField $ simpleField "zeroing_timeout_per_mib" [t| Double |]
pTagsObject :: Field
pTagsObject =
withDoc "Tag kind" $
simpleField "kind" [t| TagKind |]
pTagsName :: Field
pTagsName =
withDoc "Name of object" .
renameField "TagsGetName" .
optionalField $ simpleField "name" [t| String |]
pTagsList :: Field
pTagsList =
withDoc "List of tag names" .
renameField "TagsList" $
simpleField "tags" [t| [String] |]
-- FIXME: this should be compiled at load time?
pTagSearchPattern :: Field
pTagSearchPattern =
withDoc "Search pattern (regular expression)" .
renameField "TagSearchPattern" $
simpleField "pattern" [t| NonEmptyString |]
pDelayDuration :: Field
pDelayDuration =
withDoc "Duration parameter for 'OpTestDelay'" .
renameField "DelayDuration" $
simpleField "duration" [t| Double |]
pDelayOnMaster :: Field
pDelayOnMaster =
withDoc "on_master field for 'OpTestDelay'" .
renameField "DelayOnMaster" $
defaultTrue "on_master"
pDelayOnNodes :: Field
pDelayOnNodes =
withDoc "on_nodes field for 'OpTestDelay'" .
renameField "DelayOnNodes" .
defaultField [| [] |] $
simpleField "on_nodes" [t| [NonEmptyString] |]
pDelayOnNodeUuids :: Field
pDelayOnNodeUuids =
withDoc "on_node_uuids field for 'OpTestDelay'" .
renameField "DelayOnNodeUuids" . optionalField $
simpleField "on_node_uuids" [t| [NonEmptyString] |]
pDelayRepeat :: Field
pDelayRepeat =
withDoc "Repeat parameter for OpTestDelay" .
renameField "DelayRepeat" .
defaultField [| forceNonNeg (0::Int) |] $
simpleField "repeat" [t| NonNegative Int |]
pDelayInterruptible :: Field
pDelayInterruptible =
withDoc "Allows socket-based interruption of a running OpTestDelay" .
renameField "DelayInterruptible" .
defaultField [| False |] $
simpleField "interruptible" [t| Bool |]
pDelayNoLocks :: Field
pDelayNoLocks =
withDoc "Don't take locks during the delay" .
renameField "DelayNoLocks" $
defaultTrue "no_locks"
pIAllocatorDirection :: Field
pIAllocatorDirection =
withDoc "IAllocator test direction" .
renameField "IAllocatorDirection" $
simpleField "direction" [t| IAllocatorTestDir |]
pIAllocatorMode :: Field
pIAllocatorMode =
withDoc "IAllocator test mode" .
renameField "IAllocatorMode" $
simpleField "mode" [t| IAllocatorMode |]
pIAllocatorReqName :: Field
pIAllocatorReqName =
withDoc "IAllocator target name (new instance, node to evac, etc.)" .
renameField "IAllocatorReqName" $ simpleField "name" [t| NonEmptyString |]
pIAllocatorNics :: Field
pIAllocatorNics =
withDoc "Custom OpTestIAllocator nics" .
renameField "IAllocatorNics" .
optionalField $ simpleField "nics" [t| [INicParams] |]
pIAllocatorDisks :: Field
pIAllocatorDisks =
withDoc "Custom OpTestAllocator disks" .
renameField "IAllocatorDisks" .
optionalField $ simpleField "disks" [t| [JSValue] |]
pIAllocatorMemory :: Field
pIAllocatorMemory =
withDoc "IAllocator memory field" .
renameField "IAllocatorMem" .
optionalField $
simpleField "memory" [t| NonNegative Int |]
pIAllocatorVCpus :: Field
pIAllocatorVCpus =
withDoc "IAllocator vcpus field" .
renameField "IAllocatorVCpus" .
optionalField $
simpleField "vcpus" [t| NonNegative Int |]
pIAllocatorOs :: Field
pIAllocatorOs =
withDoc "IAllocator os field" .
renameField "IAllocatorOs" $ optionalNEStringField "os"
pIAllocatorInstances :: Field
pIAllocatorInstances =
withDoc "IAllocator instances field" .
renameField "IAllocatorInstances " .
optionalField $
simpleField "instances" [t| [NonEmptyString] |]
pIAllocatorEvacMode :: Field
pIAllocatorEvacMode =
withDoc "IAllocator evac mode" .
renameField "IAllocatorEvacMode" .
optionalField $
simpleField "evac_mode" [t| EvacMode |]
pIAllocatorSpindleUse :: Field
pIAllocatorSpindleUse =
withDoc "IAllocator spindle use" .
renameField "IAllocatorSpindleUse" .
defaultField [| forceNonNeg (1::Int) |] $
simpleField "spindle_use" [t| NonNegative Int |]
pIAllocatorCount :: Field
pIAllocatorCount =
withDoc "IAllocator count field" .
renameField "IAllocatorCount" .
defaultField [| forceNonNeg (1::Int) |] $
simpleField "count" [t| NonNegative Int |]
pJQueueNotifyWaitLock :: Field
pJQueueNotifyWaitLock =
withDoc "'OpTestJqueue' notify_waitlock" $
defaultFalse "notify_waitlock"
pJQueueNotifyExec :: Field
pJQueueNotifyExec =
withDoc "'OpTestJQueue' notify_exec" $
defaultFalse "notify_exec"
pJQueueLogMessages :: Field
pJQueueLogMessages =
withDoc "'OpTestJQueue' log_messages" .
defaultField [| [] |] $ simpleField "log_messages" [t| [String] |]
pJQueueFail :: Field
pJQueueFail =
withDoc "'OpTestJQueue' fail attribute" .
renameField "JQueueFail" $ defaultFalse "fail"
pTestDummyResult :: Field
pTestDummyResult =
withDoc "'OpTestDummy' result field" .
renameField "TestDummyResult" $ simpleField "result" [t| JSValue |]
pTestDummyMessages :: Field
pTestDummyMessages =
withDoc "'OpTestDummy' messages field" .
renameField "TestDummyMessages" $
simpleField "messages" [t| JSValue |]
pTestDummyFail :: Field
pTestDummyFail =
withDoc "'OpTestDummy' fail field" .
renameField "TestDummyFail" $ simpleField "fail" [t| JSValue |]
pTestDummySubmitJobs :: Field
pTestDummySubmitJobs =
withDoc "'OpTestDummy' submit_jobs field" .
renameField "TestDummySubmitJobs" $
simpleField "submit_jobs" [t| JSValue |]
pNetworkName :: Field
pNetworkName =
withDoc "Network name" $
simpleField "network_name" [t| NonEmptyString |]
pNetworkAddress4 :: Field
pNetworkAddress4 =
withDoc "Network address (IPv4 subnet)" .
renameField "NetworkAddress4" $
simpleField "network" [t| IPv4Network |]
pNetworkGateway4 :: Field
pNetworkGateway4 =
withDoc "Network gateway (IPv4 address)" .
renameField "NetworkGateway4" .
optionalField $ simpleField "gateway" [t| IPv4Address |]
pNetworkAddress6 :: Field
pNetworkAddress6 =
withDoc "Network address (IPv6 subnet)" .
renameField "NetworkAddress6" .
optionalField $ simpleField "network6" [t| IPv6Network |]
pNetworkGateway6 :: Field
pNetworkGateway6 =
withDoc "Network gateway (IPv6 address)" .
renameField "NetworkGateway6" .
optionalField $ simpleField "gateway6" [t| IPv6Address |]
pNetworkMacPrefix :: Field
pNetworkMacPrefix =
withDoc "Network specific mac prefix (that overrides the cluster one)" .
renameField "NetMacPrefix" $
optionalNEStringField "mac_prefix"
pNetworkAddRsvdIps :: Field
pNetworkAddRsvdIps =
withDoc "Which IP addresses to reserve" .
renameField "NetworkAddRsvdIps" .
optionalField $
simpleField "add_reserved_ips" [t| [IPv4Address] |]
pNetworkRemoveRsvdIps :: Field
pNetworkRemoveRsvdIps =
withDoc "Which external IP addresses to release" .
renameField "NetworkRemoveRsvdIps" .
optionalField $
simpleField "remove_reserved_ips" [t| [IPv4Address] |]
pNetworkMode :: Field
pNetworkMode =
withDoc "Network mode when connecting to a group" $
simpleField "network_mode" [t| NICMode |]
pNetworkLink :: Field
pNetworkLink =
withDoc "Network link when connecting to a group" $
simpleField "network_link" [t| NonEmptyString |]
pAdminStateSource :: Field
pAdminStateSource =
withDoc "Who last changed the instance admin state" .
optionalField $
simpleField "admin_state_source" [t| AdminStateSource |]
pNetworkVlan :: Field
pNetworkVlan =
withDoc "Network vlan when connecting to a group" .
defaultField [| "" |] $ stringField "network_vlan"
|
ribag/ganeti-experiments
|
src/Ganeti/OpParams.hs
|
gpl-2.0
| 53,110
| 0
| 16
| 9,229
| 9,528
| 5,287
| 4,241
| 1,464
| 3
|
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances #-}
module Turing.Machine where
-- $Id$
import Machine.Class
import Machine.Akzeptieren
import qualified Challenger as C
import qualified Machine.Acceptor.Type as A
import Turing.Type
import Turing.Konfiguration
import Turing.Nachfolger ( folgekonfigurationen )
import Autolib.Reporter
import Autolib.Reporter.Set
import qualified Autolib.Reporter.Checker
-- import qualified Autolib.Subset
import Autolib.Set
import Autolib.Size
import Autolib.ToDoc
instance TuringC y z => Compute ( Turing y z ) ( Konfiguration y z ) where
next m k = folgekonfigurationen m k
accepting m k = zustand k `elementOf` endzustandsmenge m
depth m k = schritt k
instance TuringC y z => In ( Turing y z ) [ y ] ( Konfiguration y z ) where
input_reporter m ys = do
silent $ do
inform $ text "Die Eingabe ist:" <+> toDoc ys
Autolib.Reporter.Set.subeq
( text "benutzte Eingabezeichen" , mkSet ys )
( text "Eingabealphabet Ihrer Maschine", eingabealphabet m )
return $ start_konfiguration m ys
instance TuringC y z => Out ( Turing y z ) [ y ] ( Konfiguration y z ) where
output_reporter m k = return $ bandinhalt m k
instance Encode String where
    -- unary
    encode xs = do
        x <- xs
        replicate ( fromIntegral x) '1' ++ "." -- not the space character!
instance Decode String where
    decode m = fromIntegral $ length m -- should really check which characters occur
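-- Illustrative note (not part of the original module), assuming the Encode
-- class from Machine.Class maps a list of numbers to a tape string:
--   encode [2,3]  would yield "11.111."
--   decode "11111" would yield 5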
{-
instance TuringC y z =>
C.Partial A.Acceptor ( A.Type ( Turing y z ) [y] )
( Turing y z )
where
describe p i = vcat
[ text "Gesucht ist eine Maschine/ein Programm,"
, nest 4 $ A.machine_info i
, text "das diese Sprache akzeptiert:"
, nest 4 $ A.data_info i
, text "diese Eingaben sollen akzeptiert werden:"
, nest 4 $ toDoc $ A.yeah i
, text "diese Eingaben sollen nicht akzeptiert werden:"
, nest 4 $ toDoc $ A.noh i
]
initial p i = A.start i
partial p i b = Autolib.Reporter.Checker.run ( A.check i ) b
total p i b = do
positiv_liste (A.cut i) b $ A.yeah i
negativ_liste (A.cut i) b $ A.noh i
        return () -- ignore the size of the machine (here)
-}
|
Erdwolf/autotool-bonn
|
src/Turing/Machine.hs
|
gpl-2.0
| 2,214
| 5
| 15
| 533
| 426
| 222
| 204
| 35
| 0
|
module Language where
import Control.Spoon -- spoon
import Data.List (intercalate) -- needed by 'write' below
---- Grammatical states ----
data Number = S | P deriving (Show, Eq)
data Gender = M | F | N deriving (Show, Eq)
data Person = FirstPerson | SecondPerson | SecondPersonFormal | ThirdPerson deriving (Show, Eq)
-- This typeclass links the cases of different languages
class Case c
---- Typedefs ----
type NounPhrase c = (c -> [String], Number, Gender, Person)
type Noun c = Number -> NounPhrase c
type Modifier c = NounPhrase c -> NounPhrase c
type Adjective c = Number -> Gender -> c -> [String]
type Verb = Number -> Person -> [String]
type Clause c = NounPhrase c -> Verb -> [NounPhrase c] -> [String]
applyCase :: Case c => c -> NounPhrase c -> [String]
applyCase c (object, _, _, _) = object c
noun :: Case c => String -> String -> Gender -> Noun c
noun single plural gender =
\number ->
(\_ -> case number of
S -> [single]
P -> [plural],
number, gender, ThirdPerson)
-- Extend a NounPhrase by providing a function that maps between the old and new sentence fragment
extendNP :: Case c => (Number -> Gender -> c -> [String] -> [String]) -> Modifier c
extendNP f (object, n, g, p) = (\c -> f n g c $ object c, n, g, p)
-- A simpler form for functions that just give a word to be added before the fragment
modifier :: Case c => (Number -> Gender -> c -> String) -> Modifier c
modifier f = extendNP (\n g c o -> f n g c : o)
---- Morphology helpers ----
addSpecialSuffix :: String -> (String -> Maybe String) -> String -> String
addSpecialSuffix standard _ "" = standard
addSpecialSuffix standard special stem@(c:cs) = case special stem of
Just suffix -> suffix
Nothing -> c : addSpecialSuffix standard special cs
addSuffix :: String -> String -> String
addSuffix suffix = addSpecialSuffix suffix (\_ -> Nothing)
maybeize f = \x -> spoon $ f x
---- ----
write phrase = print (intercalate " " phrase)
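-- Illustrative sketch (not part of the original module): a minimal Case type
-- and a noun built with the combinators above; every name here is hypothetical.
data SimpleCase = Nominative | Accusative deriving (Show, Eq)
instance Case SimpleCase
exampleDog :: Noun SimpleCase
exampleDog = noun "dog" "dogs" M
-- applyCase Nominative (exampleDog S) == ["dog"]
-- applyCase Nominative (exampleDog P) == ["dogs"]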
|
Fedjmike/ngen
|
language.hs
|
gpl-3.0
| 1,892
| 6
| 12
| 389
| 697
| 380
| 317
| -1
| -1
|
-- Copyright (c) 2014 Contributors as noted in the AUTHORS file
--
-- This file is part of frp-arduino.
--
-- frp-arduino is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- frp-arduino is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with frp-arduino. If not, see <http://www.gnu.org/licenses/>.
module Arduino.Internal.CodeGen.C
( streamsToC
) where
import Arduino.Internal.CodeGen.BlockDoc
import Arduino.Internal.DAG
import Control.Monad
import qualified Control.Monad.Fail as Fail
import Data.List (intersperse)
import qualified Data.Map as M
import Data.Functor.Identity as Identity
data ResultValue = Value String CType Storage (Maybe String)
| FilterVariable String CType String
| ToFlatVariable String CType
| Void
data Storage = Variable
| Literal
data CType = CBit
| CByte
| CWord
| CVoid
| CList CType
| CTuple [CType]
deriving (Eq, Show)
instance Fail.MonadFail Identity.Identity where
fail = error "assumption failed in pattern matching"
listSizeCType :: CType
listSizeCType = CByte
argIndexCType :: CType
argIndexCType = CByte
streamsToC :: Streams -> String
streamsToC = runGen . genStreamsCFile
genStreamsCFile :: Streams -> Gen ()
genStreamsCFile streams = do
header "// This file is automatically generated."
header ""
header "#include <avr/io.h>"
header "#include <util/delay_basic.h>"
header "#include <stdbool.h>"
header ""
genCTypes
genStreamCFunctions (sortStreams streams) M.empty
line ""
block "int main(void) {" $ do
mapM genInit (streamsInTree streams)
mapM genInputCall (filter isBootupStream (streamsInTree streams))
block "while (1) {" $ do
mapM genInputCall (filter isInputStream (streamsInTree streams))
line "}"
line "return 0;"
line "}"
isBootupStream :: Stream -> Bool
isBootupStream stream = case body stream of
Bootup -> True
_ -> False
isInputStream :: Stream -> Bool
isInputStream stream = null (inputs stream) && not (isBootupStream stream)
genCTypes :: Gen ()
genCTypes = do
header $ "struct list {"
header $ " " ++ cTypeStr listSizeCType ++ " size;"
header $ " void* values;"
header $ "};"
forM_ [2, 6] $ \n -> do
header $ ""
header $ "struct tuple" ++ show n ++ " {"
forM_ [0..n-1] $ \value -> do
header $ " void* value" ++ show value ++ ";"
header $ "};"
genStreamCFunctions :: [Stream] -> M.Map String CType -> Gen ()
genStreamCFunctions streams streamTypeMap = case streams of
[] -> return ()
(stream:restStreams) -> do
cType <- genStreamCFunction streamTypeMap stream
let updateStreamTypeMap = M.insert (name stream) cType streamTypeMap
genStreamCFunctions restStreams updateStreamTypeMap
genStreamCFunction :: M.Map String CType -> Stream -> Gen CType
genStreamCFunction streamTypes stream = do
let inputTypes = map (streamTypes M.!) (inputs stream)
let inputMap = M.fromList $ zip [0..] inputTypes
let args = streamArguments streamTypes stream
let declaration = ("static void " ++ name stream ++
"(" ++ streamToArgumentList streamTypes stream ++ ")")
cFunction declaration $ do
genStreamInputParsing stream args
outputNames <- genStreamBody inputMap (body stream)
genStreamOutputCalling outputNames stream
return $ resultType outputNames
streamArguments :: M.Map String CType -> Stream -> [(String, String, Int)]
streamArguments streamTypes =
map (\(input, cType) -> ("input_" ++ show input, cTypeStr cType, input)) .
zip [0..] .
map (streamTypes M.!) .
inputs
streamToArgumentList :: M.Map String CType -> Stream -> String
streamToArgumentList streamTypes stream
| length (inputs stream) < 1 = ""
| otherwise = cTypeStr argIndexCType ++ " arg, void* value"
genStreamInputParsing :: Stream -> [(String, String, Int)] -> Gen ()
genStreamInputParsing stream args
| length args == 1 || isMerge stream = do
let (name, cType, _):_ = args
header $ cType ++ " " ++ name ++ " = *((" ++ cType ++ "*)value);"
| length args > 1 = do
forM_ args $ \(name, cType, _) -> do
header $ "static " ++ cType ++ " " ++ name ++ ";"
block "switch (arg) {" $ do
forM_ args $ \(name, cType, n) -> do
block ("case " ++ show n ++ ":") $ do
line $ name ++ " = *((" ++ cType ++ "*)value);"
line $ "break;"
line $ "}"
| otherwise = do
return ()
isMerge :: Stream -> Bool
isMerge stream = case body stream of
(Merge _) -> True
_ -> False
genStreamBody :: M.Map Int CType -> Body -> Gen [ResultValue]
genStreamBody inputMap body = case body of
(Map expression) -> do
fmap (:[]) $ genExpression inputMap False expression
(MapMany values) -> do
mapM (genExpression inputMap False) values
(Fold expression startValue) -> do
(Value cStartValue cTypeStartValue _ Nothing) <-
genExpression inputMap True startValue
header $ concat [ "static "
, cTypeStr cTypeStartValue
, " input_1 = "
, cStartValue
, ";"
]
(Value cExpression cType _ Nothing) <-
let inputMapWithStartState = M.insert 1 cTypeStartValue inputMap
in genExpression inputMapWithStartState False expression
genCopy "input_1" cExpression cTypeStartValue
fmap (:[]) $ variable "input_1" cType
(Filter condition) -> do
(Value cCondition CBit _ Nothing) <-
genExpression inputMap False condition
(Value cValue cType _ Nothing) <-
genExpression inputMap False (Input 0)
return [FilterVariable cValue cType cCondition]
(DelayMicroseconds delay expression) -> do
(Value cDelay CWord _ Nothing) <-
genExpression inputMap False delay
(Value cExpression cType storage Nothing) <-
genExpression inputMap False expression
return [Value cExpression cType storage (Just cDelay)]
(Flatten expression) -> do
(Value cExpression (CList cTypeItem) _ Nothing) <-
genExpression inputMap False expression
return [ToFlatVariable cExpression cTypeItem]
(Driver _ _ bodyLLI) -> do
fmap (:[]) $ genLLI bodyLLI
(Merge expression) -> do
fmap (:[]) $ genExpression inputMap False expression
Bootup -> do
return [Value "0" CBit Literal Nothing]
genExpression :: M.Map Int CType -> Bool -> Expression -> Gen ResultValue
genExpression inputMap static expression = case expression of
(Not operand) -> do
(Value cOperand CBit _ Nothing) <-
genExpression inputMap static operand
literal CBit $ "!(" ++ cOperand ++ ")"
(Even operand) -> do
(Value cOperand CWord _ Nothing) <-
genExpression inputMap static operand
literal CBit $ "(" ++ cOperand ++ ") % 2 == 0"
(Greater left right) -> do
(Value cLeft CWord _ Nothing) <- genExpression inputMap static left
(Value cRight CWord _ Nothing) <- genExpression inputMap static right
literal CBit $ "(" ++ cLeft ++ " > " ++ cRight ++ ")"
(Add left right) -> do
(Value cLeft CWord _ Nothing) <- genExpression inputMap static left
(Value cRight CWord _ Nothing) <- genExpression inputMap static right
literal CWord $ "(" ++ cLeft ++ " + " ++ cRight ++ ")"
(Sub left right) -> do
(Value cLeft CWord _ Nothing) <- genExpression inputMap static left
(Value cRight CWord _ Nothing) <- genExpression inputMap static right
literal CWord $ "(" ++ cLeft ++ " - " ++ cRight ++ ")"
(Mul left right) -> do
(Value cLeft CWord _ Nothing) <- genExpression inputMap static left
(Value cRight CWord _ Nothing) <- genExpression inputMap static right
literal CWord $ "(" ++ cLeft ++ " * " ++ cRight ++ ")"
(Input value) -> do
variable ("input_" ++ show value) (inputMap M.! value)
Unit -> do
return $ Value "0" CBit Literal Nothing
(ByteConstant value) -> do
literal CByte $ show value
(BoolToBit operand) -> do
genExpression inputMap static operand
(IsHigh operand) -> do
genExpression inputMap static operand
(BitConstant value) -> do
case value of
High -> literal CBit "true"
Low -> literal CBit "false"
(ListConstant values) -> do
exprs <- mapM (genExpression inputMap static) values
temp <- genCVariable "struct list"
v <- label
header $ cTypeStr (resultType exprs) ++ " " ++ v ++ "[" ++ show (length exprs) ++ "];"
forM (zip [0..] exprs) $ \(i, (Value x _ _ Nothing)) -> do
line $ v ++ "[" ++ show i ++ "] = " ++ x ++ ";"
line $ temp ++ ".size = " ++ show (length exprs) ++ ";"
line $ temp ++ ".values = (void*)" ++ v ++ ";"
variable temp (CList $ resultType exprs)
(TupleValue n tuple) -> do
(Value name (CTuple cTypes) _ Nothing) <- genExpression inputMap static tuple
let cType = cTypes !! n
let res = concat [ "*"
, "((" ++ cTypeStr cType ++ "*)"
, name
, ".value"
, show n
, ")"
]
variable res cType
(TupleConstant values) -> do
if static
then do
valueVariables <- forM values $ \value -> do
(Value cExpression cType _ Nothing) <- genExpression inputMap static value
name <- genStaticCVariable (cTypeStr cType) cExpression
return $ Value name cType Variable Nothing
let res = concat (
[ "{ "
]
++
intersperse ", " (map (\(n, (Value name _ _ _)) -> ".value" ++ show n ++ " = (void*)&" ++ name) (zip [0..] valueVariables))
++
[ " }"
]
)
variable res (CTuple $ map extract valueVariables)
else do
valueVariables <- forM values $ \value -> do
(Value cExpression cType _ _) <- genExpression inputMap static value
wrap cExpression cType
name <- genCVariable ("struct tuple" ++ show (length valueVariables))
forM_ (zip [0..] valueVariables) $ \(n, (Value x _ _ _)) ->
line $ name ++ ".value" ++ show n ++ " = (void*)&" ++ x ++ ";"
variable name (CTuple $ map extract valueVariables)
(NumberToByteArray operand) -> do
(Value r CWord _ _) <- genExpression inputMap static operand
charBuf <- label
header $ cTypeStr CByte ++ " " ++ charBuf ++ "[20];"
line $ "snprintf(" ++ charBuf ++ ", 20, \"%d\", " ++ r ++ ");"
temp <- genCVariable "struct list"
line $ temp ++ ".size = strlen(" ++ charBuf ++ ");"
line $ temp ++ ".values = " ++ charBuf ++ ";"
variable temp (CList CByte)
(WordConstant value) -> do
literal CWord $ show value
(Equal leftExpression rightExpression) -> do
(Value cLeft _ _ _) <-
genExpression inputMap static leftExpression
(Value cRight _ _ _) <-
genExpression inputMap static rightExpression
literal CBit $ cLeft ++ " == " ++ cRight
(If conditionExpression trueExpression falseExpression) -> do
(Value cCondition CBit _ _) <-
genExpression inputMap static conditionExpression
(Value cTrue cType _ _) <-
genExpression inputMap static trueExpression
(Value cFalse cType _ _) <-
genExpression inputMap static falseExpression
temp <- genCVariable (cTypeStr cType)
block ("if (" ++ cCondition ++ ") {") $ do
line $ temp ++ " = " ++ cTrue ++ ";"
block "} else {" $ do
line $ temp ++ " = " ++ cFalse ++ ";"
line $ "}"
variable temp cType
genCopy :: String -> String -> CType -> Gen ()
genCopy destination source cType = case cType of
CTuple items -> forM_ (zip [0..] items) $ \(n, itemType) -> do
let drill x = concat [ "*"
, "("
, "(" ++ cTypeStr itemType ++ "*)"
, x
, ".value"
, show n
, ")"
]
genCopy (drill destination) (drill source) itemType
_ -> line $ destination ++ " = " ++ source ++ ";"
genLLI :: LLI -> Gen ResultValue
genLLI lli = case lli of
(WriteBit register bit value next) -> do
case value of
ConstBit High -> do
line (register ++ " |= (1 << " ++ bit ++ ");")
ConstBit Low -> do
line (register ++ " &= ~(1 << " ++ bit ++ ");")
_ -> do
(Value x cType _ _) <- genLLI value
block ("if (" ++ x ++ ") {") $ do
line (register ++ " |= (1 << " ++ bit ++ ");")
block "} else {" $ do
line (register ++ " &= ~(1 << " ++ bit ++ ");")
line "}"
genLLI next
(WriteByte register value next) -> do
(Value x cType _ _) <- genLLI value
line (register ++ " = " ++ x ++ ";")
genLLI next
(WriteWord register value next) -> do
(Value x cType _ _) <- genLLI value
line (register ++ " = " ++ x ++ ";")
genLLI next
(ReadBit register bit) -> do
x <- genCVariable "bool"
line $ x ++ " = (" ++ register ++ " & (1 << " ++ bit ++ ")) == 0U;"
variable x CBit
(ReadWord register next) -> do
x <- genCVariable (cTypeStr CWord)
line $ x ++ " = " ++ register ++ ";"
genLLI next
variable x CWord
(ReadTwoPartWord lowRegister highRegister next) -> do
cLow <- genCVariable (cTypeStr CByte)
cHigh <- genCVariable (cTypeStr CByte)
cWord <- genCVariable (cTypeStr CWord)
line $ cLow ++ " = " ++ lowRegister ++ ";"
line $ cHigh ++ " = " ++ highRegister ++ ";"
line $ cWord ++ " = " ++ cLow ++ " | (" ++ cHigh ++ " << 8);"
genLLI next
variable cWord CWord
(WaitBit register bit value next) -> do
case value of
High -> do
line $ "while ((" ++ register ++ " & (1 << " ++ bit ++ ")) == 0) {"
line $ "}"
Low -> do
line $ "while ((" ++ register ++ " & (1 << " ++ bit ++ ")) != 0) {"
line $ "}"
genLLI next
(Const x) -> do
literal CBit x
(ConstBit x) -> do
case x of
High -> literal CBit "true"
Low -> literal CBit "false"
InputValue -> do
variable "input_0" CBit
End -> do
return Void
genStreamOutputCalling :: [ResultValue] -> Stream -> Gen ()
genStreamOutputCalling results stream = do
wrappedResults <- forM results $ \result -> case result of
(Value name cType Literal delay) -> do
(Value wrappedName wrappedCType Variable _) <- wrap name cType
return $ Value wrappedName wrappedCType Variable delay
_ -> do
return result
forM_ wrappedResults $ \result -> case result of
(Value name cType _ delay) -> do
forM_ (outputs stream) $ \outputStreamName -> do
generateCall outputStreamName name
case delay of
Just x -> do
line $ "// Delay assumes a 16MHz clock"
line $ "_delay_loop_2(" ++ x ++ ");"
line $ "_delay_loop_2(" ++ x ++ ");"
line $ "_delay_loop_2(" ++ x ++ ");"
line $ "_delay_loop_2(" ++ x ++ ");"
_ -> return ()
(FilterVariable name cType condition) -> do
forM_ (outputs stream) $ \outputStreamName -> do
block ("if (" ++ condition ++ ") {") $ do
generateCall outputStreamName name
line "}"
(ToFlatVariable name cType) -> do
forM_ (outputs stream) $ \outputStreamName -> do
i <- genCVariable (cTypeStr listSizeCType)
block ("for (" ++ i ++ " = 0; " ++ i ++ " < " ++ name ++ ".size; " ++ i ++ "++) {") $ do
generateCall outputStreamName ("((" ++ cTypeStr cType ++ "*)" ++ name ++ ".values)[" ++ i ++ "]")
line "}"
Void -> do
return ()
where
generateCall (n, outputStreamName) resultVariable = do
line (outputStreamName ++ "(" ++ show n ++ ", (void*)(&" ++ resultVariable ++ "));")
genInit :: Stream -> Gen ()
genInit stream = case body stream of
(Driver _ initLLI _) -> do
genLLI initLLI
return ()
_ -> do
return ()
genInputCall :: Stream -> Gen ()
genInputCall stream = do
line (name stream ++ "();")
wrap :: String -> CType -> Gen ResultValue
wrap expression cType = do
name <- genCVariable (cTypeStr cType)
line $ name ++ " = " ++ expression ++ ";"
variable name cType
variable :: String -> CType -> Gen ResultValue
variable name cType = return $ Value name cType Variable Nothing
literal :: CType -> String -> Gen ResultValue
literal cType name = return $ Value name cType Literal Nothing
resultType :: [ResultValue] -> CType
resultType vars = case vars of
(x:y:rest) -> if extract x == extract y
then resultType (y:rest)
else error "different c types"
[var] -> extract var
[] -> CVoid
extract (Value _ cType _ _) = cType
extract (FilterVariable _ cType _) = cType
extract (ToFlatVariable _ cType) = cType
cTypeStr :: CType -> String
cTypeStr cType = case cType of
CBit -> "bool"
CByte -> "uint8_t"
CWord -> "uint16_t"
CVoid -> "void"
CList _ -> "struct list"
CTuple itemTypes -> "struct tuple" ++ show (length itemTypes)
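-- Illustrative examples (not part of the original module):
--   cTypeStr (CList CByte)          == "struct list"
--   cTypeStr (CTuple [CBit, CWord]) == "struct tuple2"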
genCVariable :: String -> Gen String
genCVariable cType = do
l <- label
header $ cType ++ " " ++ l ++ ";"
return l
genStaticCVariable :: String -> String -> Gen String
genStaticCVariable cType value = do
l <- label
header $ "static " ++ cType ++ " " ++ l ++ " = " ++ value ++ ";"
return l
cFunction :: String -> Gen a -> Gen a
cFunction declaration gen = do
header $ ""
header $ declaration ++ ";"
line $ ""
x <- block (declaration ++ " {") gen
line $ "}"
return x
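-- Illustrative sketch (not part of the original module): the exported entry
-- point is streamsToC; a hypothetical caller holding a Streams value "dag"
-- might simply do
--   writeFile "main.c" (streamsToC dag)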
|
frp-arduino/frp-arduino
|
src/Arduino/Internal/CodeGen/C.hs
|
gpl-3.0
| 19,583
| 0
| 29
| 6,618
| 6,102
| 2,911
| 3,191
| 433
| 21
|
module ATP.Util.Tuple
( map2
, map3
, map4
, map9
, map10
)
where
map2 :: (a -> b) -> (a, a) -> (b, b)
map2 f (a1, a2) = (f a1, f a2)
map3 :: (a -> b) -> (a, a, a) -> (b, b, b)
map3 f (a1, a2, a3) = (f a1, f a2, f a3)
map4 :: (a -> b) -> (a, a, a, a) -> (b, b, b, b)
map4 f (a1, a2, a3, a4) = (f a1, f a2, f a3, f a4)
map9 :: (a -> b) -> (a, a, a, a, a, a, a, a, a) -> (b, b, b, b, b, b, b, b, b)
map9 f (a1, a2, a3, a4, a5, a6, a7, a8, a9) = (f a1, f a2, f a3, f a4, f a5, f a6, f a7, f a8, f a9)
map10 :: (a -> b) -> (a, a, a, a, a, a, a, a, a, a) -> (b, b, b, b, b, b, b, b, b, b)
map10 f (a1, a2, a3, a4, a5, a6, a7, a8, a9, a10) = (f a1, f a2, f a3, f a4, f a5, f a6, f a7, f a8, f a9, f a10)
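-- Illustrative examples (not part of the original module):
--   map2 (+ 1) (1, 2)   == (2, 3)
--   map3 show (1, 2, 3) == ("1", "2", "3")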
|
andre-artus/handbook-of-practical-logic-and-automated-reasoning-haskell
|
src/ATP/Util/Tuple.hs
|
gpl-3.0
| 722
| 0
| 7
| 228
| 594
| 348
| 246
| 16
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.UnassignInstance
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Unassigns a registered instance from all of its layers. The instance remains
-- in the stack as an unassigned instance and can be assigned to another layer,
-- as needed. You cannot use this action with instances that were created with
-- AWS OpsWorks.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_UnassignInstance.html>
module Network.AWS.OpsWorks.UnassignInstance
(
-- * Request
UnassignInstance
-- ** Request constructor
, unassignInstance
-- ** Request lenses
, ui1InstanceId
-- * Response
, UnassignInstanceResponse
-- ** Response constructor
, unassignInstanceResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
newtype UnassignInstance = UnassignInstance
{ _ui1InstanceId :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'UnassignInstance' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ui1InstanceId' @::@ 'Text'
--
unassignInstance :: Text -- ^ 'ui1InstanceId'
-> UnassignInstance
unassignInstance p1 = UnassignInstance
{ _ui1InstanceId = p1
}
-- | The instance ID.
ui1InstanceId :: Lens' UnassignInstance Text
ui1InstanceId = lens _ui1InstanceId (\s a -> s { _ui1InstanceId = a })
data UnassignInstanceResponse = UnassignInstanceResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'UnassignInstanceResponse' constructor.
unassignInstanceResponse :: UnassignInstanceResponse
unassignInstanceResponse = UnassignInstanceResponse
instance ToPath UnassignInstance where
toPath = const "/"
instance ToQuery UnassignInstance where
toQuery = const mempty
instance ToHeaders UnassignInstance
instance ToJSON UnassignInstance where
toJSON UnassignInstance{..} = object
[ "InstanceId" .= _ui1InstanceId
]
instance AWSRequest UnassignInstance where
type Sv UnassignInstance = OpsWorks
type Rs UnassignInstance = UnassignInstanceResponse
request = post "UnassignInstance"
response = nullResponse UnassignInstanceResponse
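-- Illustrative sketch (not part of the generated module): building a request
-- value with the smart constructor above; the instance id is a placeholder.
--   exampleReq :: UnassignInstance
--   exampleReq = unassignInstance "i-0123456789abcdef0"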
|
dysinger/amazonka
|
amazonka-opsworks/gen/Network/AWS/OpsWorks/UnassignInstance.hs
|
mpl-2.0
| 3,498
| 0
| 9
| 711
| 352
| 217
| 135
| 47
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Genomics.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Genomics.Types.Product where
import Network.Google.Genomics.Types.Sum
import Network.Google.Prelude
-- | A mapping between info field keys and the InfoMergeOperations to be
-- performed on them. This is plumbed down to the MergeVariantRequests
-- generated by the resulting import job.
--
-- /See:/ 'importVariantsRequestInfoMergeConfig' smart constructor.
newtype ImportVariantsRequestInfoMergeConfig = ImportVariantsRequestInfoMergeConfig'
{ _ivrimcAddtional :: HashMap Text Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ImportVariantsRequestInfoMergeConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ivrimcAddtional'
importVariantsRequestInfoMergeConfig
:: HashMap Text Text -- ^ 'ivrimcAddtional'
-> ImportVariantsRequestInfoMergeConfig
importVariantsRequestInfoMergeConfig pIvrimcAddtional_ =
ImportVariantsRequestInfoMergeConfig'
{ _ivrimcAddtional = _Coerce # pIvrimcAddtional_
}
ivrimcAddtional :: Lens' ImportVariantsRequestInfoMergeConfig (HashMap Text Text)
ivrimcAddtional
= lens _ivrimcAddtional
(\ s a -> s{_ivrimcAddtional = a})
. _Coerce
instance FromJSON
ImportVariantsRequestInfoMergeConfig where
parseJSON
= withObject "ImportVariantsRequestInfoMergeConfig"
(\ o ->
ImportVariantsRequestInfoMergeConfig' <$>
(parseJSONObject o))
instance ToJSON ImportVariantsRequestInfoMergeConfig
where
toJSON = toJSON . _ivrimcAddtional
-- | A map of additional read alignment information. This must be of the form
-- map (string key mapping to a list of string values).
--
-- /See:/ 'readInfo' smart constructor.
newtype ReadInfo = ReadInfo'
{ _riAddtional :: HashMap Text [JSONValue]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReadInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'riAddtional'
readInfo
:: HashMap Text [JSONValue] -- ^ 'riAddtional'
-> ReadInfo
readInfo pRiAddtional_ =
ReadInfo'
{ _riAddtional = _Coerce # pRiAddtional_
}
riAddtional :: Lens' ReadInfo (HashMap Text [JSONValue])
riAddtional
= lens _riAddtional (\ s a -> s{_riAddtional = a}) .
_Coerce
instance FromJSON ReadInfo where
parseJSON
= withObject "ReadInfo"
(\ o -> ReadInfo' <$> (parseJSONObject o))
instance ToJSON ReadInfo where
toJSON = toJSON . _riAddtional
--
-- /See:/ 'exon' smart constructor.
data Exon = Exon'
{ _eStart :: !(Maybe (Textual Int64))
, _eEnd :: !(Maybe (Textual Int64))
, _eFrame :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Exon' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eStart'
--
-- * 'eEnd'
--
-- * 'eFrame'
exon
:: Exon
exon =
Exon'
{ _eStart = Nothing
, _eEnd = Nothing
, _eFrame = Nothing
}
-- | The start position of the exon on this annotation\'s reference sequence,
-- 0-based inclusive. Note that this is relative to the reference start,
-- and **not** the containing annotation start.
eStart :: Lens' Exon (Maybe Int64)
eStart
= lens _eStart (\ s a -> s{_eStart = a}) .
mapping _Coerce
-- | The end position of the exon on this annotation\'s reference sequence,
-- 0-based exclusive. Note that this is relative to the reference start,
-- and *not* the containing annotation start.
eEnd :: Lens' Exon (Maybe Int64)
eEnd
= lens _eEnd (\ s a -> s{_eEnd = a}) .
mapping _Coerce
-- | The frame of this exon. Contains a value of 0, 1, or 2, which indicates
-- the offset of the first coding base of the exon within the reading frame
-- of the coding DNA sequence, if any. This field is dependent on the
-- strandedness of this annotation (see Annotation.reverse_strand). For
-- forward stranded annotations, this offset is relative to the exon.start.
-- For reverse strand annotations, this offset is relative to the exon.end
-- \`- 1\`. Unset if this exon does not intersect the coding sequence. Upon
-- creation of a transcript, the frame must be populated for all or none of
-- the coding exons.
eFrame :: Lens' Exon (Maybe Int32)
eFrame
= lens _eFrame (\ s a -> s{_eFrame = a}) .
mapping _Coerce
instance FromJSON Exon where
parseJSON
= withObject "Exon"
(\ o ->
Exon' <$>
(o .:? "start") <*> (o .:? "end") <*>
(o .:? "frame"))
instance ToJSON Exon where
toJSON Exon'{..}
= object
(catMaybes
[("start" .=) <$> _eStart, ("end" .=) <$> _eEnd,
("frame" .=) <$> _eFrame])
-- | The \`Status\` type defines a logical error model that is suitable for
-- different programming environments, including REST APIs and RPC APIs. It
-- is used by [gRPC](https:\/\/github.com\/grpc). The error model is
-- designed to be: - Simple to use and understand for most users - Flexible
-- enough to meet unexpected needs # Overview The \`Status\` message
-- contains three pieces of data: error code, error message, and error
-- details. The error code should be an enum value of google.rpc.Code, but
-- it may accept additional error codes if needed. The error message should
-- be a developer-facing English message that helps developers *understand*
-- and *resolve* the error. If a localized user-facing error message is
-- needed, put the localized message in the error details or localize it in
-- the client. The optional error details may contain arbitrary information
-- about the error. There is a predefined set of error detail types in the
-- package \`google.rpc\` which can be used for common error conditions. #
-- Language mapping The \`Status\` message is the logical representation of
-- the error model, but it is not necessarily the actual wire format. When
-- the \`Status\` message is exposed in different client libraries and
-- different wire protocols, it can be mapped differently. For example, it
-- will likely be mapped to some exceptions in Java, but more likely mapped
-- to some error codes in C. # Other uses The error model and the
-- \`Status\` message can be used in a variety of environments, either with
-- or without APIs, to provide a consistent developer experience across
-- different environments. Example uses of this error model include: -
-- Partial errors. If a service needs to return partial errors to the
-- client, it may embed the \`Status\` in the normal response to indicate
-- the partial errors. - Workflow errors. A typical workflow has multiple
-- steps. Each step may have a \`Status\` message for error reporting
-- purpose. - Batch operations. If a client uses batch request and batch
-- response, the \`Status\` message should be used directly inside batch
-- response, one for each error sub-response. - Asynchronous operations. If
-- an API call embeds asynchronous operation results in its response, the
-- status of those operations should be represented directly using the
-- \`Status\` message. - Logging. If some API errors are stored in logs,
-- the message \`Status\` could be used directly after any stripping needed
-- for security\/privacy reasons.
--
-- /See:/ 'status' smart constructor.
data Status = Status'
{ _sDetails :: !(Maybe [StatusDetailsItem])
, _sCode :: !(Maybe (Textual Int32))
, _sMessage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Status' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sDetails'
--
-- * 'sCode'
--
-- * 'sMessage'
status
:: Status
status =
Status'
{ _sDetails = Nothing
, _sCode = Nothing
, _sMessage = Nothing
}
-- | A list of messages that carry the error details. There will be a common
-- set of message types for APIs to use.
sDetails :: Lens' Status [StatusDetailsItem]
sDetails
= lens _sDetails (\ s a -> s{_sDetails = a}) .
_Default
. _Coerce
-- | The status code, which should be an enum value of google.rpc.Code.
sCode :: Lens' Status (Maybe Int32)
sCode
= lens _sCode (\ s a -> s{_sCode = a}) .
mapping _Coerce
-- | A developer-facing error message, which should be in English. Any
-- user-facing error message should be localized and sent in the
-- google.rpc.Status.details field, or localized by the client.
sMessage :: Lens' Status (Maybe Text)
sMessage = lens _sMessage (\ s a -> s{_sMessage = a})
instance FromJSON Status where
parseJSON
= withObject "Status"
(\ o ->
Status' <$>
(o .:? "details" .!= mempty) <*> (o .:? "code") <*>
(o .:? "message"))
instance ToJSON Status where
toJSON Status'{..}
= object
(catMaybes
[("details" .=) <$> _sDetails,
("code" .=) <$> _sCode,
("message" .=) <$> _sMessage])
-- | An OperationMetadata object. This will always be returned with the
-- Operation.
--
-- /See:/ 'operationSchema' smart constructor.
newtype OperationSchema = OperationSchema'
{ _osAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationSchema' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'osAddtional'
operationSchema
:: HashMap Text JSONValue -- ^ 'osAddtional'
-> OperationSchema
operationSchema pOsAddtional_ =
OperationSchema'
{ _osAddtional = _Coerce # pOsAddtional_
}
-- | Properties of the object. Contains field \'type with type URL.
osAddtional :: Lens' OperationSchema (HashMap Text JSONValue)
osAddtional
= lens _osAddtional (\ s a -> s{_osAddtional = a}) .
_Coerce
instance FromJSON OperationSchema where
parseJSON
= withObject "OperationSchema"
(\ o -> OperationSchema' <$> (parseJSONObject o))
instance ToJSON OperationSchema where
toJSON = toJSON . _osAddtional
-- | A variant represents a change in DNA sequence relative to a reference
-- sequence. For example, a variant could represent a SNP or an insertion.
-- Variants belong to a variant set. For more genomics resource
-- definitions, see [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
-- Each of the calls on a variant represent a determination of genotype
-- with respect to that variant. For example, a call might assign
-- probability of 0.32 to the occurrence of a SNP named rs1234 in a sample
-- named NA12345. A call belongs to a call set, which contains related
-- calls typically from one sample.
--
-- /See:/ 'variant' smart constructor.
data Variant = Variant'
{ _vVariantSetId :: !(Maybe Text)
, _vCreated :: !(Maybe (Textual Int64))
, _vStart :: !(Maybe (Textual Int64))
, _vAlternateBases :: !(Maybe [Text])
, _vReferenceName :: !(Maybe Text)
, _vNames :: !(Maybe [Text])
, _vEnd :: !(Maybe (Textual Int64))
, _vReferenceBases :: !(Maybe Text)
, _vId :: !(Maybe Text)
, _vQuality :: !(Maybe (Textual Double))
, _vFilter :: !(Maybe [Text])
, _vInfo :: !(Maybe VariantInfo)
, _vCalls :: !(Maybe [VariantCall])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Variant' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vVariantSetId'
--
-- * 'vCreated'
--
-- * 'vStart'
--
-- * 'vAlternateBases'
--
-- * 'vReferenceName'
--
-- * 'vNames'
--
-- * 'vEnd'
--
-- * 'vReferenceBases'
--
-- * 'vId'
--
-- * 'vQuality'
--
-- * 'vFilter'
--
-- * 'vInfo'
--
-- * 'vCalls'
variant
:: Variant
variant =
Variant'
{ _vVariantSetId = Nothing
, _vCreated = Nothing
, _vStart = Nothing
, _vAlternateBases = Nothing
, _vReferenceName = Nothing
, _vNames = Nothing
, _vEnd = Nothing
, _vReferenceBases = Nothing
, _vId = Nothing
, _vQuality = Nothing
, _vFilter = Nothing
, _vInfo = Nothing
, _vCalls = Nothing
}
-- | The ID of the variant set this variant belongs to.
vVariantSetId :: Lens' Variant (Maybe Text)
vVariantSetId
= lens _vVariantSetId
(\ s a -> s{_vVariantSetId = a})
-- | The date this variant was created, in milliseconds from the epoch.
vCreated :: Lens' Variant (Maybe Int64)
vCreated
= lens _vCreated (\ s a -> s{_vCreated = a}) .
mapping _Coerce
-- | The position at which this variant occurs (0-based). This corresponds to
-- the first base of the string of reference bases.
vStart :: Lens' Variant (Maybe Int64)
vStart
= lens _vStart (\ s a -> s{_vStart = a}) .
mapping _Coerce
-- | The bases that appear instead of the reference bases.
vAlternateBases :: Lens' Variant [Text]
vAlternateBases
= lens _vAlternateBases
(\ s a -> s{_vAlternateBases = a})
. _Default
. _Coerce
-- | The reference on which this variant occurs. (such as \`chr20\` or \`X\`)
vReferenceName :: Lens' Variant (Maybe Text)
vReferenceName
= lens _vReferenceName
(\ s a -> s{_vReferenceName = a})
-- | Names for the variant, for example a RefSNP ID.
vNames :: Lens' Variant [Text]
vNames
= lens _vNames (\ s a -> s{_vNames = a}) . _Default .
_Coerce
-- | The end position (0-based) of this variant. This corresponds to the
-- first base after the last base in the reference allele. So, the length
-- of the reference allele is (end - start). This is useful for variants
-- that don\'t explicitly give alternate bases, for example large
-- deletions.
vEnd :: Lens' Variant (Maybe Int64)
vEnd
= lens _vEnd (\ s a -> s{_vEnd = a}) .
mapping _Coerce
-- | The reference bases for this variant. They start at the given position.
vReferenceBases :: Lens' Variant (Maybe Text)
vReferenceBases
= lens _vReferenceBases
(\ s a -> s{_vReferenceBases = a})
-- | The server-generated variant ID, unique across all variants.
vId :: Lens' Variant (Maybe Text)
vId = lens _vId (\ s a -> s{_vId = a})
-- | A measure of how likely this variant is to be real. A higher value is
-- better.
vQuality :: Lens' Variant (Maybe Double)
vQuality
= lens _vQuality (\ s a -> s{_vQuality = a}) .
mapping _Coerce
-- | A list of filters (normally quality filters) this variant has failed.
-- \`PASS\` indicates this variant has passed all filters.
vFilter :: Lens' Variant [Text]
vFilter
= lens _vFilter (\ s a -> s{_vFilter = a}) . _Default
. _Coerce
-- | A map of additional variant information. This must be of the form map
-- (string key mapping to a list of string values).
vInfo :: Lens' Variant (Maybe VariantInfo)
vInfo = lens _vInfo (\ s a -> s{_vInfo = a})
-- | The variant calls for this particular variant. Each one represents the
-- determination of genotype with respect to this variant.
vCalls :: Lens' Variant [VariantCall]
vCalls
= lens _vCalls (\ s a -> s{_vCalls = a}) . _Default .
_Coerce
instance FromJSON Variant where
parseJSON
= withObject "Variant"
(\ o ->
Variant' <$>
(o .:? "variantSetId") <*> (o .:? "created") <*>
(o .:? "start")
<*> (o .:? "alternateBases" .!= mempty)
<*> (o .:? "referenceName")
<*> (o .:? "names" .!= mempty)
<*> (o .:? "end")
<*> (o .:? "referenceBases")
<*> (o .:? "id")
<*> (o .:? "quality")
<*> (o .:? "filter" .!= mempty)
<*> (o .:? "info")
<*> (o .:? "calls" .!= mempty))
instance ToJSON Variant where
toJSON Variant'{..}
= object
(catMaybes
[("variantSetId" .=) <$> _vVariantSetId,
("created" .=) <$> _vCreated,
("start" .=) <$> _vStart,
("alternateBases" .=) <$> _vAlternateBases,
("referenceName" .=) <$> _vReferenceName,
("names" .=) <$> _vNames, ("end" .=) <$> _vEnd,
("referenceBases" .=) <$> _vReferenceBases,
("id" .=) <$> _vId, ("quality" .=) <$> _vQuality,
("filter" .=) <$> _vFilter, ("info" .=) <$> _vInfo,
("calls" .=) <$> _vCalls])
-- | An annotation describes a region of reference genome. The value of an
-- annotation may be one of several canonical types, supplemented by
-- arbitrary info tags. An annotation is not inherently associated with a
-- specific sample or individual (though a client could choose to use
-- annotations in this way). Example canonical annotation types are
-- \`GENE\` and \`VARIANT\`.
--
-- /See:/ 'annotation' smart constructor.
data Annotation = Annotation'
{ _aVariant :: !(Maybe VariantAnnotation)
, _aAnnotationSetId :: !(Maybe Text)
, _aStart :: !(Maybe (Textual Int64))
, _aReverseStrand :: !(Maybe Bool)
, _aReferenceId :: !(Maybe Text)
, _aReferenceName :: !(Maybe Text)
, _aName :: !(Maybe Text)
, _aEnd :: !(Maybe (Textual Int64))
, _aId :: !(Maybe Text)
, _aType :: !(Maybe AnnotationType)
, _aTranscript :: !(Maybe Transcript)
, _aInfo :: !(Maybe AnnotationInfo)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Annotation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aVariant'
--
-- * 'aAnnotationSetId'
--
-- * 'aStart'
--
-- * 'aReverseStrand'
--
-- * 'aReferenceId'
--
-- * 'aReferenceName'
--
-- * 'aName'
--
-- * 'aEnd'
--
-- * 'aId'
--
-- * 'aType'
--
-- * 'aTranscript'
--
-- * 'aInfo'
annotation
:: Annotation
annotation =
Annotation'
{ _aVariant = Nothing
, _aAnnotationSetId = Nothing
, _aStart = Nothing
, _aReverseStrand = Nothing
, _aReferenceId = Nothing
, _aReferenceName = Nothing
, _aName = Nothing
, _aEnd = Nothing
, _aId = Nothing
, _aType = Nothing
, _aTranscript = Nothing
, _aInfo = Nothing
}
-- | A variant annotation, which describes the effect of a variant on the
-- genome, the coding sequence, and\/or higher-level consequences at the
-- organism level, e.g. pathogenicity. This field is only set for
-- annotations of type \`VARIANT\`.
aVariant :: Lens' Annotation (Maybe VariantAnnotation)
aVariant = lens _aVariant (\ s a -> s{_aVariant = a})
-- | The annotation set to which this annotation belongs.
aAnnotationSetId :: Lens' Annotation (Maybe Text)
aAnnotationSetId
= lens _aAnnotationSetId
(\ s a -> s{_aAnnotationSetId = a})
-- | The start position of the range on the reference, 0-based inclusive.
aStart :: Lens' Annotation (Maybe Int64)
aStart
= lens _aStart (\ s a -> s{_aStart = a}) .
mapping _Coerce
-- | Whether this range refers to the reverse strand, as opposed to the
-- forward strand. Note that regardless of this field, the start\/end
-- position of the range always refer to the forward strand.
aReverseStrand :: Lens' Annotation (Maybe Bool)
aReverseStrand
= lens _aReverseStrand
(\ s a -> s{_aReverseStrand = a})
-- | The ID of the Google Genomics reference associated with this range.
aReferenceId :: Lens' Annotation (Maybe Text)
aReferenceId
= lens _aReferenceId (\ s a -> s{_aReferenceId = a})
-- | The display name corresponding to the reference specified by
-- \`referenceId\`, for example \`chr1\`, \`1\`, or \`chrX\`.
aReferenceName :: Lens' Annotation (Maybe Text)
aReferenceName
= lens _aReferenceName
(\ s a -> s{_aReferenceName = a})
-- | The display name of this annotation.
aName :: Lens' Annotation (Maybe Text)
aName = lens _aName (\ s a -> s{_aName = a})
-- | The end position of the range on the reference, 0-based exclusive.
aEnd :: Lens' Annotation (Maybe Int64)
aEnd
= lens _aEnd (\ s a -> s{_aEnd = a}) .
mapping _Coerce
-- | The server-generated annotation ID, unique across all annotations.
aId :: Lens' Annotation (Maybe Text)
aId = lens _aId (\ s a -> s{_aId = a})
-- | The data type for this annotation. Must match the containing annotation
-- set\'s type.
aType :: Lens' Annotation (Maybe AnnotationType)
aType = lens _aType (\ s a -> s{_aType = a})
-- | A transcript value represents the assertion that a particular region of
-- the reference genome may be transcribed as RNA. An alternative splicing
-- pattern would be represented as a separate transcript object. This field
-- is only set for annotations of type \`TRANSCRIPT\`.
aTranscript :: Lens' Annotation (Maybe Transcript)
aTranscript
= lens _aTranscript (\ s a -> s{_aTranscript = a})
-- | A map of additional annotation information. This must be of the form
-- map (string key mapping to a list of string values).
aInfo :: Lens' Annotation (Maybe AnnotationInfo)
aInfo = lens _aInfo (\ s a -> s{_aInfo = a})
instance FromJSON Annotation where
parseJSON
= withObject "Annotation"
(\ o ->
Annotation' <$>
(o .:? "variant") <*> (o .:? "annotationSetId") <*>
(o .:? "start")
<*> (o .:? "reverseStrand")
<*> (o .:? "referenceId")
<*> (o .:? "referenceName")
<*> (o .:? "name")
<*> (o .:? "end")
<*> (o .:? "id")
<*> (o .:? "type")
<*> (o .:? "transcript")
<*> (o .:? "info"))
instance ToJSON Annotation where
toJSON Annotation'{..}
= object
(catMaybes
[("variant" .=) <$> _aVariant,
("annotationSetId" .=) <$> _aAnnotationSetId,
("start" .=) <$> _aStart,
("reverseStrand" .=) <$> _aReverseStrand,
("referenceId" .=) <$> _aReferenceId,
("referenceName" .=) <$> _aReferenceName,
("name" .=) <$> _aName, ("end" .=) <$> _aEnd,
("id" .=) <$> _aId, ("type" .=) <$> _aType,
("transcript" .=) <$> _aTranscript,
("info" .=) <$> _aInfo])
--
-- /See:/ 'listBasesResponse' smart constructor.
data ListBasesResponse = ListBasesResponse'
{ _lbrNextPageToken :: !(Maybe Text)
, _lbrOffSet :: !(Maybe (Textual Int64))
, _lbrSequence :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListBasesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lbrNextPageToken'
--
-- * 'lbrOffSet'
--
-- * 'lbrSequence'
listBasesResponse
:: ListBasesResponse
listBasesResponse =
ListBasesResponse'
{ _lbrNextPageToken = Nothing
, _lbrOffSet = Nothing
, _lbrSequence = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
lbrNextPageToken :: Lens' ListBasesResponse (Maybe Text)
lbrNextPageToken
= lens _lbrNextPageToken
(\ s a -> s{_lbrNextPageToken = a})
-- | The offset position (0-based) of the given \`sequence\` from the start
-- of this \`Reference\`. This value will differ for each page in a
-- paginated request.
lbrOffSet :: Lens' ListBasesResponse (Maybe Int64)
lbrOffSet
= lens _lbrOffSet (\ s a -> s{_lbrOffSet = a}) .
mapping _Coerce
-- | A substring of the bases that make up this reference.
lbrSequence :: Lens' ListBasesResponse (Maybe Text)
lbrSequence
= lens _lbrSequence (\ s a -> s{_lbrSequence = a})
instance FromJSON ListBasesResponse where
parseJSON
= withObject "ListBasesResponse"
(\ o ->
ListBasesResponse' <$>
(o .:? "nextPageToken") <*> (o .:? "offset") <*>
(o .:? "sequence"))
instance ToJSON ListBasesResponse where
toJSON ListBasesResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lbrNextPageToken,
("offset" .=) <$> _lbrOffSet,
("sequence" .=) <$> _lbrSequence])
-- | The response message for Operations.ListOperations.
--
-- /See:/ 'listOperationsResponse' smart constructor.
data ListOperationsResponse = ListOperationsResponse'
{ _lorNextPageToken :: !(Maybe Text)
, _lorOperations :: !(Maybe [Operation])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListOperationsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lorNextPageToken'
--
-- * 'lorOperations'
listOperationsResponse
:: ListOperationsResponse
listOperationsResponse =
ListOperationsResponse'
{ _lorNextPageToken = Nothing
, _lorOperations = Nothing
}
-- | The standard List next-page token.
lorNextPageToken :: Lens' ListOperationsResponse (Maybe Text)
lorNextPageToken
= lens _lorNextPageToken
(\ s a -> s{_lorNextPageToken = a})
-- | A list of operations that matches the specified filter in the request.
lorOperations :: Lens' ListOperationsResponse [Operation]
lorOperations
= lens _lorOperations
(\ s a -> s{_lorOperations = a})
. _Default
. _Coerce
instance FromJSON ListOperationsResponse where
parseJSON
= withObject "ListOperationsResponse"
(\ o ->
ListOperationsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "operations" .!= mempty))
instance ToJSON ListOperationsResponse where
toJSON ListOperationsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lorNextPageToken,
("operations" .=) <$> _lorOperations])
-- | Request message for \`GetIamPolicy\` method.
--
-- /See:/ 'getIAMPolicyRequest' smart constructor.
data GetIAMPolicyRequest =
GetIAMPolicyRequest'
deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetIAMPolicyRequest' with the minimum fields required to make a request.
--
getIAMPolicyRequest
:: GetIAMPolicyRequest
getIAMPolicyRequest = GetIAMPolicyRequest'
instance FromJSON GetIAMPolicyRequest where
parseJSON
= withObject "GetIAMPolicyRequest"
(\ o -> pure GetIAMPolicyRequest')
instance ToJSON GetIAMPolicyRequest where
toJSON = const emptyObject
-- | The request message for Operations.CancelOperation.
--
-- /See:/ 'cancelOperationRequest' smart constructor.
data CancelOperationRequest =
CancelOperationRequest'
deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CancelOperationRequest' with the minimum fields required to make a request.
--
cancelOperationRequest
:: CancelOperationRequest
cancelOperationRequest = CancelOperationRequest'
instance FromJSON CancelOperationRequest where
parseJSON
= withObject "CancelOperationRequest"
(\ o -> pure CancelOperationRequest')
instance ToJSON CancelOperationRequest where
toJSON = const emptyObject
-- | A Dataset is a collection of genomic data. For more genomics resource
-- definitions, see [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
--
-- /See:/ 'dataSet' smart constructor.
data DataSet = DataSet'
{ _dsName :: !(Maybe Text)
, _dsId :: !(Maybe Text)
, _dsProjectId :: !(Maybe Text)
, _dsCreateTime :: !(Maybe DateTime')
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'DataSet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dsName'
--
-- * 'dsId'
--
-- * 'dsProjectId'
--
-- * 'dsCreateTime'
dataSet
:: DataSet
dataSet =
DataSet'
{ _dsName = Nothing
, _dsId = Nothing
, _dsProjectId = Nothing
, _dsCreateTime = Nothing
}
-- | The dataset name.
dsName :: Lens' DataSet (Maybe Text)
dsName = lens _dsName (\ s a -> s{_dsName = a})
-- | The server-generated dataset ID, unique across all datasets.
dsId :: Lens' DataSet (Maybe Text)
dsId = lens _dsId (\ s a -> s{_dsId = a})
-- | The Google Cloud project ID that this dataset belongs to.
dsProjectId :: Lens' DataSet (Maybe Text)
dsProjectId
= lens _dsProjectId (\ s a -> s{_dsProjectId = a})
-- | The time this dataset was created, in seconds from the epoch.
dsCreateTime :: Lens' DataSet (Maybe UTCTime)
dsCreateTime
= lens _dsCreateTime (\ s a -> s{_dsCreateTime = a})
. mapping _DateTime
instance FromJSON DataSet where
parseJSON
= withObject "DataSet"
(\ o ->
DataSet' <$>
(o .:? "name") <*> (o .:? "id") <*>
(o .:? "projectId")
<*> (o .:? "createTime"))
instance ToJSON DataSet where
toJSON DataSet'{..}
= object
(catMaybes
[("name" .=) <$> _dsName, ("id" .=) <$> _dsId,
("projectId" .=) <$> _dsProjectId,
("createTime" .=) <$> _dsCreateTime])
-- | A read alignment describes a linear alignment of a string of DNA to a
-- reference sequence, in addition to metadata about the fragment (the
-- molecule of DNA sequenced) and the read (the bases which were read by
-- the sequencer). A read is equivalent to a line in a SAM file. A read
-- belongs to exactly one read group and exactly one read group set. For
-- more genomics resource definitions, see [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
-- ### Reverse-stranded reads Mapped reads (reads having a non-null
-- \`alignment\`) can be aligned to either the forward or the reverse
-- strand of their associated reference. Strandedness of a mapped read is
-- encoded by \`alignment.position.reverseStrand\`. If we consider the
-- reference to be a forward-stranded coordinate space of \`[0,
-- reference.length)\` with \`0\` as the left-most position and
-- \`reference.length\` as the right-most position, reads are always
-- aligned left to right. That is, \`alignment.position.position\` always
-- refers to the left-most reference coordinate and \`alignment.cigar\`
-- describes the alignment of this read to the reference from left to
-- right. All per-base fields such as \`alignedSequence\` and
-- \`alignedQuality\` share this same left-to-right orientation; this is
-- true of reads which are aligned to either strand. For reverse-stranded
-- reads, this means that \`alignedSequence\` is the reverse complement of
-- the bases that were originally reported by the sequencing machine. ###
-- Generating a reference-aligned sequence string When interacting with
-- mapped reads, it\'s often useful to produce a string representing the
-- local alignment of the read to reference. The following pseudocode
-- demonstrates one way of doing this: out = \"\" offset = 0 for c in
-- read.alignment.cigar { switch c.operation { case \"ALIGNMENT_MATCH\",
-- \"SEQUENCE_MATCH\", \"SEQUENCE_MISMATCH\": out +=
-- read.alignedSequence[offset:offset+c.operationLength] offset +=
-- c.operationLength break case \"CLIP_SOFT\", \"INSERT\": offset +=
-- c.operationLength break case \"PAD\": out += repeat(\"*\",
-- c.operationLength) break case \"DELETE\": out += repeat(\"-\",
-- c.operationLength) break case \"SKIP\": out += repeat(\" \",
-- c.operationLength) break case \"CLIP_HARD\": break } } return out ###
-- Converting to SAM\'s CIGAR string The following pseudocode generates a
-- SAM CIGAR string from the \`cigar\` field. Note that this is a lossy
-- conversion (\`cigar.referenceSequence\` is lost). cigarMap = {
-- \"ALIGNMENT_MATCH\": \"M\", \"INSERT\": \"I\", \"DELETE\": \"D\",
-- \"SKIP\": \"N\", \"CLIP_SOFT\": \"S\", \"CLIP_HARD\": \"H\", \"PAD\":
-- \"P\", \"SEQUENCE_MATCH\": \"=\", \"SEQUENCE_MISMATCH\": \"X\", }
-- cigarStr = \"\" for c in read.alignment.cigar { cigarStr +=
-- c.operationLength + cigarMap[c.operation] } return cigarStr
--
-- /See:/ 'read'' smart constructor.
data Read' = Read''
{ _rFragmentLength :: !(Maybe (Textual Int32))
, _rDuplicateFragment :: !(Maybe Bool)
, _rReadGroupSetId :: !(Maybe Text)
, _rNextMatePosition :: !(Maybe Position)
, _rFailedVendorQualityChecks :: !(Maybe Bool)
, _rAlignment :: !(Maybe LinearAlignment)
, _rFragmentName :: !(Maybe Text)
, _rNumberReads :: !(Maybe (Textual Int32))
, _rId :: !(Maybe Text)
, _rSecondaryAlignment :: !(Maybe Bool)
, _rReadGroupId :: !(Maybe Text)
, _rSupplementaryAlignment :: !(Maybe Bool)
, _rAlignedSequence :: !(Maybe Text)
, _rProperPlacement :: !(Maybe Bool)
, _rInfo :: !(Maybe ReadInfo)
, _rReadNumber :: !(Maybe (Textual Int32))
, _rAlignedQuality :: !(Maybe [Textual Int32])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Read' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rFragmentLength'
--
-- * 'rDuplicateFragment'
--
-- * 'rReadGroupSetId'
--
-- * 'rNextMatePosition'
--
-- * 'rFailedVendorQualityChecks'
--
-- * 'rAlignment'
--
-- * 'rFragmentName'
--
-- * 'rNumberReads'
--
-- * 'rId'
--
-- * 'rSecondaryAlignment'
--
-- * 'rReadGroupId'
--
-- * 'rSupplementaryAlignment'
--
-- * 'rAlignedSequence'
--
-- * 'rProperPlacement'
--
-- * 'rInfo'
--
-- * 'rReadNumber'
--
-- * 'rAlignedQuality'
read'
:: Read'
read' =
Read''
{ _rFragmentLength = Nothing
, _rDuplicateFragment = Nothing
, _rReadGroupSetId = Nothing
, _rNextMatePosition = Nothing
, _rFailedVendorQualityChecks = Nothing
, _rAlignment = Nothing
, _rFragmentName = Nothing
, _rNumberReads = Nothing
, _rId = Nothing
, _rSecondaryAlignment = Nothing
, _rReadGroupId = Nothing
, _rSupplementaryAlignment = Nothing
, _rAlignedSequence = Nothing
, _rProperPlacement = Nothing
, _rInfo = Nothing
, _rReadNumber = Nothing
, _rAlignedQuality = Nothing
}
-- | The observed length of the fragment, equivalent to TLEN in SAM.
rFragmentLength :: Lens' Read' (Maybe Int32)
rFragmentLength
= lens _rFragmentLength
(\ s a -> s{_rFragmentLength = a})
. mapping _Coerce
-- | The fragment is a PCR or optical duplicate (SAM flag 0x400).
rDuplicateFragment :: Lens' Read' (Maybe Bool)
rDuplicateFragment
= lens _rDuplicateFragment
(\ s a -> s{_rDuplicateFragment = a})
-- | The ID of the read group set this read belongs to. A read belongs to
-- exactly one read group set.
rReadGroupSetId :: Lens' Read' (Maybe Text)
rReadGroupSetId
= lens _rReadGroupSetId
(\ s a -> s{_rReadGroupSetId = a})
-- | The mapping of the primary alignment of the
-- \`(readNumber+1)%numberReads\` read in the fragment. It replaces mate
-- position and mate strand in SAM.
rNextMatePosition :: Lens' Read' (Maybe Position)
rNextMatePosition
= lens _rNextMatePosition
(\ s a -> s{_rNextMatePosition = a})
-- | Whether this read did not pass filters, such as platform or vendor
-- quality controls (SAM flag 0x200).
rFailedVendorQualityChecks :: Lens' Read' (Maybe Bool)
rFailedVendorQualityChecks
= lens _rFailedVendorQualityChecks
(\ s a -> s{_rFailedVendorQualityChecks = a})
-- | The linear alignment for this alignment record. This field is null for
-- unmapped reads.
rAlignment :: Lens' Read' (Maybe LinearAlignment)
rAlignment
= lens _rAlignment (\ s a -> s{_rAlignment = a})
-- | The fragment name. Equivalent to QNAME (query template name) in SAM.
rFragmentName :: Lens' Read' (Maybe Text)
rFragmentName
= lens _rFragmentName
(\ s a -> s{_rFragmentName = a})
-- | The number of reads in the fragment (extension to SAM flag 0x1).
rNumberReads :: Lens' Read' (Maybe Int32)
rNumberReads
= lens _rNumberReads (\ s a -> s{_rNumberReads = a})
. mapping _Coerce
-- | The server-generated read ID, unique across all reads. This is different
-- from the \`fragmentName\`.
rId :: Lens' Read' (Maybe Text)
rId = lens _rId (\ s a -> s{_rId = a})
-- | Whether this alignment is secondary. Equivalent to SAM flag 0x100. A
-- secondary alignment represents an alternative to the primary alignment
-- for this read. Aligners may return secondary alignments if a read can
-- map ambiguously to multiple coordinates in the genome. By convention,
-- each read has one and only one alignment where both
-- \`secondaryAlignment\` and \`supplementaryAlignment\` are false.
rSecondaryAlignment :: Lens' Read' (Maybe Bool)
rSecondaryAlignment
= lens _rSecondaryAlignment
(\ s a -> s{_rSecondaryAlignment = a})
-- | The ID of the read group this read belongs to. A read belongs to exactly
-- one read group. This is a server-generated ID which is distinct from
-- SAM\'s RG tag (for that value, see ReadGroup.name).
rReadGroupId :: Lens' Read' (Maybe Text)
rReadGroupId
= lens _rReadGroupId (\ s a -> s{_rReadGroupId = a})
-- | Whether this alignment is supplementary. Equivalent to SAM flag 0x800.
-- Supplementary alignments are used in the representation of a chimeric
-- alignment. In a chimeric alignment, a read is split into multiple linear
-- alignments that map to different reference contigs. The first linear
-- alignment in the read will be designated as the representative
-- alignment; the remaining linear alignments will be designated as
-- supplementary alignments. These alignments may have different mapping
-- quality scores. In each linear alignment in a chimeric alignment, the
-- read will be hard clipped. The \`alignedSequence\` and
-- \`alignedQuality\` fields in the alignment record will only represent
-- the bases for its respective linear alignment.
rSupplementaryAlignment :: Lens' Read' (Maybe Bool)
rSupplementaryAlignment
= lens _rSupplementaryAlignment
(\ s a -> s{_rSupplementaryAlignment = a})
-- | The bases of the read sequence contained in this alignment record,
-- **without CIGAR operations applied** (equivalent to SEQ in SAM).
-- \`alignedSequence\` and \`alignedQuality\` may be shorter than the full
-- read sequence and quality. This will occur if the alignment is part of a
-- chimeric alignment, or if the read was trimmed. When this occurs, the
-- CIGAR for this read will begin\/end with a hard clip operator that will
-- indicate the length of the excised sequence.
rAlignedSequence :: Lens' Read' (Maybe Text)
rAlignedSequence
= lens _rAlignedSequence
(\ s a -> s{_rAlignedSequence = a})
-- | The orientation and the distance between reads from the fragment are
-- consistent with the sequencing protocol (SAM flag 0x2).
rProperPlacement :: Lens' Read' (Maybe Bool)
rProperPlacement
= lens _rProperPlacement
(\ s a -> s{_rProperPlacement = a})
-- | A map of additional read alignment information. This must be of the form
-- map (string key mapping to a list of string values).
rInfo :: Lens' Read' (Maybe ReadInfo)
rInfo = lens _rInfo (\ s a -> s{_rInfo = a})
-- | The read number in sequencing. 0-based and less than numberReads. This
-- field replaces SAM flag 0x40 and 0x80.
rReadNumber :: Lens' Read' (Maybe Int32)
rReadNumber
= lens _rReadNumber (\ s a -> s{_rReadNumber = a}) .
mapping _Coerce
-- | The quality of the read sequence contained in this alignment record
-- (equivalent to QUAL in SAM). \`alignedSequence\` and \`alignedQuality\`
-- may be shorter than the full read sequence and quality. This will occur
-- if the alignment is part of a chimeric alignment, or if the read was
-- trimmed. When this occurs, the CIGAR for this read will begin\/end with
-- a hard clip operator that will indicate the length of the excised
-- sequence.
rAlignedQuality :: Lens' Read' [Int32]
rAlignedQuality
= lens _rAlignedQuality
(\ s a -> s{_rAlignedQuality = a})
. _Default
. _Coerce
instance FromJSON Read' where
parseJSON
= withObject "Read"
(\ o ->
Read'' <$>
(o .:? "fragmentLength") <*>
(o .:? "duplicateFragment")
<*> (o .:? "readGroupSetId")
<*> (o .:? "nextMatePosition")
<*> (o .:? "failedVendorQualityChecks")
<*> (o .:? "alignment")
<*> (o .:? "fragmentName")
<*> (o .:? "numberReads")
<*> (o .:? "id")
<*> (o .:? "secondaryAlignment")
<*> (o .:? "readGroupId")
<*> (o .:? "supplementaryAlignment")
<*> (o .:? "alignedSequence")
<*> (o .:? "properPlacement")
<*> (o .:? "info")
<*> (o .:? "readNumber")
<*> (o .:? "alignedQuality" .!= mempty))
instance ToJSON Read' where
toJSON Read''{..}
= object
(catMaybes
[("fragmentLength" .=) <$> _rFragmentLength,
("duplicateFragment" .=) <$> _rDuplicateFragment,
("readGroupSetId" .=) <$> _rReadGroupSetId,
("nextMatePosition" .=) <$> _rNextMatePosition,
("failedVendorQualityChecks" .=) <$>
_rFailedVendorQualityChecks,
("alignment" .=) <$> _rAlignment,
("fragmentName" .=) <$> _rFragmentName,
("numberReads" .=) <$> _rNumberReads,
("id" .=) <$> _rId,
("secondaryAlignment" .=) <$> _rSecondaryAlignment,
("readGroupId" .=) <$> _rReadGroupId,
("supplementaryAlignment" .=) <$>
_rSupplementaryAlignment,
("alignedSequence" .=) <$> _rAlignedSequence,
("properPlacement" .=) <$> _rProperPlacement,
("info" .=) <$> _rInfo,
("readNumber" .=) <$> _rReadNumber,
("alignedQuality" .=) <$> _rAlignedQuality])
-- | Optionally provided by the caller when submitting the request that
-- creates the operation.
--
-- /See:/ 'operationMetadataLabels' smart constructor.
newtype OperationMetadataLabels = OperationMetadataLabels'
{ _omlAddtional :: HashMap Text Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationMetadataLabels' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'omlAddtional'
operationMetadataLabels
:: HashMap Text Text -- ^ 'omlAddtional'
-> OperationMetadataLabels
operationMetadataLabels pOmlAddtional_ =
OperationMetadataLabels'
{ _omlAddtional = _Coerce # pOmlAddtional_
}
omlAddtional :: Lens' OperationMetadataLabels (HashMap Text Text)
omlAddtional
= lens _omlAddtional (\ s a -> s{_omlAddtional = a})
. _Coerce
instance FromJSON OperationMetadataLabels where
parseJSON
= withObject "OperationMetadataLabels"
(\ o ->
OperationMetadataLabels' <$> (parseJSONObject o))
instance ToJSON OperationMetadataLabels where
toJSON = toJSON . _omlAddtional
-- | A call represents the determination of genotype with respect to a
-- particular variant. It may include associated information such as
-- quality and phasing. For example, a call might assign a probability of
-- 0.32 to the occurrence of a SNP named rs1234 in a call set with the name
-- NA12345.
--
-- /See:/ 'variantCall' smart constructor.
data VariantCall = VariantCall'
{ _vcGenotypeLikelihood :: !(Maybe [Textual Double])
, _vcCallSetName :: !(Maybe Text)
, _vcPhaseset :: !(Maybe Text)
, _vcCallSetId :: !(Maybe Text)
, _vcGenotype :: !(Maybe [Textual Int32])
, _vcInfo :: !(Maybe VariantCallInfo)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'VariantCall' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vcGenotypeLikelihood'
--
-- * 'vcCallSetName'
--
-- * 'vcPhaseset'
--
-- * 'vcCallSetId'
--
-- * 'vcGenotype'
--
-- * 'vcInfo'
variantCall
:: VariantCall
variantCall =
VariantCall'
{ _vcGenotypeLikelihood = Nothing
, _vcCallSetName = Nothing
, _vcPhaseset = Nothing
, _vcCallSetId = Nothing
, _vcGenotype = Nothing
, _vcInfo = Nothing
}
-- | The genotype likelihoods for this variant call. Each array entry
-- represents how likely a specific genotype is for this call. The value
-- ordering is defined by the GL tag in the VCF spec. If Phred-scaled
-- genotype likelihood scores (PL) are available and log10(P) genotype
-- likelihood scores (GL) are not, PL scores are converted to GL scores. If
-- both are available, PL scores are stored in \`info\`.
vcGenotypeLikelihood :: Lens' VariantCall [Double]
vcGenotypeLikelihood
= lens _vcGenotypeLikelihood
(\ s a -> s{_vcGenotypeLikelihood = a})
. _Default
. _Coerce
-- | The name of the call set this variant call belongs to.
vcCallSetName :: Lens' VariantCall (Maybe Text)
vcCallSetName
= lens _vcCallSetName
(\ s a -> s{_vcCallSetName = a})
-- | If this field is present, this variant call\'s genotype ordering implies
-- the phase of the bases and is consistent with any other variant calls in
-- the same reference sequence which have the same phaseset value. When
-- importing data from VCF, if the genotype data was phased but no phase
-- set was specified this field will be set to \`*\`.
vcPhaseset :: Lens' VariantCall (Maybe Text)
vcPhaseset
= lens _vcPhaseset (\ s a -> s{_vcPhaseset = a})
-- | The ID of the call set this variant call belongs to.
vcCallSetId :: Lens' VariantCall (Maybe Text)
vcCallSetId
= lens _vcCallSetId (\ s a -> s{_vcCallSetId = a})
-- | The genotype of this variant call. Each value represents either the
-- value of the \`referenceBases\` field or a 1-based index into
-- \`alternateBases\`. If a variant had a \`referenceBases\` value of \`T\`
-- and an \`alternateBases\` value of \`[\"A\", \"C\"]\`, and the
-- \`genotype\` was \`[2, 1]\`, that would mean the call represented the
-- heterozygous value \`CA\` for this variant. If the \`genotype\` was
-- instead \`[0, 1]\`, the represented value would be \`TA\`. Ordering of
-- the genotype values is important if the \`phaseset\` is present. If a
-- genotype is not called (that is, a \`.\` is present in the GT string) -1
-- is returned.
vcGenotype :: Lens' VariantCall [Int32]
vcGenotype
= lens _vcGenotype (\ s a -> s{_vcGenotype = a}) .
_Default
. _Coerce
-- | A map of additional variant call information. This must be of the form
-- map (string key mapping to a list of string values).
vcInfo :: Lens' VariantCall (Maybe VariantCallInfo)
vcInfo = lens _vcInfo (\ s a -> s{_vcInfo = a})
instance FromJSON VariantCall where
parseJSON
= withObject "VariantCall"
(\ o ->
VariantCall' <$>
(o .:? "genotypeLikelihood" .!= mempty) <*>
(o .:? "callSetName")
<*> (o .:? "phaseset")
<*> (o .:? "callSetId")
<*> (o .:? "genotype" .!= mempty)
<*> (o .:? "info"))
instance ToJSON VariantCall where
toJSON VariantCall'{..}
= object
(catMaybes
[("genotypeLikelihood" .=) <$> _vcGenotypeLikelihood,
("callSetName" .=) <$> _vcCallSetName,
("phaseset" .=) <$> _vcPhaseset,
("callSetId" .=) <$> _vcCallSetId,
("genotype" .=) <$> _vcGenotype,
("info" .=) <$> _vcInfo])
--
-- /See:/ 'batchCreateAnnotationsRequest' smart constructor.
data BatchCreateAnnotationsRequest = BatchCreateAnnotationsRequest'
{ _bcarAnnotations :: !(Maybe [Annotation])
, _bcarRequestId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BatchCreateAnnotationsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bcarAnnotations'
--
-- * 'bcarRequestId'
batchCreateAnnotationsRequest
:: BatchCreateAnnotationsRequest
batchCreateAnnotationsRequest =
BatchCreateAnnotationsRequest'
{ _bcarAnnotations = Nothing
, _bcarRequestId = Nothing
}
-- | The annotations to be created. At most 4096 can be specified in a single
-- request.
bcarAnnotations :: Lens' BatchCreateAnnotationsRequest [Annotation]
bcarAnnotations
= lens _bcarAnnotations
(\ s a -> s{_bcarAnnotations = a})
. _Default
. _Coerce
-- | A unique request ID which enables the server to detect duplicated
-- requests. If provided, duplicated requests will result in the same
-- response; if not provided, duplicated requests may result in duplicated
-- data. For a given annotation set, callers should not reuse
-- \`request_id\`s when writing different batches of annotations - behavior
-- in this case is undefined. A common approach is to use a UUID. For batch
-- jobs where worker crashes are a possibility, consider using some unique
-- variant of a worker or run ID.
bcarRequestId :: Lens' BatchCreateAnnotationsRequest (Maybe Text)
bcarRequestId
= lens _bcarRequestId
(\ s a -> s{_bcarRequestId = a})
instance FromJSON BatchCreateAnnotationsRequest where
parseJSON
= withObject "BatchCreateAnnotationsRequest"
(\ o ->
BatchCreateAnnotationsRequest' <$>
(o .:? "annotations" .!= mempty) <*>
(o .:? "requestId"))
instance ToJSON BatchCreateAnnotationsRequest where
toJSON BatchCreateAnnotationsRequest'{..}
= object
(catMaybes
[("annotations" .=) <$> _bcarAnnotations,
("requestId" .=) <$> _bcarRequestId])
--
-- /See:/ 'mergeVariantsRequest' smart constructor.
data MergeVariantsRequest = MergeVariantsRequest'
{ _mvrVariants :: !(Maybe [Variant])
, _mvrVariantSetId :: !(Maybe Text)
, _mvrInfoMergeConfig :: !(Maybe MergeVariantsRequestInfoMergeConfig)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'MergeVariantsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mvrVariants'
--
-- * 'mvrVariantSetId'
--
-- * 'mvrInfoMergeConfig'
mergeVariantsRequest
:: MergeVariantsRequest
mergeVariantsRequest =
MergeVariantsRequest'
{ _mvrVariants = Nothing
, _mvrVariantSetId = Nothing
, _mvrInfoMergeConfig = Nothing
}
-- | The variants to be merged with existing variants.
mvrVariants :: Lens' MergeVariantsRequest [Variant]
mvrVariants
= lens _mvrVariants (\ s a -> s{_mvrVariants = a}) .
_Default
. _Coerce
-- | The destination variant set.
mvrVariantSetId :: Lens' MergeVariantsRequest (Maybe Text)
mvrVariantSetId
= lens _mvrVariantSetId
(\ s a -> s{_mvrVariantSetId = a})
-- | A mapping between info field keys and the InfoMergeOperations to be
-- performed on them.
mvrInfoMergeConfig :: Lens' MergeVariantsRequest (Maybe MergeVariantsRequestInfoMergeConfig)
mvrInfoMergeConfig
= lens _mvrInfoMergeConfig
(\ s a -> s{_mvrInfoMergeConfig = a})
instance FromJSON MergeVariantsRequest where
parseJSON
= withObject "MergeVariantsRequest"
(\ o ->
MergeVariantsRequest' <$>
(o .:? "variants" .!= mempty) <*>
(o .:? "variantSetId")
<*> (o .:? "infoMergeConfig"))
instance ToJSON MergeVariantsRequest where
toJSON MergeVariantsRequest'{..}
= object
(catMaybes
[("variants" .=) <$> _mvrVariants,
("variantSetId" .=) <$> _mvrVariantSetId,
("infoMergeConfig" .=) <$> _mvrInfoMergeConfig])
-- | A read group is all the data that\'s processed the same way by the
-- sequencer.
--
-- /See:/ 'readGroup' smart constructor.
data ReadGroup = ReadGroup'
{ _reaReferenceSetId :: !(Maybe Text)
, _reaPrograms :: !(Maybe [Program])
, _reaExperiment :: !(Maybe Experiment)
, _reaName :: !(Maybe Text)
, _reaDataSetId :: !(Maybe Text)
, _reaId :: !(Maybe Text)
, _reaSampleId :: !(Maybe Text)
, _reaPredictedInsertSize :: !(Maybe (Textual Int32))
, _reaDescription :: !(Maybe Text)
, _reaInfo :: !(Maybe ReadGroupInfo)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReadGroup' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'reaReferenceSetId'
--
-- * 'reaPrograms'
--
-- * 'reaExperiment'
--
-- * 'reaName'
--
-- * 'reaDataSetId'
--
-- * 'reaId'
--
-- * 'reaSampleId'
--
-- * 'reaPredictedInsertSize'
--
-- * 'reaDescription'
--
-- * 'reaInfo'
readGroup
:: ReadGroup
readGroup =
ReadGroup'
{ _reaReferenceSetId = Nothing
, _reaPrograms = Nothing
, _reaExperiment = Nothing
, _reaName = Nothing
, _reaDataSetId = Nothing
, _reaId = Nothing
, _reaSampleId = Nothing
, _reaPredictedInsertSize = Nothing
, _reaDescription = Nothing
, _reaInfo = Nothing
}
-- | The reference set the reads in this read group are aligned to.
reaReferenceSetId :: Lens' ReadGroup (Maybe Text)
reaReferenceSetId
= lens _reaReferenceSetId
(\ s a -> s{_reaReferenceSetId = a})
-- | The programs used to generate this read group. Programs are always
-- identical for all read groups within a read group set. For this reason,
-- only the first read group in a returned set will have this field
-- populated.
reaPrograms :: Lens' ReadGroup [Program]
reaPrograms
= lens _reaPrograms (\ s a -> s{_reaPrograms = a}) .
_Default
. _Coerce
-- | The experiment used to generate this read group.
reaExperiment :: Lens' ReadGroup (Maybe Experiment)
reaExperiment
= lens _reaExperiment
(\ s a -> s{_reaExperiment = a})
-- | The read group name. This corresponds to the \@RG ID field in the SAM
-- spec.
reaName :: Lens' ReadGroup (Maybe Text)
reaName = lens _reaName (\ s a -> s{_reaName = a})
-- | The dataset to which this read group belongs.
reaDataSetId :: Lens' ReadGroup (Maybe Text)
reaDataSetId
= lens _reaDataSetId (\ s a -> s{_reaDataSetId = a})
-- | The server-generated read group ID, unique across all read groups. Note:
-- This is different from the \@RG ID field in the SAM spec. For that
-- value, see name.
reaId :: Lens' ReadGroup (Maybe Text)
reaId = lens _reaId (\ s a -> s{_reaId = a})
-- | A client-supplied sample identifier for the reads in this read group.
reaSampleId :: Lens' ReadGroup (Maybe Text)
reaSampleId
= lens _reaSampleId (\ s a -> s{_reaSampleId = a})
-- | The predicted insert size of this read group. The insert size is the
-- length of the sequenced DNA fragment from end to end, not including the
-- adapters.
reaPredictedInsertSize :: Lens' ReadGroup (Maybe Int32)
reaPredictedInsertSize
= lens _reaPredictedInsertSize
(\ s a -> s{_reaPredictedInsertSize = a})
. mapping _Coerce
-- | A free-form text description of this read group.
reaDescription :: Lens' ReadGroup (Maybe Text)
reaDescription
= lens _reaDescription
(\ s a -> s{_reaDescription = a})
-- | A map of additional read group information. This must be of the form map
-- (string key mapping to a list of string values).
reaInfo :: Lens' ReadGroup (Maybe ReadGroupInfo)
reaInfo = lens _reaInfo (\ s a -> s{_reaInfo = a})
instance FromJSON ReadGroup where
parseJSON
= withObject "ReadGroup"
(\ o ->
ReadGroup' <$>
(o .:? "referenceSetId") <*>
(o .:? "programs" .!= mempty)
<*> (o .:? "experiment")
<*> (o .:? "name")
<*> (o .:? "datasetId")
<*> (o .:? "id")
<*> (o .:? "sampleId")
<*> (o .:? "predictedInsertSize")
<*> (o .:? "description")
<*> (o .:? "info"))
instance ToJSON ReadGroup where
toJSON ReadGroup'{..}
= object
(catMaybes
[("referenceSetId" .=) <$> _reaReferenceSetId,
("programs" .=) <$> _reaPrograms,
("experiment" .=) <$> _reaExperiment,
("name" .=) <$> _reaName,
("datasetId" .=) <$> _reaDataSetId,
("id" .=) <$> _reaId,
("sampleId" .=) <$> _reaSampleId,
("predictedInsertSize" .=) <$>
_reaPredictedInsertSize,
("description" .=) <$> _reaDescription,
("info" .=) <$> _reaInfo])
-- | This resource represents a long-running operation that is the result of
-- a network API call.
--
-- /See:/ 'operation' smart constructor.
data Operation = Operation'
{ _oDone :: !(Maybe Bool)
, _oError :: !(Maybe Status)
, _oResponse :: !(Maybe OperationResponse)
, _oName :: !(Maybe Text)
, _oMetadata :: !(Maybe OperationSchema)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Operation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oDone'
--
-- * 'oError'
--
-- * 'oResponse'
--
-- * 'oName'
--
-- * 'oMetadata'
operation
:: Operation
operation =
Operation'
{ _oDone = Nothing
, _oError = Nothing
, _oResponse = Nothing
, _oName = Nothing
, _oMetadata = Nothing
}
-- | If the value is \`false\`, it means the operation is still in progress.
-- If true, the operation is completed, and either \`error\` or
-- \`response\` is available.
oDone :: Lens' Operation (Maybe Bool)
oDone = lens _oDone (\ s a -> s{_oDone = a})
-- | The error result of the operation in case of failure or cancellation.
oError :: Lens' Operation (Maybe Status)
oError = lens _oError (\ s a -> s{_oError = a})
-- | If importing ReadGroupSets, an ImportReadGroupSetsResponse is returned.
-- If importing Variants, an ImportVariantsResponse is returned. For
-- pipelines and exports, an empty response is returned.
oResponse :: Lens' Operation (Maybe OperationResponse)
oResponse
= lens _oResponse (\ s a -> s{_oResponse = a})
-- | The server-assigned name, which is only unique within the same service
-- that originally returns it. For example:
-- \`operations\/CJHU7Oi_ChDrveSpBRjfuL-qzoWAgEw\`
oName :: Lens' Operation (Maybe Text)
oName = lens _oName (\ s a -> s{_oName = a})
-- | An OperationMetadata object. This will always be returned with the
-- Operation.
oMetadata :: Lens' Operation (Maybe OperationSchema)
oMetadata
= lens _oMetadata (\ s a -> s{_oMetadata = a})
instance FromJSON Operation where
parseJSON
= withObject "Operation"
(\ o ->
Operation' <$>
(o .:? "done") <*> (o .:? "error") <*>
(o .:? "response")
<*> (o .:? "name")
<*> (o .:? "metadata"))
instance ToJSON Operation where
toJSON Operation'{..}
= object
(catMaybes
[("done" .=) <$> _oDone, ("error" .=) <$> _oError,
("response" .=) <$> _oResponse,
("name" .=) <$> _oName,
("metadata" .=) <$> _oMetadata])
--
-- /See:/ 'searchReferenceSetsRequest' smart constructor.
data SearchReferenceSetsRequest = SearchReferenceSetsRequest'
{ _srsrMD5checksums :: !(Maybe [Text])
, _srsrAccessions :: !(Maybe [Text])
, _srsrPageToken :: !(Maybe Text)
, _srsrAssemblyId :: !(Maybe Text)
, _srsrPageSize :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchReferenceSetsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srsrMD5checksums'
--
-- * 'srsrAccessions'
--
-- * 'srsrPageToken'
--
-- * 'srsrAssemblyId'
--
-- * 'srsrPageSize'
searchReferenceSetsRequest
:: SearchReferenceSetsRequest
searchReferenceSetsRequest =
SearchReferenceSetsRequest'
{ _srsrMD5checksums = Nothing
, _srsrAccessions = Nothing
, _srsrPageToken = Nothing
, _srsrAssemblyId = Nothing
, _srsrPageSize = Nothing
}
-- | If present, return reference sets for which the md5checksum matches
-- exactly.
srsrMD5checksums :: Lens' SearchReferenceSetsRequest [Text]
srsrMD5checksums
= lens _srsrMD5checksums
(\ s a -> s{_srsrMD5checksums = a})
. _Default
. _Coerce
-- | If present, return reference sets for which a prefix of any of
-- sourceAccessions match any of these strings. Accession numbers typically
-- have a main number and a version, for example \`NC_000001.11\`.
srsrAccessions :: Lens' SearchReferenceSetsRequest [Text]
srsrAccessions
= lens _srsrAccessions
(\ s a -> s{_srsrAccessions = a})
. _Default
. _Coerce
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
srsrPageToken :: Lens' SearchReferenceSetsRequest (Maybe Text)
srsrPageToken
= lens _srsrPageToken
(\ s a -> s{_srsrPageToken = a})
-- | If present, return reference sets for which a substring of their
-- \`assemblyId\` matches this string (case insensitive).
srsrAssemblyId :: Lens' SearchReferenceSetsRequest (Maybe Text)
srsrAssemblyId
= lens _srsrAssemblyId
(\ s a -> s{_srsrAssemblyId = a})
-- | The maximum number of results to return in a single page. If
-- unspecified, defaults to 1024. The maximum value is 4096.
srsrPageSize :: Lens' SearchReferenceSetsRequest (Maybe Int32)
srsrPageSize
= lens _srsrPageSize (\ s a -> s{_srsrPageSize = a})
. mapping _Coerce
instance FromJSON SearchReferenceSetsRequest where
parseJSON
= withObject "SearchReferenceSetsRequest"
(\ o ->
SearchReferenceSetsRequest' <$>
(o .:? "md5checksums" .!= mempty) <*>
(o .:? "accessions" .!= mempty)
<*> (o .:? "pageToken")
<*> (o .:? "assemblyId")
<*> (o .:? "pageSize"))
instance ToJSON SearchReferenceSetsRequest where
toJSON SearchReferenceSetsRequest'{..}
= object
(catMaybes
[("md5checksums" .=) <$> _srsrMD5checksums,
("accessions" .=) <$> _srsrAccessions,
("pageToken" .=) <$> _srsrPageToken,
("assemblyId" .=) <$> _srsrAssemblyId,
("pageSize" .=) <$> _srsrPageSize])
-- | A generic empty message that you can re-use to avoid defining duplicated
-- empty messages in your APIs. A typical example is to use it as the
-- request or the response type of an API method. For instance: service Foo
-- { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
-- JSON representation for \`Empty\` is empty JSON object \`{}\`.
--
-- /See:/ 'empty' smart constructor.
data Empty =
Empty'
deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Empty' with the minimum fields required to make a request.
--
empty
:: Empty
empty = Empty'
instance FromJSON Empty where
parseJSON = withObject "Empty" (\ o -> pure Empty')
instance ToJSON Empty where
toJSON = const emptyObject
--
-- /See:/ 'searchReferencesResponse' smart constructor.
data SearchReferencesResponse = SearchReferencesResponse'
{ _srrNextPageToken :: !(Maybe Text)
, _srrReferences :: !(Maybe [Reference])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchReferencesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srrNextPageToken'
--
-- * 'srrReferences'
searchReferencesResponse
:: SearchReferencesResponse
searchReferencesResponse =
SearchReferencesResponse'
{ _srrNextPageToken = Nothing
, _srrReferences = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
srrNextPageToken :: Lens' SearchReferencesResponse (Maybe Text)
srrNextPageToken
= lens _srrNextPageToken
(\ s a -> s{_srrNextPageToken = a})
-- | The matching references.
srrReferences :: Lens' SearchReferencesResponse [Reference]
srrReferences
= lens _srrReferences
(\ s a -> s{_srrReferences = a})
. _Default
. _Coerce
instance FromJSON SearchReferencesResponse where
parseJSON
= withObject "SearchReferencesResponse"
(\ o ->
SearchReferencesResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "references" .!= mempty))
instance ToJSON SearchReferencesResponse where
toJSON SearchReferencesResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _srrNextPageToken,
("references" .=) <$> _srrReferences])
-- | Metadata describes a single piece of variant call metadata. These data
-- include a top-level key and either a single value string (value) or a
-- list of key-value pairs (info). Value and info are mutually exclusive.
--
-- /See:/ 'variantSetMetadata' smart constructor.
data VariantSetMetadata = VariantSetMetadata'
{ _vsmValue :: !(Maybe Text)
, _vsmKey :: !(Maybe Text)
, _vsmId :: !(Maybe Text)
, _vsmType :: !(Maybe VariantSetMetadataType)
, _vsmNumber :: !(Maybe Text)
, _vsmDescription :: !(Maybe Text)
, _vsmInfo :: !(Maybe VariantSetMetadataInfo)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'VariantSetMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vsmValue'
--
-- * 'vsmKey'
--
-- * 'vsmId'
--
-- * 'vsmType'
--
-- * 'vsmNumber'
--
-- * 'vsmDescription'
--
-- * 'vsmInfo'
variantSetMetadata
:: VariantSetMetadata
variantSetMetadata =
VariantSetMetadata'
{ _vsmValue = Nothing
, _vsmKey = Nothing
, _vsmId = Nothing
, _vsmType = Nothing
, _vsmNumber = Nothing
, _vsmDescription = Nothing
, _vsmInfo = Nothing
}
-- | The value field for simple metadata.
vsmValue :: Lens' VariantSetMetadata (Maybe Text)
vsmValue = lens _vsmValue (\ s a -> s{_vsmValue = a})
-- | The top-level key.
vsmKey :: Lens' VariantSetMetadata (Maybe Text)
vsmKey = lens _vsmKey (\ s a -> s{_vsmKey = a})
-- | User-provided ID field, not enforced by this API. Two or more pieces of
-- structured metadata with identical id and key fields are considered
-- equivalent.
vsmId :: Lens' VariantSetMetadata (Maybe Text)
vsmId = lens _vsmId (\ s a -> s{_vsmId = a})
-- | The type of data. Possible types include: Integer, Float, Flag,
-- Character, and String.
vsmType :: Lens' VariantSetMetadata (Maybe VariantSetMetadataType)
vsmType = lens _vsmType (\ s a -> s{_vsmType = a})
-- | The number of values that can be included in a field described by this
-- metadata.
vsmNumber :: Lens' VariantSetMetadata (Maybe Text)
vsmNumber
= lens _vsmNumber (\ s a -> s{_vsmNumber = a})
-- | A textual description of this metadata.
vsmDescription :: Lens' VariantSetMetadata (Maybe Text)
vsmDescription
= lens _vsmDescription
(\ s a -> s{_vsmDescription = a})
-- | Remaining structured metadata key-value pairs. This must be of the form
-- map (string key mapping to a list of string values).
vsmInfo :: Lens' VariantSetMetadata (Maybe VariantSetMetadataInfo)
vsmInfo = lens _vsmInfo (\ s a -> s{_vsmInfo = a})
instance FromJSON VariantSetMetadata where
parseJSON
= withObject "VariantSetMetadata"
(\ o ->
VariantSetMetadata' <$>
(o .:? "value") <*> (o .:? "key") <*> (o .:? "id")
<*> (o .:? "type")
<*> (o .:? "number")
<*> (o .:? "description")
<*> (o .:? "info"))
instance ToJSON VariantSetMetadata where
toJSON VariantSetMetadata'{..}
= object
(catMaybes
[("value" .=) <$> _vsmValue, ("key" .=) <$> _vsmKey,
("id" .=) <$> _vsmId, ("type" .=) <$> _vsmType,
("number" .=) <$> _vsmNumber,
("description" .=) <$> _vsmDescription,
("info" .=) <$> _vsmInfo])
-- | A call set is a collection of variant calls, typically for one sample.
-- It belongs to a variant set. For more genomics resource definitions, see
-- [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
--
-- /See:/ 'callSet' smart constructor.
data CallSet = CallSet'
{ _csCreated :: !(Maybe (Textual Int64))
, _csName :: !(Maybe Text)
, _csId :: !(Maybe Text)
, _csSampleId :: !(Maybe Text)
, _csVariantSetIds :: !(Maybe [Text])
, _csInfo :: !(Maybe CallSetInfo)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CallSet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'csCreated'
--
-- * 'csName'
--
-- * 'csId'
--
-- * 'csSampleId'
--
-- * 'csVariantSetIds'
--
-- * 'csInfo'
callSet
:: CallSet
callSet =
CallSet'
{ _csCreated = Nothing
, _csName = Nothing
, _csId = Nothing
, _csSampleId = Nothing
, _csVariantSetIds = Nothing
, _csInfo = Nothing
}
-- | The date this call set was created, in milliseconds from the epoch.
csCreated :: Lens' CallSet (Maybe Int64)
csCreated
= lens _csCreated (\ s a -> s{_csCreated = a}) .
mapping _Coerce
-- | The call set name.
csName :: Lens' CallSet (Maybe Text)
csName = lens _csName (\ s a -> s{_csName = a})
-- | The server-generated call set ID, unique across all call sets.
csId :: Lens' CallSet (Maybe Text)
csId = lens _csId (\ s a -> s{_csId = a})
-- | The sample ID this call set corresponds to.
csSampleId :: Lens' CallSet (Maybe Text)
csSampleId
= lens _csSampleId (\ s a -> s{_csSampleId = a})
-- | The IDs of the variant sets this call set belongs to. This field must
-- contain exactly one ID, as a call set belongs to a single variant set.
-- This field is repeated for compatibility with the [GA4GH 0.5.1
-- API](https:\/\/github.com\/ga4gh\/schemas\/blob\/v0.5.1\/src\/main\/resources\/avro\/variants.avdl#L76).
csVariantSetIds :: Lens' CallSet [Text]
csVariantSetIds
= lens _csVariantSetIds
(\ s a -> s{_csVariantSetIds = a})
. _Default
. _Coerce
-- | A map of additional call set information. This must be of the form map
-- (string key mapping to a list of string values).
csInfo :: Lens' CallSet (Maybe CallSetInfo)
csInfo = lens _csInfo (\ s a -> s{_csInfo = a})
instance FromJSON CallSet where
parseJSON
= withObject "CallSet"
(\ o ->
CallSet' <$>
(o .:? "created") <*> (o .:? "name") <*> (o .:? "id")
<*> (o .:? "sampleId")
<*> (o .:? "variantSetIds" .!= mempty)
<*> (o .:? "info"))
instance ToJSON CallSet where
toJSON CallSet'{..}
= object
(catMaybes
[("created" .=) <$> _csCreated,
("name" .=) <$> _csName, ("id" .=) <$> _csId,
("sampleId" .=) <$> _csSampleId,
("variantSetIds" .=) <$> _csVariantSetIds,
("info" .=) <$> _csInfo])
-- | A bucket over which read coverage has been precomputed. A bucket
-- corresponds to a specific range of the reference sequence.
--
-- /See:/ 'coverageBucket' smart constructor.
data CoverageBucket = CoverageBucket'
{ _cbRange :: !(Maybe Range)
, _cbMeanCoverage :: !(Maybe (Textual Double))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CoverageBucket' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cbRange'
--
-- * 'cbMeanCoverage'
coverageBucket
:: CoverageBucket
coverageBucket =
CoverageBucket'
{ _cbRange = Nothing
, _cbMeanCoverage = Nothing
}
-- | The genomic coordinate range spanned by this bucket.
cbRange :: Lens' CoverageBucket (Maybe Range)
cbRange = lens _cbRange (\ s a -> s{_cbRange = a})
-- | The average number of reads which are aligned to each individual
-- reference base in this bucket.
cbMeanCoverage :: Lens' CoverageBucket (Maybe Double)
cbMeanCoverage
= lens _cbMeanCoverage
(\ s a -> s{_cbMeanCoverage = a})
. mapping _Coerce
instance FromJSON CoverageBucket where
parseJSON
= withObject "CoverageBucket"
(\ o ->
CoverageBucket' <$>
(o .:? "range") <*> (o .:? "meanCoverage"))
instance ToJSON CoverageBucket where
toJSON CoverageBucket'{..}
= object
(catMaybes
[("range" .=) <$> _cbRange,
("meanCoverage" .=) <$> _cbMeanCoverage])
--
-- /See:/ 'variantAnnotation' smart constructor.
data VariantAnnotation = VariantAnnotation'
{ _vaEffect :: !(Maybe VariantAnnotationEffect)
, _vaClinicalSignificance :: !(Maybe VariantAnnotationClinicalSignificance)
, _vaAlternateBases :: !(Maybe Text)
, _vaGeneId :: !(Maybe Text)
, _vaConditions :: !(Maybe [ClinicalCondition])
, _vaType :: !(Maybe VariantAnnotationType)
, _vaTranscriptIds :: !(Maybe [Text])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'VariantAnnotation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vaEffect'
--
-- * 'vaClinicalSignificance'
--
-- * 'vaAlternateBases'
--
-- * 'vaGeneId'
--
-- * 'vaConditions'
--
-- * 'vaType'
--
-- * 'vaTranscriptIds'
variantAnnotation
:: VariantAnnotation
variantAnnotation =
VariantAnnotation'
{ _vaEffect = Nothing
, _vaClinicalSignificance = Nothing
, _vaAlternateBases = Nothing
, _vaGeneId = Nothing
, _vaConditions = Nothing
, _vaType = Nothing
, _vaTranscriptIds = Nothing
}
-- | Effect of the variant on the coding sequence.
vaEffect :: Lens' VariantAnnotation (Maybe VariantAnnotationEffect)
vaEffect = lens _vaEffect (\ s a -> s{_vaEffect = a})
-- | Describes the clinical significance of a variant. It is adapted from the
-- ClinVar controlled vocabulary for clinical significance described at:
-- http:\/\/www.ncbi.nlm.nih.gov\/clinvar\/docs\/clinsig\/
vaClinicalSignificance :: Lens' VariantAnnotation (Maybe VariantAnnotationClinicalSignificance)
vaClinicalSignificance
= lens _vaClinicalSignificance
(\ s a -> s{_vaClinicalSignificance = a})
-- | The alternate allele for this variant. If multiple alternate alleles
-- exist at this location, create a separate variant for each one, as they
-- may represent distinct conditions.
vaAlternateBases :: Lens' VariantAnnotation (Maybe Text)
vaAlternateBases
= lens _vaAlternateBases
(\ s a -> s{_vaAlternateBases = a})
-- | Google annotation ID of the gene affected by this variant. This should
-- be provided when the variant is created.
vaGeneId :: Lens' VariantAnnotation (Maybe Text)
vaGeneId = lens _vaGeneId (\ s a -> s{_vaGeneId = a})
-- | The set of conditions associated with this variant. A condition
-- describes the way a variant influences human health.
vaConditions :: Lens' VariantAnnotation [ClinicalCondition]
vaConditions
= lens _vaConditions (\ s a -> s{_vaConditions = a})
. _Default
. _Coerce
-- | Type has been adapted from ClinVar\'s list of variant types.
vaType :: Lens' VariantAnnotation (Maybe VariantAnnotationType)
vaType = lens _vaType (\ s a -> s{_vaType = a})
-- | Google annotation IDs of the transcripts affected by this variant. These
-- should be provided when the variant is created.
vaTranscriptIds :: Lens' VariantAnnotation [Text]
vaTranscriptIds
= lens _vaTranscriptIds
(\ s a -> s{_vaTranscriptIds = a})
. _Default
. _Coerce
instance FromJSON VariantAnnotation where
parseJSON
= withObject "VariantAnnotation"
(\ o ->
VariantAnnotation' <$>
(o .:? "effect") <*> (o .:? "clinicalSignificance")
<*> (o .:? "alternateBases")
<*> (o .:? "geneId")
<*> (o .:? "conditions" .!= mempty)
<*> (o .:? "type")
<*> (o .:? "transcriptIds" .!= mempty))
instance ToJSON VariantAnnotation where
toJSON VariantAnnotation'{..}
= object
(catMaybes
[("effect" .=) <$> _vaEffect,
("clinicalSignificance" .=) <$>
_vaClinicalSignificance,
("alternateBases" .=) <$> _vaAlternateBases,
("geneId" .=) <$> _vaGeneId,
("conditions" .=) <$> _vaConditions,
("type" .=) <$> _vaType,
("transcriptIds" .=) <$> _vaTranscriptIds])
-- | The read group set search request.
--
-- /See:/ 'searchReadGroupSetsRequest' smart constructor.
data SearchReadGroupSetsRequest = SearchReadGroupSetsRequest'
{ _srgsrDataSetIds :: !(Maybe [Text])
, _srgsrName :: !(Maybe Text)
, _srgsrPageToken :: !(Maybe Text)
, _srgsrPageSize :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchReadGroupSetsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srgsrDataSetIds'
--
-- * 'srgsrName'
--
-- * 'srgsrPageToken'
--
-- * 'srgsrPageSize'
searchReadGroupSetsRequest
:: SearchReadGroupSetsRequest
searchReadGroupSetsRequest =
SearchReadGroupSetsRequest'
{ _srgsrDataSetIds = Nothing
, _srgsrName = Nothing
, _srgsrPageToken = Nothing
, _srgsrPageSize = Nothing
}
-- | Restricts this query to read group sets within the given datasets. At
-- least one ID must be provided.
srgsrDataSetIds :: Lens' SearchReadGroupSetsRequest [Text]
srgsrDataSetIds
= lens _srgsrDataSetIds
(\ s a -> s{_srgsrDataSetIds = a})
. _Default
. _Coerce
-- | Only return read group sets for which a substring of the name matches
-- this string.
srgsrName :: Lens' SearchReadGroupSetsRequest (Maybe Text)
srgsrName
= lens _srgsrName (\ s a -> s{_srgsrName = a})
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
srgsrPageToken :: Lens' SearchReadGroupSetsRequest (Maybe Text)
srgsrPageToken
= lens _srgsrPageToken
(\ s a -> s{_srgsrPageToken = a})
-- | The maximum number of results to return in a single page. If
-- unspecified, defaults to 256. The maximum value is 1024.
srgsrPageSize :: Lens' SearchReadGroupSetsRequest (Maybe Int32)
srgsrPageSize
= lens _srgsrPageSize
(\ s a -> s{_srgsrPageSize = a})
. mapping _Coerce
instance FromJSON SearchReadGroupSetsRequest where
parseJSON
= withObject "SearchReadGroupSetsRequest"
(\ o ->
SearchReadGroupSetsRequest' <$>
(o .:? "datasetIds" .!= mempty) <*> (o .:? "name")
<*> (o .:? "pageToken")
<*> (o .:? "pageSize"))
instance ToJSON SearchReadGroupSetsRequest where
toJSON SearchReadGroupSetsRequest'{..}
= object
(catMaybes
[("datasetIds" .=) <$> _srgsrDataSetIds,
("name" .=) <$> _srgsrName,
("pageToken" .=) <$> _srgsrPageToken,
("pageSize" .=) <$> _srgsrPageSize])
-- | A reference is a canonical assembled DNA sequence, intended to act as a
-- reference coordinate space for other genomic annotations. A single
-- reference might represent the human chromosome 1 or mitochondrial DNA,
-- for instance. A reference belongs to one or more reference sets. For
-- more genomics resource definitions, see [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
--
-- /See:/ 'reference' smart constructor.
data Reference = Reference'
{ _refLength :: !(Maybe (Textual Int64))
, _refSourceAccessions :: !(Maybe [Text])
, _refMD5checksum :: !(Maybe Text)
, _refName :: !(Maybe Text)
, _refNcbiTaxonId :: !(Maybe (Textual Int32))
, _refId :: !(Maybe Text)
, _refSourceURI :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Reference' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'refLength'
--
-- * 'refSourceAccessions'
--
-- * 'refMD5checksum'
--
-- * 'refName'
--
-- * 'refNcbiTaxonId'
--
-- * 'refId'
--
-- * 'refSourceURI'
reference
:: Reference
reference =
Reference'
{ _refLength = Nothing
, _refSourceAccessions = Nothing
, _refMD5checksum = Nothing
, _refName = Nothing
, _refNcbiTaxonId = Nothing
, _refId = Nothing
, _refSourceURI = Nothing
}
-- | The length of this reference\'s sequence.
refLength :: Lens' Reference (Maybe Int64)
refLength
= lens _refLength (\ s a -> s{_refLength = a}) .
mapping _Coerce
-- | All known corresponding accession IDs in INSDC (GenBank\/ENA\/DDBJ)
-- ideally with a version number, for example \`GCF_000001405.26\`.
refSourceAccessions :: Lens' Reference [Text]
refSourceAccessions
= lens _refSourceAccessions
(\ s a -> s{_refSourceAccessions = a})
. _Default
. _Coerce
-- | MD5 of the upper-case sequence excluding all whitespace characters (this
-- is equivalent to SQ:M5 in SAM). This value is represented in lower case
-- hexadecimal format.
refMD5checksum :: Lens' Reference (Maybe Text)
refMD5checksum
= lens _refMD5checksum
(\ s a -> s{_refMD5checksum = a})
-- | The name of this reference, for example \`22\`.
refName :: Lens' Reference (Maybe Text)
refName = lens _refName (\ s a -> s{_refName = a})
-- | ID from http:\/\/www.ncbi.nlm.nih.gov\/taxonomy. For example, 9606 for
-- human.
refNcbiTaxonId :: Lens' Reference (Maybe Int32)
refNcbiTaxonId
= lens _refNcbiTaxonId
(\ s a -> s{_refNcbiTaxonId = a})
. mapping _Coerce
-- | The server-generated reference ID, unique across all references.
refId :: Lens' Reference (Maybe Text)
refId = lens _refId (\ s a -> s{_refId = a})
-- | The URI from which the sequence was obtained. Typically specifies a
-- FASTA format file.
refSourceURI :: Lens' Reference (Maybe Text)
refSourceURI
= lens _refSourceURI (\ s a -> s{_refSourceURI = a})
instance FromJSON Reference where
parseJSON
= withObject "Reference"
(\ o ->
Reference' <$>
(o .:? "length") <*>
(o .:? "sourceAccessions" .!= mempty)
<*> (o .:? "md5checksum")
<*> (o .:? "name")
<*> (o .:? "ncbiTaxonId")
<*> (o .:? "id")
<*> (o .:? "sourceUri"))
instance ToJSON Reference where
toJSON Reference'{..}
= object
(catMaybes
[("length" .=) <$> _refLength,
("sourceAccessions" .=) <$> _refSourceAccessions,
("md5checksum" .=) <$> _refMD5checksum,
("name" .=) <$> _refName,
("ncbiTaxonId" .=) <$> _refNcbiTaxonId,
("id" .=) <$> _refId,
("sourceUri" .=) <$> _refSourceURI])
-- | A map of additional variant call information. This must be of the form
-- map (string key mapping to a list of string values).
--
-- /See:/ 'variantCallInfo' smart constructor.
newtype VariantCallInfo = VariantCallInfo'
{ _vciAddtional :: HashMap Text [JSONValue]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'VariantCallInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vciAddtional'
variantCallInfo
:: HashMap Text [JSONValue] -- ^ 'vciAddtional'
-> VariantCallInfo
variantCallInfo pVciAddtional_ =
VariantCallInfo'
{ _vciAddtional = _Coerce # pVciAddtional_
}
vciAddtional :: Lens' VariantCallInfo (HashMap Text [JSONValue])
vciAddtional
= lens _vciAddtional (\ s a -> s{_vciAddtional = a})
. _Coerce
instance FromJSON VariantCallInfo where
parseJSON
= withObject "VariantCallInfo"
(\ o -> VariantCallInfo' <$> (parseJSONObject o))
instance ToJSON VariantCallInfo where
toJSON = toJSON . _vciAddtional
-- | A map of additional read group information. This must be of the form map
-- (string key mapping to a list of string values).
--
-- /See:/ 'readGroupInfo' smart constructor.
newtype ReadGroupInfo = ReadGroupInfo'
{ _rgiAddtional :: HashMap Text [JSONValue]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReadGroupInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rgiAddtional'
readGroupInfo
:: HashMap Text [JSONValue] -- ^ 'rgiAddtional'
-> ReadGroupInfo
readGroupInfo pRgiAddtional_ =
ReadGroupInfo'
{ _rgiAddtional = _Coerce # pRgiAddtional_
}
rgiAddtional :: Lens' ReadGroupInfo (HashMap Text [JSONValue])
rgiAddtional
= lens _rgiAddtional (\ s a -> s{_rgiAddtional = a})
. _Coerce
instance FromJSON ReadGroupInfo where
parseJSON
= withObject "ReadGroupInfo"
(\ o -> ReadGroupInfo' <$> (parseJSONObject o))
instance ToJSON ReadGroupInfo where
toJSON = toJSON . _rgiAddtional
--
-- /See:/ 'statusDetailsItem' smart constructor.
newtype StatusDetailsItem = StatusDetailsItem'
{ _sdiAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'StatusDetailsItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sdiAddtional'
statusDetailsItem
:: HashMap Text JSONValue -- ^ 'sdiAddtional'
-> StatusDetailsItem
statusDetailsItem pSdiAddtional_ =
StatusDetailsItem'
{ _sdiAddtional = _Coerce # pSdiAddtional_
}
-- | Properties of the object. Contains field \@type with type URL.
sdiAddtional :: Lens' StatusDetailsItem (HashMap Text JSONValue)
sdiAddtional
= lens _sdiAddtional (\ s a -> s{_sdiAddtional = a})
. _Coerce
instance FromJSON StatusDetailsItem where
parseJSON
= withObject "StatusDetailsItem"
(\ o -> StatusDetailsItem' <$> (parseJSONObject o))
instance ToJSON StatusDetailsItem where
toJSON = toJSON . _sdiAddtional
-- | The call set search response.
--
-- /See:/ 'searchCallSetsResponse' smart constructor.
data SearchCallSetsResponse = SearchCallSetsResponse'
{ _scsrNextPageToken :: !(Maybe Text)
, _scsrCallSets :: !(Maybe [CallSet])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchCallSetsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'scsrNextPageToken'
--
-- * 'scsrCallSets'
searchCallSetsResponse
:: SearchCallSetsResponse
searchCallSetsResponse =
SearchCallSetsResponse'
{ _scsrNextPageToken = Nothing
, _scsrCallSets = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
scsrNextPageToken :: Lens' SearchCallSetsResponse (Maybe Text)
scsrNextPageToken
= lens _scsrNextPageToken
(\ s a -> s{_scsrNextPageToken = a})
-- | The list of matching call sets.
scsrCallSets :: Lens' SearchCallSetsResponse [CallSet]
scsrCallSets
= lens _scsrCallSets (\ s a -> s{_scsrCallSets = a})
. _Default
. _Coerce
instance FromJSON SearchCallSetsResponse where
parseJSON
= withObject "SearchCallSetsResponse"
(\ o ->
SearchCallSetsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "callSets" .!= mempty))
instance ToJSON SearchCallSetsResponse where
toJSON SearchCallSetsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _scsrNextPageToken,
("callSets" .=) <$> _scsrCallSets])
-- | Request message for \`SetIamPolicy\` method.
--
-- /See:/ 'setIAMPolicyRequest' smart constructor.
newtype SetIAMPolicyRequest = SetIAMPolicyRequest'
{ _siprPolicy :: Maybe Policy
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SetIAMPolicyRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'siprPolicy'
setIAMPolicyRequest
:: SetIAMPolicyRequest
setIAMPolicyRequest =
SetIAMPolicyRequest'
{ _siprPolicy = Nothing
}
-- | REQUIRED: The complete policy to be applied to the \`resource\`. The
-- size of the policy is limited to a few 10s of KB. An empty policy is a
-- valid policy but certain Cloud Platform services (such as Projects)
-- might reject them.
siprPolicy :: Lens' SetIAMPolicyRequest (Maybe Policy)
siprPolicy
= lens _siprPolicy (\ s a -> s{_siprPolicy = a})
instance FromJSON SetIAMPolicyRequest where
parseJSON
= withObject "SetIAMPolicyRequest"
(\ o -> SetIAMPolicyRequest' <$> (o .:? "policy"))
instance ToJSON SetIAMPolicyRequest where
toJSON SetIAMPolicyRequest'{..}
= object (catMaybes [("policy" .=) <$> _siprPolicy])
-- | The read search request.
--
-- /See:/ 'searchReadsRequest' smart constructor.
data SearchReadsRequest = SearchReadsRequest'
{ _srrStart :: !(Maybe (Textual Int64))
, _srrReadGroupIds :: !(Maybe [Text])
, _srrReferenceName :: !(Maybe Text)
, _srrEnd :: !(Maybe (Textual Int64))
, _srrPageToken :: !(Maybe Text)
, _srrPageSize :: !(Maybe (Textual Int32))
, _srrReadGroupSetIds :: !(Maybe [Text])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchReadsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srrStart'
--
-- * 'srrReadGroupIds'
--
-- * 'srrReferenceName'
--
-- * 'srrEnd'
--
-- * 'srrPageToken'
--
-- * 'srrPageSize'
--
-- * 'srrReadGroupSetIds'
searchReadsRequest
:: SearchReadsRequest
searchReadsRequest =
SearchReadsRequest'
{ _srrStart = Nothing
, _srrReadGroupIds = Nothing
, _srrReferenceName = Nothing
, _srrEnd = Nothing
, _srrPageToken = Nothing
, _srrPageSize = Nothing
, _srrReadGroupSetIds = Nothing
}
-- | The start position of the range on the reference, 0-based inclusive. If
-- specified, \`referenceName\` must also be specified.
srrStart :: Lens' SearchReadsRequest (Maybe Int64)
srrStart
= lens _srrStart (\ s a -> s{_srrStart = a}) .
mapping _Coerce
-- | The IDs of the read groups within which to search for reads. All
-- specified read groups must belong to the same read group sets. Must
-- specify one of \`readGroupSetIds\` or \`readGroupIds\`.
srrReadGroupIds :: Lens' SearchReadsRequest [Text]
srrReadGroupIds
= lens _srrReadGroupIds
(\ s a -> s{_srrReadGroupIds = a})
. _Default
. _Coerce
-- | The reference sequence name, for example \`chr1\`, \`1\`, or \`chrX\`.
-- If set to \`*\`, only unmapped reads are returned. If unspecified, all
-- reads (mapped and unmapped) are returned.
srrReferenceName :: Lens' SearchReadsRequest (Maybe Text)
srrReferenceName
= lens _srrReferenceName
(\ s a -> s{_srrReferenceName = a})
-- | The end position of the range on the reference, 0-based exclusive. If
-- specified, \`referenceName\` must also be specified.
srrEnd :: Lens' SearchReadsRequest (Maybe Int64)
srrEnd
= lens _srrEnd (\ s a -> s{_srrEnd = a}) .
mapping _Coerce
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
srrPageToken :: Lens' SearchReadsRequest (Maybe Text)
srrPageToken
= lens _srrPageToken (\ s a -> s{_srrPageToken = a})
-- | The maximum number of results to return in a single page. If
-- unspecified, defaults to 256. The maximum value is 2048.
srrPageSize :: Lens' SearchReadsRequest (Maybe Int32)
srrPageSize
= lens _srrPageSize (\ s a -> s{_srrPageSize = a}) .
mapping _Coerce
-- | The IDs of the read group sets within which to search for reads. All
-- specified read group sets must be aligned against a common set of
-- reference sequences; this defines the genomic coordinates for the query.
-- Must specify one of \`readGroupSetIds\` or \`readGroupIds\`.
srrReadGroupSetIds :: Lens' SearchReadsRequest [Text]
srrReadGroupSetIds
= lens _srrReadGroupSetIds
(\ s a -> s{_srrReadGroupSetIds = a})
. _Default
. _Coerce
instance FromJSON SearchReadsRequest where
parseJSON
= withObject "SearchReadsRequest"
(\ o ->
SearchReadsRequest' <$>
(o .:? "start") <*> (o .:? "readGroupIds" .!= mempty)
<*> (o .:? "referenceName")
<*> (o .:? "end")
<*> (o .:? "pageToken")
<*> (o .:? "pageSize")
<*> (o .:? "readGroupSetIds" .!= mempty))
instance ToJSON SearchReadsRequest where
toJSON SearchReadsRequest'{..}
= object
(catMaybes
[("start" .=) <$> _srrStart,
("readGroupIds" .=) <$> _srrReadGroupIds,
("referenceName" .=) <$> _srrReferenceName,
("end" .=) <$> _srrEnd,
("pageToken" .=) <$> _srrPageToken,
("pageSize" .=) <$> _srrPageSize,
("readGroupSetIds" .=) <$> _srrReadGroupSetIds])
-- | The original request that started the operation. Note that this will be
-- in the current version of the API. If the operation was started with the
-- v1beta2 API and a GetOperation is performed on the v1 API, a v1 request
-- will be returned.
--
-- /See:/ 'operationMetadataRequest' smart constructor.
newtype OperationMetadataRequest = OperationMetadataRequest'
{ _omrAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationMetadataRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'omrAddtional'
operationMetadataRequest
:: HashMap Text JSONValue -- ^ 'omrAddtional'
-> OperationMetadataRequest
operationMetadataRequest pOmrAddtional_ =
OperationMetadataRequest'
{ _omrAddtional = _Coerce # pOmrAddtional_
}
-- | Properties of the object. Contains field \@type with type URL.
omrAddtional :: Lens' OperationMetadataRequest (HashMap Text JSONValue)
omrAddtional
= lens _omrAddtional (\ s a -> s{_omrAddtional = a})
. _Coerce
instance FromJSON OperationMetadataRequest where
parseJSON
= withObject "OperationMetadataRequest"
(\ o ->
OperationMetadataRequest' <$> (parseJSONObject o))
instance ToJSON OperationMetadataRequest where
toJSON = toJSON . _omrAddtional
-- | A mapping between info field keys and the InfoMergeOperations to be
-- performed on them.
--
-- /See:/ 'mergeVariantsRequestInfoMergeConfig' smart constructor.
newtype MergeVariantsRequestInfoMergeConfig = MergeVariantsRequestInfoMergeConfig'
{ _mvrimcAddtional :: HashMap Text Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'MergeVariantsRequestInfoMergeConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mvrimcAddtional'
mergeVariantsRequestInfoMergeConfig
:: HashMap Text Text -- ^ 'mvrimcAddtional'
-> MergeVariantsRequestInfoMergeConfig
mergeVariantsRequestInfoMergeConfig pMvrimcAddtional_ =
MergeVariantsRequestInfoMergeConfig'
{ _mvrimcAddtional = _Coerce # pMvrimcAddtional_
}
mvrimcAddtional :: Lens' MergeVariantsRequestInfoMergeConfig (HashMap Text Text)
mvrimcAddtional
= lens _mvrimcAddtional
(\ s a -> s{_mvrimcAddtional = a})
. _Coerce
instance FromJSON MergeVariantsRequestInfoMergeConfig
where
parseJSON
= withObject "MergeVariantsRequestInfoMergeConfig"
(\ o ->
MergeVariantsRequestInfoMergeConfig' <$>
(parseJSONObject o))
instance ToJSON MergeVariantsRequestInfoMergeConfig
where
toJSON = toJSON . _mvrimcAddtional
-- | A map of additional variant information. This must be of the form map
-- (string key mapping to a list of string values).
--
-- /See:/ 'variantInfo' smart constructor.
newtype VariantInfo = VariantInfo'
{ _viAddtional :: HashMap Text [JSONValue]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'VariantInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'viAddtional'
variantInfo
:: HashMap Text [JSONValue] -- ^ 'viAddtional'
-> VariantInfo
variantInfo pViAddtional_ =
VariantInfo'
{ _viAddtional = _Coerce # pViAddtional_
}
viAddtional :: Lens' VariantInfo (HashMap Text [JSONValue])
viAddtional
= lens _viAddtional (\ s a -> s{_viAddtional = a}) .
_Coerce
instance FromJSON VariantInfo where
parseJSON
= withObject "VariantInfo"
(\ o -> VariantInfo' <$> (parseJSONObject o))
instance ToJSON VariantInfo where
toJSON = toJSON . _viAddtional
--
-- /See:/ 'experiment' smart constructor.
data Experiment = Experiment'
{ _eInstrumentModel :: !(Maybe Text)
, _ePlatformUnit :: !(Maybe Text)
, _eSequencingCenter :: !(Maybe Text)
, _eLibraryId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Experiment' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eInstrumentModel'
--
-- * 'ePlatformUnit'
--
-- * 'eSequencingCenter'
--
-- * 'eLibraryId'
experiment
:: Experiment
experiment =
Experiment'
{ _eInstrumentModel = Nothing
, _ePlatformUnit = Nothing
, _eSequencingCenter = Nothing
, _eLibraryId = Nothing
}
-- | The instrument model used as part of this experiment. This maps to
-- sequencing technology in the SAM spec.
eInstrumentModel :: Lens' Experiment (Maybe Text)
eInstrumentModel
= lens _eInstrumentModel
(\ s a -> s{_eInstrumentModel = a})
-- | The platform unit used as part of this experiment, for example
-- flowcell-barcode.lane for Illumina or slide for SOLiD. Corresponds to
-- the \@RG PU field in the SAM spec.
ePlatformUnit :: Lens' Experiment (Maybe Text)
ePlatformUnit
= lens _ePlatformUnit
(\ s a -> s{_ePlatformUnit = a})
-- | The sequencing center used as part of this experiment.
eSequencingCenter :: Lens' Experiment (Maybe Text)
eSequencingCenter
= lens _eSequencingCenter
(\ s a -> s{_eSequencingCenter = a})
-- | A client-supplied library identifier; a library is a collection of DNA
-- fragments which have been prepared for sequencing from a sample. This
-- field is important for quality control as error or bias can be
-- introduced during sample preparation.
eLibraryId :: Lens' Experiment (Maybe Text)
eLibraryId
= lens _eLibraryId (\ s a -> s{_eLibraryId = a})
instance FromJSON Experiment where
parseJSON
= withObject "Experiment"
(\ o ->
Experiment' <$>
(o .:? "instrumentModel") <*> (o .:? "platformUnit")
<*> (o .:? "sequencingCenter")
<*> (o .:? "libraryId"))
instance ToJSON Experiment where
toJSON Experiment'{..}
= object
(catMaybes
[("instrumentModel" .=) <$> _eInstrumentModel,
("platformUnit" .=) <$> _ePlatformUnit,
("sequencingCenter" .=) <$> _eSequencingCenter,
("libraryId" .=) <$> _eLibraryId])
-- | The search variant sets request.
--
-- /See:/ 'searchVariantSetsRequest' smart constructor.
data SearchVariantSetsRequest = SearchVariantSetsRequest'
{ _svsrDataSetIds :: !(Maybe [Text])
, _svsrPageToken :: !(Maybe Text)
, _svsrPageSize :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchVariantSetsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'svsrDataSetIds'
--
-- * 'svsrPageToken'
--
-- * 'svsrPageSize'
searchVariantSetsRequest
:: SearchVariantSetsRequest
searchVariantSetsRequest =
SearchVariantSetsRequest'
{ _svsrDataSetIds = Nothing
, _svsrPageToken = Nothing
, _svsrPageSize = Nothing
}
-- | Exactly one dataset ID must be provided here. Only variant sets which
-- belong to this dataset will be returned.
svsrDataSetIds :: Lens' SearchVariantSetsRequest [Text]
svsrDataSetIds
= lens _svsrDataSetIds
(\ s a -> s{_svsrDataSetIds = a})
. _Default
. _Coerce
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
svsrPageToken :: Lens' SearchVariantSetsRequest (Maybe Text)
svsrPageToken
= lens _svsrPageToken
(\ s a -> s{_svsrPageToken = a})
-- | The maximum number of results to return in a single page. If
-- unspecified, defaults to 1024.
svsrPageSize :: Lens' SearchVariantSetsRequest (Maybe Int32)
svsrPageSize
= lens _svsrPageSize (\ s a -> s{_svsrPageSize = a})
. mapping _Coerce
instance FromJSON SearchVariantSetsRequest where
parseJSON
= withObject "SearchVariantSetsRequest"
(\ o ->
SearchVariantSetsRequest' <$>
(o .:? "datasetIds" .!= mempty) <*>
(o .:? "pageToken")
<*> (o .:? "pageSize"))
instance ToJSON SearchVariantSetsRequest where
toJSON SearchVariantSetsRequest'{..}
= object
(catMaybes
[("datasetIds" .=) <$> _svsrDataSetIds,
("pageToken" .=) <$> _svsrPageToken,
("pageSize" .=) <$> _svsrPageSize])
-- | A map of additional read alignment information. This must be of the form
-- map (string key mapping to a list of string values).
--
-- /See:/ 'annotationInfo' smart constructor.
newtype AnnotationInfo = AnnotationInfo'
{ _aiAddtional :: HashMap Text [JSONValue]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnnotationInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aiAddtional'
annotationInfo
:: HashMap Text [JSONValue] -- ^ 'aiAddtional'
-> AnnotationInfo
annotationInfo pAiAddtional_ =
AnnotationInfo'
{ _aiAddtional = _Coerce # pAiAddtional_
}
aiAddtional :: Lens' AnnotationInfo (HashMap Text [JSONValue])
aiAddtional
= lens _aiAddtional (\ s a -> s{_aiAddtional = a}) .
_Coerce
instance FromJSON AnnotationInfo where
parseJSON
= withObject "AnnotationInfo"
(\ o -> AnnotationInfo' <$> (parseJSONObject o))
instance ToJSON AnnotationInfo where
toJSON = toJSON . _aiAddtional
--
-- /See:/ 'searchAnnotationsResponse' smart constructor.
data SearchAnnotationsResponse = SearchAnnotationsResponse'
{ _sarAnnotations :: !(Maybe [Annotation])
, _sarNextPageToken :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchAnnotationsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sarAnnotations'
--
-- * 'sarNextPageToken'
searchAnnotationsResponse
:: SearchAnnotationsResponse
searchAnnotationsResponse =
SearchAnnotationsResponse'
{ _sarAnnotations = Nothing
, _sarNextPageToken = Nothing
}
-- | The matching annotations.
sarAnnotations :: Lens' SearchAnnotationsResponse [Annotation]
sarAnnotations
= lens _sarAnnotations
(\ s a -> s{_sarAnnotations = a})
. _Default
. _Coerce
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
sarNextPageToken :: Lens' SearchAnnotationsResponse (Maybe Text)
sarNextPageToken
= lens _sarNextPageToken
(\ s a -> s{_sarNextPageToken = a})
instance FromJSON SearchAnnotationsResponse where
parseJSON
= withObject "SearchAnnotationsResponse"
(\ o ->
SearchAnnotationsResponse' <$>
(o .:? "annotations" .!= mempty) <*>
(o .:? "nextPageToken"))
instance ToJSON SearchAnnotationsResponse where
toJSON SearchAnnotationsResponse'{..}
= object
(catMaybes
[("annotations" .=) <$> _sarAnnotations,
("nextPageToken" .=) <$> _sarNextPageToken])
--
-- /See:/ 'searchAnnotationSetsRequest' smart constructor.
data SearchAnnotationSetsRequest = SearchAnnotationSetsRequest'
{ _sasrReferenceSetId :: !(Maybe Text)
, _sasrTypes :: !(Maybe [Text])
, _sasrDataSetIds :: !(Maybe [Text])
, _sasrName :: !(Maybe Text)
, _sasrPageToken :: !(Maybe Text)
, _sasrPageSize :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchAnnotationSetsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sasrReferenceSetId'
--
-- * 'sasrTypes'
--
-- * 'sasrDataSetIds'
--
-- * 'sasrName'
--
-- * 'sasrPageToken'
--
-- * 'sasrPageSize'
searchAnnotationSetsRequest
:: SearchAnnotationSetsRequest
searchAnnotationSetsRequest =
SearchAnnotationSetsRequest'
{ _sasrReferenceSetId = Nothing
, _sasrTypes = Nothing
, _sasrDataSetIds = Nothing
, _sasrName = Nothing
, _sasrPageToken = Nothing
, _sasrPageSize = Nothing
}
-- | If specified, only annotation sets associated with the given reference
-- set are returned.
sasrReferenceSetId :: Lens' SearchAnnotationSetsRequest (Maybe Text)
sasrReferenceSetId
= lens _sasrReferenceSetId
(\ s a -> s{_sasrReferenceSetId = a})
-- | If specified, only annotation sets that have any of these types are
-- returned.
sasrTypes :: Lens' SearchAnnotationSetsRequest [Text]
sasrTypes
= lens _sasrTypes (\ s a -> s{_sasrTypes = a}) .
_Default
. _Coerce
-- | Required. The dataset IDs to search within. Caller must have \`READ\`
-- access to these datasets.
sasrDataSetIds :: Lens' SearchAnnotationSetsRequest [Text]
sasrDataSetIds
= lens _sasrDataSetIds
(\ s a -> s{_sasrDataSetIds = a})
. _Default
. _Coerce
-- | Only return annotation sets for which a substring of the name matches
-- this string (case insensitive).
sasrName :: Lens' SearchAnnotationSetsRequest (Maybe Text)
sasrName = lens _sasrName (\ s a -> s{_sasrName = a})
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
sasrPageToken :: Lens' SearchAnnotationSetsRequest (Maybe Text)
sasrPageToken
= lens _sasrPageToken
(\ s a -> s{_sasrPageToken = a})
-- | The maximum number of results to return in a single page. If
-- unspecified, defaults to 128. The maximum value is 1024.
sasrPageSize :: Lens' SearchAnnotationSetsRequest (Maybe Int32)
sasrPageSize
= lens _sasrPageSize (\ s a -> s{_sasrPageSize = a})
. mapping _Coerce
instance FromJSON SearchAnnotationSetsRequest where
parseJSON
= withObject "SearchAnnotationSetsRequest"
(\ o ->
SearchAnnotationSetsRequest' <$>
(o .:? "referenceSetId") <*>
(o .:? "types" .!= mempty)
<*> (o .:? "datasetIds" .!= mempty)
<*> (o .:? "name")
<*> (o .:? "pageToken")
<*> (o .:? "pageSize"))
instance ToJSON SearchAnnotationSetsRequest where
toJSON SearchAnnotationSetsRequest'{..}
= object
(catMaybes
[("referenceSetId" .=) <$> _sasrReferenceSetId,
("types" .=) <$> _sasrTypes,
("datasetIds" .=) <$> _sasrDataSetIds,
("name" .=) <$> _sasrName,
("pageToken" .=) <$> _sasrPageToken,
("pageSize" .=) <$> _sasrPageSize])
-- | The variant search response.
--
-- /See:/ 'searchVariantsResponse' smart constructor.
data SearchVariantsResponse = SearchVariantsResponse'
{ _svrVariants :: !(Maybe [Variant])
, _svrNextPageToken :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchVariantsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'svrVariants'
--
-- * 'svrNextPageToken'
searchVariantsResponse
:: SearchVariantsResponse
searchVariantsResponse =
SearchVariantsResponse'
{ _svrVariants = Nothing
, _svrNextPageToken = Nothing
}
-- | The list of matching Variants.
svrVariants :: Lens' SearchVariantsResponse [Variant]
svrVariants
= lens _svrVariants (\ s a -> s{_svrVariants = a}) .
_Default
. _Coerce
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
svrNextPageToken :: Lens' SearchVariantsResponse (Maybe Text)
svrNextPageToken
= lens _svrNextPageToken
(\ s a -> s{_svrNextPageToken = a})
instance FromJSON SearchVariantsResponse where
parseJSON
= withObject "SearchVariantsResponse"
(\ o ->
SearchVariantsResponse' <$>
(o .:? "variants" .!= mempty) <*>
(o .:? "nextPageToken"))
instance ToJSON SearchVariantsResponse where
toJSON SearchVariantsResponse'{..}
= object
(catMaybes
[("variants" .=) <$> _svrVariants,
("nextPageToken" .=) <$> _svrNextPageToken])
-- | Runtime metadata on this Operation.
--
-- /See:/ 'operationMetadataRuntimeMetadata' smart constructor.
newtype OperationMetadataRuntimeMetadata = OperationMetadataRuntimeMetadata'
{ _omrmAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationMetadataRuntimeMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'omrmAddtional'
operationMetadataRuntimeMetadata
:: HashMap Text JSONValue -- ^ 'omrmAddtional'
-> OperationMetadataRuntimeMetadata
operationMetadataRuntimeMetadata pOmrmAddtional_ =
OperationMetadataRuntimeMetadata'
{ _omrmAddtional = _Coerce # pOmrmAddtional_
}
-- | Properties of the object. Contains field \@type with type URL.
omrmAddtional :: Lens' OperationMetadataRuntimeMetadata (HashMap Text JSONValue)
omrmAddtional
= lens _omrmAddtional
(\ s a -> s{_omrmAddtional = a})
. _Coerce
instance FromJSON OperationMetadataRuntimeMetadata
where
parseJSON
= withObject "OperationMetadataRuntimeMetadata"
(\ o ->
OperationMetadataRuntimeMetadata' <$>
(parseJSONObject o))
instance ToJSON OperationMetadataRuntimeMetadata
where
toJSON = toJSON . _omrmAddtional
--
-- /See:/ 'clinicalCondition' smart constructor.
data ClinicalCondition = ClinicalCondition'
{ _ccExternalIds :: !(Maybe [ExternalId])
, _ccNames :: !(Maybe [Text])
, _ccConceptId :: !(Maybe Text)
, _ccOmimId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ClinicalCondition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccExternalIds'
--
-- * 'ccNames'
--
-- * 'ccConceptId'
--
-- * 'ccOmimId'
clinicalCondition
:: ClinicalCondition
clinicalCondition =
ClinicalCondition'
{ _ccExternalIds = Nothing
, _ccNames = Nothing
, _ccConceptId = Nothing
, _ccOmimId = Nothing
}
-- | The set of external IDs for this condition.
ccExternalIds :: Lens' ClinicalCondition [ExternalId]
ccExternalIds
= lens _ccExternalIds
(\ s a -> s{_ccExternalIds = a})
. _Default
. _Coerce
-- | A set of names for the condition.
ccNames :: Lens' ClinicalCondition [Text]
ccNames
= lens _ccNames (\ s a -> s{_ccNames = a}) . _Default
. _Coerce
-- | The MedGen concept id associated with this gene. Search for these IDs at
-- http:\/\/www.ncbi.nlm.nih.gov\/medgen\/
ccConceptId :: Lens' ClinicalCondition (Maybe Text)
ccConceptId
= lens _ccConceptId (\ s a -> s{_ccConceptId = a})
-- | The OMIM id for this condition. Search for these IDs at
-- http:\/\/omim.org\/
ccOmimId :: Lens' ClinicalCondition (Maybe Text)
ccOmimId = lens _ccOmimId (\ s a -> s{_ccOmimId = a})
instance FromJSON ClinicalCondition where
parseJSON
= withObject "ClinicalCondition"
(\ o ->
ClinicalCondition' <$>
(o .:? "externalIds" .!= mempty) <*>
(o .:? "names" .!= mempty)
<*> (o .:? "conceptId")
<*> (o .:? "omimId"))
instance ToJSON ClinicalCondition where
toJSON ClinicalCondition'{..}
= object
(catMaybes
[("externalIds" .=) <$> _ccExternalIds,
("names" .=) <$> _ccNames,
("conceptId" .=) <$> _ccConceptId,
("omimId" .=) <$> _ccOmimId])
-- | The call set search request.
--
-- /See:/ 'searchCallSetsRequest' smart constructor.
data SearchCallSetsRequest = SearchCallSetsRequest'
{ _scsrName :: !(Maybe Text)
, _scsrPageToken :: !(Maybe Text)
, _scsrVariantSetIds :: !(Maybe [Text])
, _scsrPageSize :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchCallSetsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'scsrName'
--
-- * 'scsrPageToken'
--
-- * 'scsrVariantSetIds'
--
-- * 'scsrPageSize'
searchCallSetsRequest
:: SearchCallSetsRequest
searchCallSetsRequest =
SearchCallSetsRequest'
{ _scsrName = Nothing
, _scsrPageToken = Nothing
, _scsrVariantSetIds = Nothing
, _scsrPageSize = Nothing
}
-- | Only return call sets for which a substring of the name matches this
-- string.
scsrName :: Lens' SearchCallSetsRequest (Maybe Text)
scsrName = lens _scsrName (\ s a -> s{_scsrName = a})
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
scsrPageToken :: Lens' SearchCallSetsRequest (Maybe Text)
scsrPageToken
= lens _scsrPageToken
(\ s a -> s{_scsrPageToken = a})
-- | Restrict the query to call sets within the given variant sets. At least
-- one ID must be provided.
scsrVariantSetIds :: Lens' SearchCallSetsRequest [Text]
scsrVariantSetIds
= lens _scsrVariantSetIds
(\ s a -> s{_scsrVariantSetIds = a})
. _Default
. _Coerce
-- | The maximum number of results to return in a single page. If
-- unspecified, defaults to 1024.
scsrPageSize :: Lens' SearchCallSetsRequest (Maybe Int32)
scsrPageSize
= lens _scsrPageSize (\ s a -> s{_scsrPageSize = a})
. mapping _Coerce
instance FromJSON SearchCallSetsRequest where
parseJSON
= withObject "SearchCallSetsRequest"
(\ o ->
SearchCallSetsRequest' <$>
(o .:? "name") <*> (o .:? "pageToken") <*>
(o .:? "variantSetIds" .!= mempty)
<*> (o .:? "pageSize"))
instance ToJSON SearchCallSetsRequest where
toJSON SearchCallSetsRequest'{..}
= object
(catMaybes
[("name" .=) <$> _scsrName,
("pageToken" .=) <$> _scsrPageToken,
("variantSetIds" .=) <$> _scsrVariantSetIds,
("pageSize" .=) <$> _scsrPageSize])
--
-- /See:/ 'entry' smart constructor.
data Entry = Entry'
{ _eStatus :: !(Maybe Status)
, _eAnnotation :: !(Maybe Annotation)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Entry' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eStatus'
--
-- * 'eAnnotation'
entry
:: Entry
entry =
Entry'
{ _eStatus = Nothing
, _eAnnotation = Nothing
}
-- | The creation status.
eStatus :: Lens' Entry (Maybe Status)
eStatus = lens _eStatus (\ s a -> s{_eStatus = a})
-- | The created annotation, if creation was successful.
eAnnotation :: Lens' Entry (Maybe Annotation)
eAnnotation
= lens _eAnnotation (\ s a -> s{_eAnnotation = a})
instance FromJSON Entry where
parseJSON
= withObject "Entry"
(\ o ->
Entry' <$> (o .:? "status") <*> (o .:? "annotation"))
instance ToJSON Entry where
toJSON Entry'{..}
= object
(catMaybes
[("status" .=) <$> _eStatus,
("annotation" .=) <$> _eAnnotation])
-- | The read search response.
--
-- /See:/ 'searchReadsResponse' smart constructor.
data SearchReadsResponse = SearchReadsResponse'
{ _sNextPageToken :: !(Maybe Text)
, _sAlignments :: !(Maybe [Read'])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchReadsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sNextPageToken'
--
-- * 'sAlignments'
searchReadsResponse
:: SearchReadsResponse
searchReadsResponse =
SearchReadsResponse'
{ _sNextPageToken = Nothing
, _sAlignments = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
sNextPageToken :: Lens' SearchReadsResponse (Maybe Text)
sNextPageToken
= lens _sNextPageToken
(\ s a -> s{_sNextPageToken = a})
-- | The list of matching alignments sorted by mapped genomic coordinate, if
-- any, ascending in position within the same reference. Unmapped reads,
-- which have no position, are returned contiguously and are sorted in
-- ascending lexicographic order by fragment name.
sAlignments :: Lens' SearchReadsResponse [Read']
sAlignments
= lens _sAlignments (\ s a -> s{_sAlignments = a}) .
_Default
. _Coerce
instance FromJSON SearchReadsResponse where
parseJSON
= withObject "SearchReadsResponse"
(\ o ->
SearchReadsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "alignments" .!= mempty))
instance ToJSON SearchReadsResponse where
toJSON SearchReadsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _sNextPageToken,
("alignments" .=) <$> _sAlignments])
--
-- /See:/ 'program' smart constructor.
data Program = Program'
{ _pPrevProgramId :: !(Maybe Text)
, _pName :: !(Maybe Text)
, _pVersion :: !(Maybe Text)
, _pId :: !(Maybe Text)
, _pCommandLine :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Program' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pPrevProgramId'
--
-- * 'pName'
--
-- * 'pVersion'
--
-- * 'pId'
--
-- * 'pCommandLine'
program
:: Program
program =
Program'
{ _pPrevProgramId = Nothing
, _pName = Nothing
, _pVersion = Nothing
, _pId = Nothing
, _pCommandLine = Nothing
}
-- | The ID of the program run before this one.
pPrevProgramId :: Lens' Program (Maybe Text)
pPrevProgramId
= lens _pPrevProgramId
(\ s a -> s{_pPrevProgramId = a})
-- | The display name of the program. This is typically the colloquial name
-- of the tool used, for example \'bwa\' or \'picard\'.
pName :: Lens' Program (Maybe Text)
pName = lens _pName (\ s a -> s{_pName = a})
-- | The version of the program run.
pVersion :: Lens' Program (Maybe Text)
pVersion = lens _pVersion (\ s a -> s{_pVersion = a})
-- | The user specified locally unique ID of the program. Used along with
-- \`prevProgramId\` to define an ordering between programs.
pId :: Lens' Program (Maybe Text)
pId = lens _pId (\ s a -> s{_pId = a})
-- | The command line used to run this program.
pCommandLine :: Lens' Program (Maybe Text)
pCommandLine
= lens _pCommandLine (\ s a -> s{_pCommandLine = a})
instance FromJSON Program where
parseJSON
= withObject "Program"
(\ o ->
Program' <$>
(o .:? "prevProgramId") <*> (o .:? "name") <*>
(o .:? "version")
<*> (o .:? "id")
<*> (o .:? "commandLine"))
instance ToJSON Program where
toJSON Program'{..}
= object
(catMaybes
[("prevProgramId" .=) <$> _pPrevProgramId,
("name" .=) <$> _pName, ("version" .=) <$> _pVersion,
("id" .=) <$> _pId,
("commandLine" .=) <$> _pCommandLine])
--
-- /See:/ 'searchReferencesRequest' smart constructor.
data SearchReferencesRequest = SearchReferencesRequest'
{ _sReferenceSetId :: !(Maybe Text)
, _sMD5checksums :: !(Maybe [Text])
, _sAccessions :: !(Maybe [Text])
, _sPageToken :: !(Maybe Text)
, _sPageSize :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchReferencesRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sReferenceSetId'
--
-- * 'sMD5checksums'
--
-- * 'sAccessions'
--
-- * 'sPageToken'
--
-- * 'sPageSize'
searchReferencesRequest
:: SearchReferencesRequest
searchReferencesRequest =
SearchReferencesRequest'
{ _sReferenceSetId = Nothing
, _sMD5checksums = Nothing
, _sAccessions = Nothing
, _sPageToken = Nothing
, _sPageSize = Nothing
}
-- | If present, return only references which belong to this reference set.
sReferenceSetId :: Lens' SearchReferencesRequest (Maybe Text)
sReferenceSetId
= lens _sReferenceSetId
(\ s a -> s{_sReferenceSetId = a})
-- | If present, return references for which the md5checksum matches exactly.
sMD5checksums :: Lens' SearchReferencesRequest [Text]
sMD5checksums
= lens _sMD5checksums
(\ s a -> s{_sMD5checksums = a})
. _Default
. _Coerce
-- | If present, return references for which a prefix of any of
-- sourceAccessions matches any of these strings. Accession numbers typically
-- have a main number and a version, for example \`GCF_000001405.26\`.
sAccessions :: Lens' SearchReferencesRequest [Text]
sAccessions
= lens _sAccessions (\ s a -> s{_sAccessions = a}) .
_Default
. _Coerce
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
sPageToken :: Lens' SearchReferencesRequest (Maybe Text)
sPageToken
= lens _sPageToken (\ s a -> s{_sPageToken = a})
-- | The maximum number of results to return in a single page. If
-- unspecified, defaults to 1024. The maximum value is 4096.
sPageSize :: Lens' SearchReferencesRequest (Maybe Int32)
sPageSize
= lens _sPageSize (\ s a -> s{_sPageSize = a}) .
mapping _Coerce
instance FromJSON SearchReferencesRequest where
parseJSON
= withObject "SearchReferencesRequest"
(\ o ->
SearchReferencesRequest' <$>
(o .:? "referenceSetId") <*>
(o .:? "md5checksums" .!= mempty)
<*> (o .:? "accessions" .!= mempty)
<*> (o .:? "pageToken")
<*> (o .:? "pageSize"))
instance ToJSON SearchReferencesRequest where
toJSON SearchReferencesRequest'{..}
= object
(catMaybes
[("referenceSetId" .=) <$> _sReferenceSetId,
("md5checksums" .=) <$> _sMD5checksums,
("accessions" .=) <$> _sAccessions,
("pageToken" .=) <$> _sPageToken,
("pageSize" .=) <$> _sPageSize])
--
-- /See:/ 'batchCreateAnnotationsResponse' smart constructor.
newtype BatchCreateAnnotationsResponse = BatchCreateAnnotationsResponse'
{ _bcarEntries :: Maybe [Entry]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'BatchCreateAnnotationsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bcarEntries'
batchCreateAnnotationsResponse
:: BatchCreateAnnotationsResponse
batchCreateAnnotationsResponse =
BatchCreateAnnotationsResponse'
{ _bcarEntries = Nothing
}
-- | The resulting per-annotation entries, ordered consistently with the
-- original request.
bcarEntries :: Lens' BatchCreateAnnotationsResponse [Entry]
bcarEntries
= lens _bcarEntries (\ s a -> s{_bcarEntries = a}) .
_Default
. _Coerce
instance FromJSON BatchCreateAnnotationsResponse
where
parseJSON
= withObject "BatchCreateAnnotationsResponse"
(\ o ->
BatchCreateAnnotationsResponse' <$>
(o .:? "entries" .!= mempty))
instance ToJSON BatchCreateAnnotationsResponse where
toJSON BatchCreateAnnotationsResponse'{..}
= object
(catMaybes [("entries" .=) <$> _bcarEntries])
--
-- /See:/ 'codingSequence' smart constructor.
data CodingSequence = CodingSequence'
{ _csStart :: !(Maybe (Textual Int64))
, _csEnd :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CodingSequence' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'csStart'
--
-- * 'csEnd'
codingSequence
:: CodingSequence
codingSequence =
CodingSequence'
{ _csStart = Nothing
, _csEnd = Nothing
}
-- | The start of the coding sequence on this annotation\'s reference
-- sequence, 0-based inclusive. Note that this position is relative to the
-- reference start, and *not* the containing annotation start.
csStart :: Lens' CodingSequence (Maybe Int64)
csStart
= lens _csStart (\ s a -> s{_csStart = a}) .
mapping _Coerce
-- | The end of the coding sequence on this annotation\'s reference sequence,
-- 0-based exclusive. Note that this position is relative to the reference
-- start, and *not* the containing annotation start.
csEnd :: Lens' CodingSequence (Maybe Int64)
csEnd
= lens _csEnd (\ s a -> s{_csEnd = a}) .
mapping _Coerce
instance FromJSON CodingSequence where
parseJSON
= withObject "CodingSequence"
(\ o ->
CodingSequence' <$>
(o .:? "start") <*> (o .:? "end"))
instance ToJSON CodingSequence where
toJSON CodingSequence'{..}
= object
(catMaybes
[("start" .=) <$> _csStart, ("end" .=) <$> _csEnd])
--
-- /See:/ 'searchReferenceSetsResponse' smart constructor.
data SearchReferenceSetsResponse = SearchReferenceSetsResponse'
{ _srsrNextPageToken :: !(Maybe Text)
, _srsrReferenceSets :: !(Maybe [ReferenceSet])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchReferenceSetsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srsrNextPageToken'
--
-- * 'srsrReferenceSets'
searchReferenceSetsResponse
:: SearchReferenceSetsResponse
searchReferenceSetsResponse =
SearchReferenceSetsResponse'
{ _srsrNextPageToken = Nothing
, _srsrReferenceSets = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
srsrNextPageToken :: Lens' SearchReferenceSetsResponse (Maybe Text)
srsrNextPageToken
= lens _srsrNextPageToken
(\ s a -> s{_srsrNextPageToken = a})
-- | The matching reference sets.
srsrReferenceSets :: Lens' SearchReferenceSetsResponse [ReferenceSet]
srsrReferenceSets
= lens _srsrReferenceSets
(\ s a -> s{_srsrReferenceSets = a})
. _Default
. _Coerce
instance FromJSON SearchReferenceSetsResponse where
parseJSON
= withObject "SearchReferenceSetsResponse"
(\ o ->
SearchReferenceSetsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "referenceSets" .!= mempty))
instance ToJSON SearchReferenceSetsResponse where
toJSON SearchReferenceSetsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _srsrNextPageToken,
("referenceSets" .=) <$> _srsrReferenceSets])
-- | A 0-based half-open genomic coordinate range for search requests.
--
-- /See:/ 'range' smart constructor.
data Range = Range'
{ _rStart :: !(Maybe (Textual Int64))
, _rReferenceName :: !(Maybe Text)
, _rEnd :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Range' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rStart'
--
-- * 'rReferenceName'
--
-- * 'rEnd'
range
:: Range
range =
Range'
{ _rStart = Nothing
, _rReferenceName = Nothing
, _rEnd = Nothing
}
-- | The start position of the range on the reference, 0-based inclusive.
rStart :: Lens' Range (Maybe Int64)
rStart
= lens _rStart (\ s a -> s{_rStart = a}) .
mapping _Coerce
-- | The reference sequence name, for example \`chr1\`, \`1\`, or \`chrX\`.
rReferenceName :: Lens' Range (Maybe Text)
rReferenceName
= lens _rReferenceName
(\ s a -> s{_rReferenceName = a})
-- | The end position of the range on the reference, 0-based exclusive.
rEnd :: Lens' Range (Maybe Int64)
rEnd
= lens _rEnd (\ s a -> s{_rEnd = a}) .
mapping _Coerce
instance FromJSON Range where
parseJSON
= withObject "Range"
(\ o ->
Range' <$>
(o .:? "start") <*> (o .:? "referenceName") <*>
(o .:? "end"))
instance ToJSON Range where
toJSON Range'{..}
= object
(catMaybes
[("start" .=) <$> _rStart,
("referenceName" .=) <$> _rReferenceName,
("end" .=) <$> _rEnd])
-- | A read group set is a logical collection of read groups, which are
-- collections of reads produced by a sequencer. A read group set typically
-- models reads corresponding to one sample, sequenced one way, and aligned
-- one way. * A read group set belongs to one dataset. * A read group
-- belongs to one read group set. * A read belongs to one read group. For
-- more genomics resource definitions, see [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
--
-- /See:/ 'readGroupSet' smart constructor.
data ReadGroupSet = ReadGroupSet'
{ _rgsReferenceSetId :: !(Maybe Text)
, _rgsName :: !(Maybe Text)
, _rgsDataSetId :: !(Maybe Text)
, _rgsId :: !(Maybe Text)
, _rgsInfo :: !(Maybe ReadGroupSetInfo)
, _rgsReadGroups :: !(Maybe [ReadGroup])
, _rgsFilename :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReadGroupSet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rgsReferenceSetId'
--
-- * 'rgsName'
--
-- * 'rgsDataSetId'
--
-- * 'rgsId'
--
-- * 'rgsInfo'
--
-- * 'rgsReadGroups'
--
-- * 'rgsFilename'
readGroupSet
:: ReadGroupSet
readGroupSet =
ReadGroupSet'
{ _rgsReferenceSetId = Nothing
, _rgsName = Nothing
, _rgsDataSetId = Nothing
, _rgsId = Nothing
, _rgsInfo = Nothing
, _rgsReadGroups = Nothing
, _rgsFilename = Nothing
}
-- | The reference set to which the reads in this read group set are aligned.
rgsReferenceSetId :: Lens' ReadGroupSet (Maybe Text)
rgsReferenceSetId
= lens _rgsReferenceSetId
(\ s a -> s{_rgsReferenceSetId = a})
-- | The read group set name. By default this will be initialized to the
-- sample name of the sequenced data contained in this set.
rgsName :: Lens' ReadGroupSet (Maybe Text)
rgsName = lens _rgsName (\ s a -> s{_rgsName = a})
-- | The dataset to which this read group set belongs.
rgsDataSetId :: Lens' ReadGroupSet (Maybe Text)
rgsDataSetId
= lens _rgsDataSetId (\ s a -> s{_rgsDataSetId = a})
-- | The server-generated read group set ID, unique for all read group sets.
rgsId :: Lens' ReadGroupSet (Maybe Text)
rgsId = lens _rgsId (\ s a -> s{_rgsId = a})
-- | A map of additional read group set information.
rgsInfo :: Lens' ReadGroupSet (Maybe ReadGroupSetInfo)
rgsInfo = lens _rgsInfo (\ s a -> s{_rgsInfo = a})
-- | The read groups in this set. There are typically 1-10 read groups in a
-- read group set.
rgsReadGroups :: Lens' ReadGroupSet [ReadGroup]
rgsReadGroups
= lens _rgsReadGroups
(\ s a -> s{_rgsReadGroups = a})
. _Default
. _Coerce
-- | The filename of the original source file for this read group set, if
-- any.
rgsFilename :: Lens' ReadGroupSet (Maybe Text)
rgsFilename
= lens _rgsFilename (\ s a -> s{_rgsFilename = a})
instance FromJSON ReadGroupSet where
parseJSON
= withObject "ReadGroupSet"
(\ o ->
ReadGroupSet' <$>
(o .:? "referenceSetId") <*> (o .:? "name") <*>
(o .:? "datasetId")
<*> (o .:? "id")
<*> (o .:? "info")
<*> (o .:? "readGroups" .!= mempty)
<*> (o .:? "filename"))
instance ToJSON ReadGroupSet where
toJSON ReadGroupSet'{..}
= object
(catMaybes
[("referenceSetId" .=) <$> _rgsReferenceSetId,
("name" .=) <$> _rgsName,
("datasetId" .=) <$> _rgsDataSetId,
("id" .=) <$> _rgsId, ("info" .=) <$> _rgsInfo,
("readGroups" .=) <$> _rgsReadGroups,
("filename" .=) <$> _rgsFilename])
-- | The read group set export request.
--
-- /See:/ 'exportReadGroupSetRequest' smart constructor.
data ExportReadGroupSetRequest = ExportReadGroupSetRequest'
{ _ergsrReferenceNames :: !(Maybe [Text])
, _ergsrExportURI :: !(Maybe Text)
, _ergsrProjectId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ExportReadGroupSetRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ergsrReferenceNames'
--
-- * 'ergsrExportURI'
--
-- * 'ergsrProjectId'
exportReadGroupSetRequest
:: ExportReadGroupSetRequest
exportReadGroupSetRequest =
ExportReadGroupSetRequest'
{ _ergsrReferenceNames = Nothing
, _ergsrExportURI = Nothing
, _ergsrProjectId = Nothing
}
-- | The reference names to export. If this is not specified, all reference
-- sequences, including unmapped reads, are exported. Use \`*\` to export
-- only unmapped reads.
ergsrReferenceNames :: Lens' ExportReadGroupSetRequest [Text]
ergsrReferenceNames
= lens _ergsrReferenceNames
(\ s a -> s{_ergsrReferenceNames = a})
. _Default
. _Coerce
-- | Required. A Google Cloud Storage URI for the exported BAM file. The
-- currently authenticated user must have write access to the new file. An
-- error will be returned if the URI already contains data.
ergsrExportURI :: Lens' ExportReadGroupSetRequest (Maybe Text)
ergsrExportURI
= lens _ergsrExportURI
(\ s a -> s{_ergsrExportURI = a})
-- | Required. The Google Cloud project ID that owns this export. The caller
-- must have WRITE access to this project.
ergsrProjectId :: Lens' ExportReadGroupSetRequest (Maybe Text)
ergsrProjectId
= lens _ergsrProjectId
(\ s a -> s{_ergsrProjectId = a})
instance FromJSON ExportReadGroupSetRequest where
parseJSON
= withObject "ExportReadGroupSetRequest"
(\ o ->
ExportReadGroupSetRequest' <$>
(o .:? "referenceNames" .!= mempty) <*>
(o .:? "exportUri")
<*> (o .:? "projectId"))
instance ToJSON ExportReadGroupSetRequest where
toJSON ExportReadGroupSetRequest'{..}
= object
(catMaybes
[("referenceNames" .=) <$> _ergsrReferenceNames,
("exportUri" .=) <$> _ergsrExportURI,
("projectId" .=) <$> _ergsrProjectId])
-- | The variant data import response.
--
-- /See:/ 'importVariantsResponse' smart constructor.
newtype ImportVariantsResponse = ImportVariantsResponse'
{ _ivrCallSetIds :: Maybe [Text]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ImportVariantsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ivrCallSetIds'
importVariantsResponse
:: ImportVariantsResponse
importVariantsResponse =
ImportVariantsResponse'
{ _ivrCallSetIds = Nothing
}
-- | IDs of the call sets created during the import.
ivrCallSetIds :: Lens' ImportVariantsResponse [Text]
ivrCallSetIds
= lens _ivrCallSetIds
(\ s a -> s{_ivrCallSetIds = a})
. _Default
. _Coerce
instance FromJSON ImportVariantsResponse where
parseJSON
= withObject "ImportVariantsResponse"
(\ o ->
ImportVariantsResponse' <$>
(o .:? "callSetIds" .!= mempty))
instance ToJSON ImportVariantsResponse where
toJSON ImportVariantsResponse'{..}
= object
(catMaybes [("callSetIds" .=) <$> _ivrCallSetIds])
--
-- /See:/ 'listCoverageBucketsResponse' smart constructor.
data ListCoverageBucketsResponse = ListCoverageBucketsResponse'
{ _lcbrNextPageToken :: !(Maybe Text)
, _lcbrBucketWidth :: !(Maybe (Textual Int64))
, _lcbrCoverageBuckets :: !(Maybe [CoverageBucket])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListCoverageBucketsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lcbrNextPageToken'
--
-- * 'lcbrBucketWidth'
--
-- * 'lcbrCoverageBuckets'
listCoverageBucketsResponse
:: ListCoverageBucketsResponse
listCoverageBucketsResponse =
ListCoverageBucketsResponse'
{ _lcbrNextPageToken = Nothing
, _lcbrBucketWidth = Nothing
, _lcbrCoverageBuckets = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
lcbrNextPageToken :: Lens' ListCoverageBucketsResponse (Maybe Text)
lcbrNextPageToken
= lens _lcbrNextPageToken
(\ s a -> s{_lcbrNextPageToken = a})
-- | The length of each coverage bucket in base pairs. Note that buckets at
-- the end of a reference sequence may be shorter. This value is omitted if
-- the bucket width is infinity (the default behaviour, with no range or
-- \`targetBucketWidth\`).
lcbrBucketWidth :: Lens' ListCoverageBucketsResponse (Maybe Int64)
lcbrBucketWidth
= lens _lcbrBucketWidth
(\ s a -> s{_lcbrBucketWidth = a})
. mapping _Coerce
-- | The coverage buckets. The list of buckets is sparse; a bucket with 0
-- overlapping reads is not returned. A bucket never crosses more than one
-- reference sequence. Each bucket has width \`bucketWidth\`, unless its
-- end is the end of the reference sequence.
lcbrCoverageBuckets :: Lens' ListCoverageBucketsResponse [CoverageBucket]
lcbrCoverageBuckets
= lens _lcbrCoverageBuckets
(\ s a -> s{_lcbrCoverageBuckets = a})
. _Default
. _Coerce
instance FromJSON ListCoverageBucketsResponse where
parseJSON
= withObject "ListCoverageBucketsResponse"
(\ o ->
ListCoverageBucketsResponse' <$>
(o .:? "nextPageToken") <*> (o .:? "bucketWidth") <*>
(o .:? "coverageBuckets" .!= mempty))
instance ToJSON ListCoverageBucketsResponse where
toJSON ListCoverageBucketsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _lcbrNextPageToken,
("bucketWidth" .=) <$> _lcbrBucketWidth,
("coverageBuckets" .=) <$> _lcbrCoverageBuckets])
-- | Request message for \`TestIamPermissions\` method.
--
-- /See:/ 'testIAMPermissionsRequest' smart constructor.
newtype TestIAMPermissionsRequest = TestIAMPermissionsRequest'
{ _tiprPermissions :: Maybe [Text]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TestIAMPermissionsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tiprPermissions'
testIAMPermissionsRequest
:: TestIAMPermissionsRequest
testIAMPermissionsRequest =
TestIAMPermissionsRequest'
{ _tiprPermissions = Nothing
}
-- | REQUIRED: The set of permissions to check for the \'resource\'.
-- Permissions with wildcards (such as \'*\' or \'storage.*\') are not
-- allowed. Allowed permissions are: * \`genomics.datasets.create\` *
-- \`genomics.datasets.delete\` * \`genomics.datasets.get\` *
-- \`genomics.datasets.list\` * \`genomics.datasets.update\` *
-- \`genomics.datasets.getIamPolicy\` * \`genomics.datasets.setIamPolicy\`
tiprPermissions :: Lens' TestIAMPermissionsRequest [Text]
tiprPermissions
= lens _tiprPermissions
(\ s a -> s{_tiprPermissions = a})
. _Default
. _Coerce
instance FromJSON TestIAMPermissionsRequest where
parseJSON
= withObject "TestIAMPermissionsRequest"
(\ o ->
TestIAMPermissionsRequest' <$>
(o .:? "permissions" .!= mempty))
instance ToJSON TestIAMPermissionsRequest where
toJSON TestIAMPermissionsRequest'{..}
= object
(catMaybes [("permissions" .=) <$> _tiprPermissions])
-- | The read group set import response.
--
-- /See:/ 'importReadGroupSetsResponse' smart constructor.
newtype ImportReadGroupSetsResponse = ImportReadGroupSetsResponse'
{ _irgsrReadGroupSetIds :: Maybe [Text]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ImportReadGroupSetsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'irgsrReadGroupSetIds'
importReadGroupSetsResponse
:: ImportReadGroupSetsResponse
importReadGroupSetsResponse =
ImportReadGroupSetsResponse'
{ _irgsrReadGroupSetIds = Nothing
}
-- | IDs of the read group sets that were created.
irgsrReadGroupSetIds :: Lens' ImportReadGroupSetsResponse [Text]
irgsrReadGroupSetIds
= lens _irgsrReadGroupSetIds
(\ s a -> s{_irgsrReadGroupSetIds = a})
. _Default
. _Coerce
instance FromJSON ImportReadGroupSetsResponse where
parseJSON
= withObject "ImportReadGroupSetsResponse"
(\ o ->
ImportReadGroupSetsResponse' <$>
(o .:? "readGroupSetIds" .!= mempty))
instance ToJSON ImportReadGroupSetsResponse where
toJSON ImportReadGroupSetsResponse'{..}
= object
(catMaybes
[("readGroupSetIds" .=) <$> _irgsrReadGroupSetIds])
-- | A linear alignment can be represented by one CIGAR string. Describes the
-- mapped position and local alignment of the read to the reference.
--
-- /See:/ 'linearAlignment' smart constructor.
data LinearAlignment = LinearAlignment'
{ _laCigar :: !(Maybe [CigarUnit])
, _laMAppingQuality :: !(Maybe (Textual Int32))
, _laPosition :: !(Maybe Position)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'LinearAlignment' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'laCigar'
--
-- * 'laMAppingQuality'
--
-- * 'laPosition'
linearAlignment
:: LinearAlignment
linearAlignment =
LinearAlignment'
{ _laCigar = Nothing
, _laMAppingQuality = Nothing
, _laPosition = Nothing
}
-- | Represents the local alignment of this sequence (alignment matches,
-- indels, etc) against the reference.
laCigar :: Lens' LinearAlignment [CigarUnit]
laCigar
= lens _laCigar (\ s a -> s{_laCigar = a}) . _Default
. _Coerce
-- | The mapping quality of this alignment. Represents how likely the read
-- maps to this position as opposed to other locations. Specifically, this
-- is -10 log10 Pr(mapping position is wrong), rounded to the nearest
-- integer.
laMAppingQuality :: Lens' LinearAlignment (Maybe Int32)
laMAppingQuality
= lens _laMAppingQuality
(\ s a -> s{_laMAppingQuality = a})
. mapping _Coerce
-- | The position of this alignment.
laPosition :: Lens' LinearAlignment (Maybe Position)
laPosition
= lens _laPosition (\ s a -> s{_laPosition = a})
instance FromJSON LinearAlignment where
parseJSON
= withObject "LinearAlignment"
(\ o ->
LinearAlignment' <$>
(o .:? "cigar" .!= mempty) <*>
(o .:? "mappingQuality")
<*> (o .:? "position"))
instance ToJSON LinearAlignment where
toJSON LinearAlignment'{..}
= object
(catMaybes
[("cigar" .=) <$> _laCigar,
("mappingQuality" .=) <$> _laMAppingQuality,
("position" .=) <$> _laPosition])
-- | An annotation set is a logical grouping of annotations that share
-- consistent type information and provenance. Examples of annotation sets
-- include \'all genes from refseq\', and \'all variant annotations from
-- ClinVar\'.
--
-- /See:/ 'annotationSet' smart constructor.
data AnnotationSet = AnnotationSet'
{ _asReferenceSetId :: !(Maybe Text)
, _asName :: !(Maybe Text)
, _asDataSetId :: !(Maybe Text)
, _asId :: !(Maybe Text)
, _asType :: !(Maybe AnnotationSetType)
, _asSourceURI :: !(Maybe Text)
, _asInfo :: !(Maybe AnnotationSetInfo)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnnotationSet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asReferenceSetId'
--
-- * 'asName'
--
-- * 'asDataSetId'
--
-- * 'asId'
--
-- * 'asType'
--
-- * 'asSourceURI'
--
-- * 'asInfo'
annotationSet
:: AnnotationSet
annotationSet =
AnnotationSet'
{ _asReferenceSetId = Nothing
, _asName = Nothing
, _asDataSetId = Nothing
, _asId = Nothing
, _asType = Nothing
, _asSourceURI = Nothing
, _asInfo = Nothing
}
-- | The ID of the reference set that defines the coordinate space for this
-- set\'s annotations.
asReferenceSetId :: Lens' AnnotationSet (Maybe Text)
asReferenceSetId
= lens _asReferenceSetId
(\ s a -> s{_asReferenceSetId = a})
-- | The display name for this annotation set.
asName :: Lens' AnnotationSet (Maybe Text)
asName = lens _asName (\ s a -> s{_asName = a})
-- | The dataset to which this annotation set belongs.
asDataSetId :: Lens' AnnotationSet (Maybe Text)
asDataSetId
= lens _asDataSetId (\ s a -> s{_asDataSetId = a})
-- | The server-generated annotation set ID, unique across all annotation
-- sets.
asId :: Lens' AnnotationSet (Maybe Text)
asId = lens _asId (\ s a -> s{_asId = a})
-- | The type of annotations contained within this set.
asType :: Lens' AnnotationSet (Maybe AnnotationSetType)
asType = lens _asType (\ s a -> s{_asType = a})
-- | The source URI describing the file from which this annotation set was
-- generated, if any.
asSourceURI :: Lens' AnnotationSet (Maybe Text)
asSourceURI
= lens _asSourceURI (\ s a -> s{_asSourceURI = a})
-- | A map of additional annotation set information. This must be of the
-- form map (string key mapping to a list of string values).
asInfo :: Lens' AnnotationSet (Maybe AnnotationSetInfo)
asInfo = lens _asInfo (\ s a -> s{_asInfo = a})
instance FromJSON AnnotationSet where
parseJSON
= withObject "AnnotationSet"
(\ o ->
AnnotationSet' <$>
(o .:? "referenceSetId") <*> (o .:? "name") <*>
(o .:? "datasetId")
<*> (o .:? "id")
<*> (o .:? "type")
<*> (o .:? "sourceUri")
<*> (o .:? "info"))
instance ToJSON AnnotationSet where
toJSON AnnotationSet'{..}
= object
(catMaybes
[("referenceSetId" .=) <$> _asReferenceSetId,
("name" .=) <$> _asName,
("datasetId" .=) <$> _asDataSetId,
("id" .=) <$> _asId, ("type" .=) <$> _asType,
("sourceUri" .=) <$> _asSourceURI,
("info" .=) <$> _asInfo])
-- | A variant set is a collection of call sets and variants. It contains
-- summary statistics of those contents. A variant set belongs to a
-- dataset. For more genomics resource definitions, see [Fundamentals of
-- Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
--
-- /See:/ 'variantSet' smart constructor.
data VariantSet = VariantSet'
{ _vsReferenceSetId :: !(Maybe Text)
, _vsName :: !(Maybe Text)
, _vsDataSetId :: !(Maybe Text)
, _vsReferenceBounds :: !(Maybe [ReferenceBound])
, _vsMetadata :: !(Maybe [VariantSetMetadata])
, _vsId :: !(Maybe Text)
, _vsDescription :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'VariantSet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vsReferenceSetId'
--
-- * 'vsName'
--
-- * 'vsDataSetId'
--
-- * 'vsReferenceBounds'
--
-- * 'vsMetadata'
--
-- * 'vsId'
--
-- * 'vsDescription'
variantSet
:: VariantSet
variantSet =
VariantSet'
{ _vsReferenceSetId = Nothing
, _vsName = Nothing
, _vsDataSetId = Nothing
, _vsReferenceBounds = Nothing
, _vsMetadata = Nothing
, _vsId = Nothing
, _vsDescription = Nothing
}
-- | The reference set to which the variant set is mapped. The reference set
-- describes the alignment provenance of the variant set, while the
-- \`referenceBounds\` describe the shape of the actual variant data. The
-- reference set\'s reference names are a superset of those found in the
-- \`referenceBounds\`. For example, given a variant set that is mapped to
-- the GRCh38 reference set and contains a single variant on reference
-- \'X\', \`referenceBounds\` would contain only an entry for \'X\', while
-- the associated reference set enumerates all possible references: \'1\',
-- \'2\', \'X\', \'Y\', \'MT\', etc.
vsReferenceSetId :: Lens' VariantSet (Maybe Text)
vsReferenceSetId
= lens _vsReferenceSetId
(\ s a -> s{_vsReferenceSetId = a})
-- | User-specified, mutable name.
vsName :: Lens' VariantSet (Maybe Text)
vsName = lens _vsName (\ s a -> s{_vsName = a})
-- | The dataset to which this variant set belongs.
vsDataSetId :: Lens' VariantSet (Maybe Text)
vsDataSetId
= lens _vsDataSetId (\ s a -> s{_vsDataSetId = a})
-- | A list of all references used by the variants in a variant set with
-- associated coordinate upper bounds for each one.
vsReferenceBounds :: Lens' VariantSet [ReferenceBound]
vsReferenceBounds
= lens _vsReferenceBounds
(\ s a -> s{_vsReferenceBounds = a})
. _Default
. _Coerce
-- | The metadata associated with this variant set.
vsMetadata :: Lens' VariantSet [VariantSetMetadata]
vsMetadata
= lens _vsMetadata (\ s a -> s{_vsMetadata = a}) .
_Default
. _Coerce
-- | The server-generated variant set ID, unique across all variant sets.
vsId :: Lens' VariantSet (Maybe Text)
vsId = lens _vsId (\ s a -> s{_vsId = a})
-- | A textual description of this variant set.
vsDescription :: Lens' VariantSet (Maybe Text)
vsDescription
= lens _vsDescription
(\ s a -> s{_vsDescription = a})
instance FromJSON VariantSet where
parseJSON
= withObject "VariantSet"
(\ o ->
VariantSet' <$>
(o .:? "referenceSetId") <*> (o .:? "name") <*>
(o .:? "datasetId")
<*> (o .:? "referenceBounds" .!= mempty)
<*> (o .:? "metadata" .!= mempty)
<*> (o .:? "id")
<*> (o .:? "description"))
instance ToJSON VariantSet where
toJSON VariantSet'{..}
= object
(catMaybes
[("referenceSetId" .=) <$> _vsReferenceSetId,
("name" .=) <$> _vsName,
("datasetId" .=) <$> _vsDataSetId,
("referenceBounds" .=) <$> _vsReferenceBounds,
("metadata" .=) <$> _vsMetadata, ("id" .=) <$> _vsId,
("description" .=) <$> _vsDescription])
-- | Response message for \`TestIamPermissions\` method.
--
-- /See:/ 'testIAMPermissionsResponse' smart constructor.
newtype TestIAMPermissionsResponse = TestIAMPermissionsResponse'
{ _tiamprPermissions :: Maybe [Text]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TestIAMPermissionsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tiamprPermissions'
testIAMPermissionsResponse
:: TestIAMPermissionsResponse
testIAMPermissionsResponse =
TestIAMPermissionsResponse'
{ _tiamprPermissions = Nothing
}
-- | A subset of \`TestPermissionsRequest.permissions\` that the caller is
-- allowed.
tiamprPermissions :: Lens' TestIAMPermissionsResponse [Text]
tiamprPermissions
= lens _tiamprPermissions
(\ s a -> s{_tiamprPermissions = a})
. _Default
. _Coerce
instance FromJSON TestIAMPermissionsResponse where
parseJSON
= withObject "TestIAMPermissionsResponse"
(\ o ->
TestIAMPermissionsResponse' <$>
(o .:? "permissions" .!= mempty))
instance ToJSON TestIAMPermissionsResponse where
toJSON TestIAMPermissionsResponse'{..}
= object
(catMaybes
[("permissions" .=) <$> _tiamprPermissions])
-- | The dataset list response.
--
-- /See:/ 'listDataSetsResponse' smart constructor.
data ListDataSetsResponse = ListDataSetsResponse'
{ _ldsrNextPageToken :: !(Maybe Text)
, _ldsrDataSets :: !(Maybe [DataSet])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListDataSetsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ldsrNextPageToken'
--
-- * 'ldsrDataSets'
listDataSetsResponse
:: ListDataSetsResponse
listDataSetsResponse =
ListDataSetsResponse'
{ _ldsrNextPageToken = Nothing
, _ldsrDataSets = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
ldsrNextPageToken :: Lens' ListDataSetsResponse (Maybe Text)
ldsrNextPageToken
= lens _ldsrNextPageToken
(\ s a -> s{_ldsrNextPageToken = a})
-- | The list of matching Datasets.
ldsrDataSets :: Lens' ListDataSetsResponse [DataSet]
ldsrDataSets
= lens _ldsrDataSets (\ s a -> s{_ldsrDataSets = a})
. _Default
. _Coerce
instance FromJSON ListDataSetsResponse where
parseJSON
= withObject "ListDataSetsResponse"
(\ o ->
ListDataSetsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "datasets" .!= mempty))
instance ToJSON ListDataSetsResponse where
toJSON ListDataSetsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _ldsrNextPageToken,
("datasets" .=) <$> _ldsrDataSets])
-- | The read group set import request.
--
-- /See:/ 'importReadGroupSetsRequest' smart constructor.
data ImportReadGroupSetsRequest = ImportReadGroupSetsRequest'
{ _irgsrReferenceSetId :: !(Maybe Text)
, _irgsrDataSetId :: !(Maybe Text)
, _irgsrSourceURIs :: !(Maybe [Text])
, _irgsrPartitionStrategy :: !(Maybe ImportReadGroupSetsRequestPartitionStrategy)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ImportReadGroupSetsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'irgsrReferenceSetId'
--
-- * 'irgsrDataSetId'
--
-- * 'irgsrSourceURIs'
--
-- * 'irgsrPartitionStrategy'
importReadGroupSetsRequest
:: ImportReadGroupSetsRequest
importReadGroupSetsRequest =
ImportReadGroupSetsRequest'
{ _irgsrReferenceSetId = Nothing
, _irgsrDataSetId = Nothing
, _irgsrSourceURIs = Nothing
, _irgsrPartitionStrategy = Nothing
}
-- | The reference set to which the imported read group sets are aligned,
-- if any. The reference names of this reference set must be a superset of
-- those found in the imported file headers. If no reference set id is
-- provided, a best effort is made to associate with a matching reference
-- set.
irgsrReferenceSetId :: Lens' ImportReadGroupSetsRequest (Maybe Text)
irgsrReferenceSetId
= lens _irgsrReferenceSetId
(\ s a -> s{_irgsrReferenceSetId = a})
-- | Required. The ID of the dataset these read group sets will belong to.
-- The caller must have WRITE permissions to this dataset.
irgsrDataSetId :: Lens' ImportReadGroupSetsRequest (Maybe Text)
irgsrDataSetId
= lens _irgsrDataSetId
(\ s a -> s{_irgsrDataSetId = a})
-- | A list of URIs pointing at [BAM
-- files](https:\/\/samtools.github.io\/hts-specs\/SAMv1.pdf) in Google
-- Cloud Storage. Those URIs can include wildcards (*), but do not add or
-- remove matching files before import has completed. Note that Google
-- Cloud Storage object listing is only eventually consistent: files added
-- may not be immediately visible to everyone. Thus, if using a wildcard
-- it is preferable not to start the import immediately after the files are
-- created.
irgsrSourceURIs :: Lens' ImportReadGroupSetsRequest [Text]
irgsrSourceURIs
= lens _irgsrSourceURIs
(\ s a -> s{_irgsrSourceURIs = a})
. _Default
. _Coerce
-- | The partition strategy describes how read groups are partitioned into
-- read group sets.
irgsrPartitionStrategy :: Lens' ImportReadGroupSetsRequest (Maybe ImportReadGroupSetsRequestPartitionStrategy)
irgsrPartitionStrategy
= lens _irgsrPartitionStrategy
(\ s a -> s{_irgsrPartitionStrategy = a})
instance FromJSON ImportReadGroupSetsRequest where
parseJSON
= withObject "ImportReadGroupSetsRequest"
(\ o ->
ImportReadGroupSetsRequest' <$>
(o .:? "referenceSetId") <*> (o .:? "datasetId") <*>
(o .:? "sourceUris" .!= mempty)
<*> (o .:? "partitionStrategy"))
instance ToJSON ImportReadGroupSetsRequest where
toJSON ImportReadGroupSetsRequest'{..}
= object
(catMaybes
[("referenceSetId" .=) <$> _irgsrReferenceSetId,
("datasetId" .=) <$> _irgsrDataSetId,
("sourceUris" .=) <$> _irgsrSourceURIs,
("partitionStrategy" .=) <$>
_irgsrPartitionStrategy])
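-- Editorial sketch, not part of the generated bindings: an import request
-- pointing at BAM files in Cloud Storage. The dataset ID and bucket path
-- are hypothetical; per the docs above the caller needs WRITE access to
-- the dataset and the source URIs may contain wildcards.
exampleImportReadGroupSetsRequest :: ImportReadGroupSetsRequest
exampleImportReadGroupSetsRequest =
  importReadGroupSetsRequest
    { _irgsrDataSetId  = Just "example-dataset-id"          -- hypothetical ID
    , _irgsrSourceURIs = Just ["gs://example-bucket/*.bam"] -- hypothetical URI
    }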
-- | The variant data import request.
--
-- /See:/ 'importVariantsRequest' smart constructor.
data ImportVariantsRequest = ImportVariantsRequest'
{ _ivrVariantSetId :: !(Maybe Text)
, _ivrFormat :: !(Maybe ImportVariantsRequestFormat)
, _ivrInfoMergeConfig :: !(Maybe ImportVariantsRequestInfoMergeConfig)
, _ivrNormalizeReferenceNames :: !(Maybe Bool)
, _ivrSourceURIs :: !(Maybe [Text])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ImportVariantsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ivrVariantSetId'
--
-- * 'ivrFormat'
--
-- * 'ivrInfoMergeConfig'
--
-- * 'ivrNormalizeReferenceNames'
--
-- * 'ivrSourceURIs'
importVariantsRequest
:: ImportVariantsRequest
importVariantsRequest =
ImportVariantsRequest'
{ _ivrVariantSetId = Nothing
, _ivrFormat = Nothing
, _ivrInfoMergeConfig = Nothing
, _ivrNormalizeReferenceNames = Nothing
, _ivrSourceURIs = Nothing
}
-- | Required. The variant set to which variant data should be imported.
ivrVariantSetId :: Lens' ImportVariantsRequest (Maybe Text)
ivrVariantSetId
= lens _ivrVariantSetId
(\ s a -> s{_ivrVariantSetId = a})
-- | The format of the variant data being imported. If unspecified, defaults
-- to \`VCF\`.
ivrFormat :: Lens' ImportVariantsRequest (Maybe ImportVariantsRequestFormat)
ivrFormat
= lens _ivrFormat (\ s a -> s{_ivrFormat = a})
-- | A mapping between info field keys and the InfoMergeOperations to be
-- performed on them. This is plumbed down to the MergeVariantRequests
-- generated by the resulting import job.
ivrInfoMergeConfig :: Lens' ImportVariantsRequest (Maybe ImportVariantsRequestInfoMergeConfig)
ivrInfoMergeConfig
= lens _ivrInfoMergeConfig
(\ s a -> s{_ivrInfoMergeConfig = a})
-- | Convert reference names to the canonical representation. hg19
-- haploytypes (those reference names containing \"_hap\") are not modified
-- in any way. All other reference names are modified according to the
-- following rules: The reference name is capitalized. The \"chr\" prefix
-- is dropped for all autosomes and sex chromsomes. For example \"chr17\"
-- becomes \"17\" and \"chrX\" becomes \"X\". All mitochondrial chromosomes
-- (\"chrM\", \"chrMT\", etc) become \"MT\".
ivrNormalizeReferenceNames :: Lens' ImportVariantsRequest (Maybe Bool)
ivrNormalizeReferenceNames
= lens _ivrNormalizeReferenceNames
(\ s a -> s{_ivrNormalizeReferenceNames = a})
-- | A list of URIs referencing variant files in Google Cloud Storage. URIs
-- can include wildcards [as described
-- here](https:\/\/cloud.google.com\/storage\/docs\/gsutil\/addlhelp\/WildcardNames).
-- Note that recursive wildcards (\'**\') are not supported.
ivrSourceURIs :: Lens' ImportVariantsRequest [Text]
ivrSourceURIs
= lens _ivrSourceURIs
(\ s a -> s{_ivrSourceURIs = a})
. _Default
. _Coerce
instance FromJSON ImportVariantsRequest where
parseJSON
= withObject "ImportVariantsRequest"
(\ o ->
ImportVariantsRequest' <$>
(o .:? "variantSetId") <*> (o .:? "format") <*>
(o .:? "infoMergeConfig")
<*> (o .:? "normalizeReferenceNames")
<*> (o .:? "sourceUris" .!= mempty))
instance ToJSON ImportVariantsRequest where
toJSON ImportVariantsRequest'{..}
= object
(catMaybes
[("variantSetId" .=) <$> _ivrVariantSetId,
("format" .=) <$> _ivrFormat,
("infoMergeConfig" .=) <$> _ivrInfoMergeConfig,
("normalizeReferenceNames" .=) <$>
_ivrNormalizeReferenceNames,
("sourceUris" .=) <$> _ivrSourceURIs])
-- | An identifier from an external data source, given as the source name
-- and the ID used by that source.
--
-- /See:/ 'externalId' smart constructor.
data ExternalId = ExternalId'
{ _eiSourceName :: !(Maybe Text)
, _eiId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ExternalId' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eiSourceName'
--
-- * 'eiId'
externalId
:: ExternalId
externalId =
ExternalId'
{ _eiSourceName = Nothing
, _eiId = Nothing
}
-- | The name of the source of this data.
eiSourceName :: Lens' ExternalId (Maybe Text)
eiSourceName
= lens _eiSourceName (\ s a -> s{_eiSourceName = a})
-- | The id used by the source of this data.
eiId :: Lens' ExternalId (Maybe Text)
eiId = lens _eiId (\ s a -> s{_eiId = a})
instance FromJSON ExternalId where
parseJSON
= withObject "ExternalId"
(\ o ->
ExternalId' <$>
(o .:? "sourceName") <*> (o .:? "id"))
instance ToJSON ExternalId where
toJSON ExternalId'{..}
= object
(catMaybes
[("sourceName" .=) <$> _eiSourceName,
("id" .=) <$> _eiId])
-- | A single CIGAR operation.
--
-- /See:/ 'cigarUnit' smart constructor.
data CigarUnit = CigarUnit'
{ _cuOperation :: !(Maybe CigarUnitOperation)
, _cuOperationLength :: !(Maybe (Textual Int64))
, _cuReferenceSequence :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CigarUnit' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cuOperation'
--
-- * 'cuOperationLength'
--
-- * 'cuReferenceSequence'
cigarUnit
:: CigarUnit
cigarUnit =
CigarUnit'
{ _cuOperation = Nothing
, _cuOperationLength = Nothing
, _cuReferenceSequence = Nothing
}
-- | The CIGAR operation.
cuOperation :: Lens' CigarUnit (Maybe CigarUnitOperation)
cuOperation
= lens _cuOperation (\ s a -> s{_cuOperation = a})
-- | The number of genomic bases that the operation runs for. Required.
cuOperationLength :: Lens' CigarUnit (Maybe Int64)
cuOperationLength
= lens _cuOperationLength
(\ s a -> s{_cuOperationLength = a})
. mapping _Coerce
-- | \`referenceSequence\` is only used at mismatches (\`SEQUENCE_MISMATCH\`)
-- and deletions (\`DELETE\`). Filling this field replaces SAM\'s MD tag.
-- If the relevant information is not available, this field is unset.
cuReferenceSequence :: Lens' CigarUnit (Maybe Text)
cuReferenceSequence
= lens _cuReferenceSequence
(\ s a -> s{_cuReferenceSequence = a})
instance FromJSON CigarUnit where
parseJSON
= withObject "CigarUnit"
(\ o ->
CigarUnit' <$>
(o .:? "operation") <*> (o .:? "operationLength") <*>
(o .:? "referenceSequence"))
instance ToJSON CigarUnit where
toJSON CigarUnit'{..}
= object
(catMaybes
[("operation" .=) <$> _cuOperation,
("operationLength" .=) <$> _cuOperationLength,
("referenceSequence" .=) <$> _cuReferenceSequence])
-- | Defines an Identity and Access Management (IAM) policy. It is used to
-- specify access control policies for Cloud Platform resources. A
-- \`Policy\` consists of a list of \`bindings\`. A \`Binding\` binds a
-- list of \`members\` to a \`role\`, where the members can be user
-- accounts, Google groups, Google domains, and service accounts. A
-- \`role\` is a named list of permissions defined by IAM. **Example** {
-- \"bindings\": [ { \"role\": \"roles\/owner\", \"members\": [
-- \"user:mike\'example.com\", \"group:admins\'example.com\",
-- \"domain:google.com\",
-- \"serviceAccount:my-other-app\'appspot.gserviceaccount.com\", ] }, {
-- \"role\": \"roles\/viewer\", \"members\": [\"user:sean\'example.com\"] }
-- ] } For a description of IAM and its features, see the [IAM developer\'s
-- guide](https:\/\/cloud.google.com\/iam).
--
-- /See:/ 'policy' smart constructor.
data Policy = Policy'
{ _polEtag :: !(Maybe Bytes)
, _polVersion :: !(Maybe (Textual Int32))
, _polBindings :: !(Maybe [Binding])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Policy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'polEtag'
--
-- * 'polVersion'
--
-- * 'polBindings'
policy
:: Policy
policy =
Policy'
{ _polEtag = Nothing
, _polVersion = Nothing
, _polBindings = Nothing
}
-- | \`etag\` is used for optimistic concurrency control as a way to help
-- prevent simultaneous updates of a policy from overwriting each other. It
-- is strongly suggested that systems make use of the \`etag\` in the
-- read-modify-write cycle to perform policy updates in order to avoid race
-- conditions: An \`etag\` is returned in the response to \`getIamPolicy\`,
-- and systems are expected to put that etag in the request to
-- \`setIamPolicy\` to ensure that their change will be applied to the same
-- version of the policy. If no \`etag\` is provided in the call to
-- \`setIamPolicy\`, then the existing policy is overwritten blindly.
polEtag :: Lens' Policy (Maybe ByteString)
polEtag
= lens _polEtag (\ s a -> s{_polEtag = a}) .
mapping _Bytes
-- | Version of the \`Policy\`. The default version is 0.
polVersion :: Lens' Policy (Maybe Int32)
polVersion
= lens _polVersion (\ s a -> s{_polVersion = a}) .
mapping _Coerce
-- | Associates a list of \`members\` to a \`role\`. Multiple \`bindings\`
-- must not be specified for the same \`role\`. \`bindings\` with no
-- members will result in an error.
polBindings :: Lens' Policy [Binding]
polBindings
= lens _polBindings (\ s a -> s{_polBindings = a}) .
_Default
. _Coerce
instance FromJSON Policy where
parseJSON
= withObject "Policy"
(\ o ->
Policy' <$>
(o .:? "etag") <*> (o .:? "version") <*>
(o .:? "bindings" .!= mempty))
instance ToJSON Policy where
toJSON Policy'{..}
= object
(catMaybes
[("etag" .=) <$> _polEtag,
("version" .=) <$> _polVersion,
("bindings" .=) <$> _polBindings])
-- | The variant data export request.
--
-- /See:/ 'exportVariantSetRequest' smart constructor.
data ExportVariantSetRequest = ExportVariantSetRequest'
{ _evsrBigQueryDataSet :: !(Maybe Text)
, _evsrBigQueryTable :: !(Maybe Text)
, _evsrFormat :: !(Maybe ExportVariantSetRequestFormat)
, _evsrCallSetIds :: !(Maybe [Text])
, _evsrProjectId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ExportVariantSetRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'evsrBigQueryDataSet'
--
-- * 'evsrBigQueryTable'
--
-- * 'evsrFormat'
--
-- * 'evsrCallSetIds'
--
-- * 'evsrProjectId'
exportVariantSetRequest
:: ExportVariantSetRequest
exportVariantSetRequest =
ExportVariantSetRequest'
{ _evsrBigQueryDataSet = Nothing
, _evsrBigQueryTable = Nothing
, _evsrFormat = Nothing
, _evsrCallSetIds = Nothing
, _evsrProjectId = Nothing
}
-- | Required. The BigQuery dataset to export data to. This dataset must
-- already exist. Note that this is distinct from the Genomics concept of
-- \"dataset\".
evsrBigQueryDataSet :: Lens' ExportVariantSetRequest (Maybe Text)
evsrBigQueryDataSet
= lens _evsrBigQueryDataSet
(\ s a -> s{_evsrBigQueryDataSet = a})
-- | Required. The BigQuery table to export data to. If the table doesn\'t
-- exist, it will be created. If it already exists, it will be overwritten.
evsrBigQueryTable :: Lens' ExportVariantSetRequest (Maybe Text)
evsrBigQueryTable
= lens _evsrBigQueryTable
(\ s a -> s{_evsrBigQueryTable = a})
-- | The format for the exported data.
evsrFormat :: Lens' ExportVariantSetRequest (Maybe ExportVariantSetRequestFormat)
evsrFormat
= lens _evsrFormat (\ s a -> s{_evsrFormat = a})
-- | If provided, only variant call information from the specified call sets
-- will be exported. By default all variant calls are exported.
evsrCallSetIds :: Lens' ExportVariantSetRequest [Text]
evsrCallSetIds
= lens _evsrCallSetIds
(\ s a -> s{_evsrCallSetIds = a})
. _Default
. _Coerce
-- | Required. The Google Cloud project ID that owns the destination BigQuery
-- dataset. The caller must have WRITE access to this project. This project
-- will also own the resulting export job.
evsrProjectId :: Lens' ExportVariantSetRequest (Maybe Text)
evsrProjectId
= lens _evsrProjectId
(\ s a -> s{_evsrProjectId = a})
instance FromJSON ExportVariantSetRequest where
parseJSON
= withObject "ExportVariantSetRequest"
(\ o ->
ExportVariantSetRequest' <$>
(o .:? "bigqueryDataset") <*> (o .:? "bigqueryTable")
<*> (o .:? "format")
<*> (o .:? "callSetIds" .!= mempty)
<*> (o .:? "projectId"))
instance ToJSON ExportVariantSetRequest where
toJSON ExportVariantSetRequest'{..}
= object
(catMaybes
[("bigqueryDataset" .=) <$> _evsrBigQueryDataSet,
("bigqueryTable" .=) <$> _evsrBigQueryTable,
("format" .=) <$> _evsrFormat,
("callSetIds" .=) <$> _evsrCallSetIds,
("projectId" .=) <$> _evsrProjectId])
-- | Metadata describing an Operation.
--
-- /See:/ 'operationMetadata' smart constructor.
data OperationMetadata = OperationMetadata'
{ _omClientId :: !(Maybe Text)
, _omStartTime :: !(Maybe DateTime')
, _omEvents :: !(Maybe [OperationEvent])
, _omEndTime :: !(Maybe DateTime')
, _omLabels :: !(Maybe OperationMetadataLabels)
, _omProjectId :: !(Maybe Text)
, _omCreateTime :: !(Maybe DateTime')
, _omRuntimeMetadata :: !(Maybe OperationMetadataRuntimeMetadata)
, _omRequest :: !(Maybe OperationMetadataRequest)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'omClientId'
--
-- * 'omStartTime'
--
-- * 'omEvents'
--
-- * 'omEndTime'
--
-- * 'omLabels'
--
-- * 'omProjectId'
--
-- * 'omCreateTime'
--
-- * 'omRuntimeMetadata'
--
-- * 'omRequest'
operationMetadata
:: OperationMetadata
operationMetadata =
OperationMetadata'
{ _omClientId = Nothing
, _omStartTime = Nothing
, _omEvents = Nothing
, _omEndTime = Nothing
, _omLabels = Nothing
, _omProjectId = Nothing
, _omCreateTime = Nothing
, _omRuntimeMetadata = Nothing
, _omRequest = Nothing
}
-- | This field is deprecated. Use \`labels\` instead. Optionally provided by
-- the caller when submitting the request that creates the operation.
omClientId :: Lens' OperationMetadata (Maybe Text)
omClientId
= lens _omClientId (\ s a -> s{_omClientId = a})
-- | The time at which the job began to run.
omStartTime :: Lens' OperationMetadata (Maybe UTCTime)
omStartTime
= lens _omStartTime (\ s a -> s{_omStartTime = a}) .
mapping _DateTime
-- | Optional event messages that were generated during the job\'s execution.
-- This also contains any warnings that were generated during import or
-- export.
omEvents :: Lens' OperationMetadata [OperationEvent]
omEvents
= lens _omEvents (\ s a -> s{_omEvents = a}) .
_Default
. _Coerce
-- | The time at which the job stopped running.
omEndTime :: Lens' OperationMetadata (Maybe UTCTime)
omEndTime
= lens _omEndTime (\ s a -> s{_omEndTime = a}) .
mapping _DateTime
-- | Optionally provided by the caller when submitting the request that
-- creates the operation.
omLabels :: Lens' OperationMetadata (Maybe OperationMetadataLabels)
omLabels = lens _omLabels (\ s a -> s{_omLabels = a})
-- | The Google Cloud Project in which the job is scoped.
omProjectId :: Lens' OperationMetadata (Maybe Text)
omProjectId
= lens _omProjectId (\ s a -> s{_omProjectId = a})
-- | The time at which the job was submitted to the Genomics service.
omCreateTime :: Lens' OperationMetadata (Maybe UTCTime)
omCreateTime
= lens _omCreateTime (\ s a -> s{_omCreateTime = a})
. mapping _DateTime
-- | Runtime metadata on this Operation.
omRuntimeMetadata :: Lens' OperationMetadata (Maybe OperationMetadataRuntimeMetadata)
omRuntimeMetadata
= lens _omRuntimeMetadata
(\ s a -> s{_omRuntimeMetadata = a})
-- | The original request that started the operation. Note that this will be
-- in the current version of the API. If the operation was started with the
-- v1beta2 API and a GetOperation is performed on the v1 API, a v1 request
-- will be returned.
omRequest :: Lens' OperationMetadata (Maybe OperationMetadataRequest)
omRequest
= lens _omRequest (\ s a -> s{_omRequest = a})
instance FromJSON OperationMetadata where
parseJSON
= withObject "OperationMetadata"
(\ o ->
OperationMetadata' <$>
(o .:? "clientId") <*> (o .:? "startTime") <*>
(o .:? "events" .!= mempty)
<*> (o .:? "endTime")
<*> (o .:? "labels")
<*> (o .:? "projectId")
<*> (o .:? "createTime")
<*> (o .:? "runtimeMetadata")
<*> (o .:? "request"))
instance ToJSON OperationMetadata where
toJSON OperationMetadata'{..}
= object
(catMaybes
[("clientId" .=) <$> _omClientId,
("startTime" .=) <$> _omStartTime,
("events" .=) <$> _omEvents,
("endTime" .=) <$> _omEndTime,
("labels" .=) <$> _omLabels,
("projectId" .=) <$> _omProjectId,
("createTime" .=) <$> _omCreateTime,
("runtimeMetadata" .=) <$> _omRuntimeMetadata,
("request" .=) <$> _omRequest])
-- | The annotation search request.
--
-- /See:/ 'searchAnnotationsRequest' smart constructor.
data SearchAnnotationsRequest = SearchAnnotationsRequest'
{ _sarStart :: !(Maybe (Textual Int64))
, _sarReferenceId :: !(Maybe Text)
, _sarReferenceName :: !(Maybe Text)
, _sarEnd :: !(Maybe (Textual Int64))
, _sarPageToken :: !(Maybe Text)
, _sarAnnotationSetIds :: !(Maybe [Text])
, _sarPageSize :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchAnnotationsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sarStart'
--
-- * 'sarReferenceId'
--
-- * 'sarReferenceName'
--
-- * 'sarEnd'
--
-- * 'sarPageToken'
--
-- * 'sarAnnotationSetIds'
--
-- * 'sarPageSize'
searchAnnotationsRequest
:: SearchAnnotationsRequest
searchAnnotationsRequest =
SearchAnnotationsRequest'
{ _sarStart = Nothing
, _sarReferenceId = Nothing
, _sarReferenceName = Nothing
, _sarEnd = Nothing
, _sarPageToken = Nothing
, _sarAnnotationSetIds = Nothing
, _sarPageSize = Nothing
}
-- | The start position of the range on the reference, 0-based inclusive. If
-- specified, referenceId or referenceName must be specified. Defaults to
-- 0.
sarStart :: Lens' SearchAnnotationsRequest (Maybe Int64)
sarStart
= lens _sarStart (\ s a -> s{_sarStart = a}) .
mapping _Coerce
-- | The ID of the reference to query.
sarReferenceId :: Lens' SearchAnnotationsRequest (Maybe Text)
sarReferenceId
= lens _sarReferenceId
(\ s a -> s{_sarReferenceId = a})
-- | The name of the reference to query, within the reference set associated
-- with this query.
sarReferenceName :: Lens' SearchAnnotationsRequest (Maybe Text)
sarReferenceName
= lens _sarReferenceName
(\ s a -> s{_sarReferenceName = a})
-- | The end position of the range on the reference, 0-based exclusive. If
-- specified, referenceId or referenceName must be specified. Defaults to
-- the length of the reference.
sarEnd :: Lens' SearchAnnotationsRequest (Maybe Int64)
sarEnd
= lens _sarEnd (\ s a -> s{_sarEnd = a}) .
mapping _Coerce
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
sarPageToken :: Lens' SearchAnnotationsRequest (Maybe Text)
sarPageToken
= lens _sarPageToken (\ s a -> s{_sarPageToken = a})
-- | Required. The annotation sets to search within. The caller must have
-- \`READ\` access to these annotation sets. All queried annotation sets
-- must have the same type.
sarAnnotationSetIds :: Lens' SearchAnnotationsRequest [Text]
sarAnnotationSetIds
= lens _sarAnnotationSetIds
(\ s a -> s{_sarAnnotationSetIds = a})
. _Default
. _Coerce
-- | The maximum number of results to return in a single page. If
-- unspecified, defaults to 256. The maximum value is 2048.
sarPageSize :: Lens' SearchAnnotationsRequest (Maybe Int32)
sarPageSize
= lens _sarPageSize (\ s a -> s{_sarPageSize = a}) .
mapping _Coerce
instance FromJSON SearchAnnotationsRequest where
parseJSON
= withObject "SearchAnnotationsRequest"
(\ o ->
SearchAnnotationsRequest' <$>
(o .:? "start") <*> (o .:? "referenceId") <*>
(o .:? "referenceName")
<*> (o .:? "end")
<*> (o .:? "pageToken")
<*> (o .:? "annotationSetIds" .!= mempty)
<*> (o .:? "pageSize"))
instance ToJSON SearchAnnotationsRequest where
toJSON SearchAnnotationsRequest'{..}
= object
(catMaybes
[("start" .=) <$> _sarStart,
("referenceId" .=) <$> _sarReferenceId,
("referenceName" .=) <$> _sarReferenceName,
("end" .=) <$> _sarEnd,
("pageToken" .=) <$> _sarPageToken,
("annotationSetIds" .=) <$> _sarAnnotationSetIds,
("pageSize" .=) <$> _sarPageSize])
-- | The search variant sets response.
--
-- /See:/ 'searchVariantSetsResponse' smart constructor.
data SearchVariantSetsResponse = SearchVariantSetsResponse'
{ _svsrNextPageToken :: !(Maybe Text)
, _svsrVariantSets :: !(Maybe [VariantSet])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchVariantSetsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'svsrNextPageToken'
--
-- * 'svsrVariantSets'
searchVariantSetsResponse
:: SearchVariantSetsResponse
searchVariantSetsResponse =
SearchVariantSetsResponse'
{ _svsrNextPageToken = Nothing
, _svsrVariantSets = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
svsrNextPageToken :: Lens' SearchVariantSetsResponse (Maybe Text)
svsrNextPageToken
= lens _svsrNextPageToken
(\ s a -> s{_svsrNextPageToken = a})
-- | The variant sets belonging to the requested dataset.
svsrVariantSets :: Lens' SearchVariantSetsResponse [VariantSet]
svsrVariantSets
= lens _svsrVariantSets
(\ s a -> s{_svsrVariantSets = a})
. _Default
. _Coerce
instance FromJSON SearchVariantSetsResponse where
parseJSON
= withObject "SearchVariantSetsResponse"
(\ o ->
SearchVariantSetsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "variantSets" .!= mempty))
instance ToJSON SearchVariantSetsResponse where
toJSON SearchVariantSetsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _svsrNextPageToken,
("variantSets" .=) <$> _svsrVariantSets])
-- | The variant search request.
--
-- /See:/ 'searchVariantsRequest' smart constructor.
data SearchVariantsRequest = SearchVariantsRequest'
{ _svrStart :: !(Maybe (Textual Int64))
, _svrCallSetIds :: !(Maybe [Text])
, _svrReferenceName :: !(Maybe Text)
, _svrEnd :: !(Maybe (Textual Int64))
, _svrMaxCalls :: !(Maybe (Textual Int32))
, _svrPageToken :: !(Maybe Text)
, _svrVariantName :: !(Maybe Text)
, _svrVariantSetIds :: !(Maybe [Text])
, _svrPageSize :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchVariantsRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'svrStart'
--
-- * 'svrCallSetIds'
--
-- * 'svrReferenceName'
--
-- * 'svrEnd'
--
-- * 'svrMaxCalls'
--
-- * 'svrPageToken'
--
-- * 'svrVariantName'
--
-- * 'svrVariantSetIds'
--
-- * 'svrPageSize'
searchVariantsRequest
:: SearchVariantsRequest
searchVariantsRequest =
SearchVariantsRequest'
{ _svrStart = Nothing
, _svrCallSetIds = Nothing
, _svrReferenceName = Nothing
, _svrEnd = Nothing
, _svrMaxCalls = Nothing
, _svrPageToken = Nothing
, _svrVariantName = Nothing
, _svrVariantSetIds = Nothing
, _svrPageSize = Nothing
}
-- | The beginning of the window (0-based, inclusive) for which overlapping
-- variants should be returned. If unspecified, defaults to 0.
svrStart :: Lens' SearchVariantsRequest (Maybe Int64)
svrStart
= lens _svrStart (\ s a -> s{_svrStart = a}) .
mapping _Coerce
-- | Only return variant calls which belong to call sets with these ids.
-- Leaving this blank returns all variant calls. If a variant has no calls
-- belonging to any of these call sets, it won\'t be returned at all.
svrCallSetIds :: Lens' SearchVariantsRequest [Text]
svrCallSetIds
= lens _svrCallSetIds
(\ s a -> s{_svrCallSetIds = a})
. _Default
. _Coerce
-- | Required. Only return variants in this reference sequence.
svrReferenceName :: Lens' SearchVariantsRequest (Maybe Text)
svrReferenceName
= lens _svrReferenceName
(\ s a -> s{_svrReferenceName = a})
-- | The end of the window, 0-based exclusive. If unspecified or 0, defaults
-- to the length of the reference.
svrEnd :: Lens' SearchVariantsRequest (Maybe Int64)
svrEnd
= lens _svrEnd (\ s a -> s{_svrEnd = a}) .
mapping _Coerce
-- | The maximum number of calls to return in a single page. Note that this
-- limit may be exceeded in the event that a matching variant contains more
-- calls than the requested maximum. If unspecified, defaults to 5000. The
-- maximum value is 10000.
svrMaxCalls :: Lens' SearchVariantsRequest (Maybe Int32)
svrMaxCalls
= lens _svrMaxCalls (\ s a -> s{_svrMaxCalls = a}) .
mapping _Coerce
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \`nextPageToken\` from the previous response.
svrPageToken :: Lens' SearchVariantsRequest (Maybe Text)
svrPageToken
= lens _svrPageToken (\ s a -> s{_svrPageToken = a})
-- | Only return variants which have exactly this name.
svrVariantName :: Lens' SearchVariantsRequest (Maybe Text)
svrVariantName
= lens _svrVariantName
(\ s a -> s{_svrVariantName = a})
-- | At most one variant set ID must be provided. Only variants from this
-- variant set will be returned. If omitted, a call set id must be included
-- in the request.
svrVariantSetIds :: Lens' SearchVariantsRequest [Text]
svrVariantSetIds
= lens _svrVariantSetIds
(\ s a -> s{_svrVariantSetIds = a})
. _Default
. _Coerce
-- | The maximum number of variants to return in a single page. If
-- unspecified, defaults to 5000. The maximum value is 10000.
svrPageSize :: Lens' SearchVariantsRequest (Maybe Int32)
svrPageSize
= lens _svrPageSize (\ s a -> s{_svrPageSize = a}) .
mapping _Coerce
instance FromJSON SearchVariantsRequest where
parseJSON
= withObject "SearchVariantsRequest"
(\ o ->
SearchVariantsRequest' <$>
(o .:? "start") <*> (o .:? "callSetIds" .!= mempty)
<*> (o .:? "referenceName")
<*> (o .:? "end")
<*> (o .:? "maxCalls")
<*> (o .:? "pageToken")
<*> (o .:? "variantName")
<*> (o .:? "variantSetIds" .!= mempty)
<*> (o .:? "pageSize"))
instance ToJSON SearchVariantsRequest where
toJSON SearchVariantsRequest'{..}
= object
(catMaybes
[("start" .=) <$> _svrStart,
("callSetIds" .=) <$> _svrCallSetIds,
("referenceName" .=) <$> _svrReferenceName,
("end" .=) <$> _svrEnd,
("maxCalls" .=) <$> _svrMaxCalls,
("pageToken" .=) <$> _svrPageToken,
("variantName" .=) <$> _svrVariantName,
("variantSetIds" .=) <$> _svrVariantSetIds,
("pageSize" .=) <$> _svrPageSize])
-- | A map of additional annotation set information. This must be of the
-- form map (string key mapping to a list of string values).
--
-- /See:/ 'annotationSetInfo' smart constructor.
newtype AnnotationSetInfo = AnnotationSetInfo'
{ _asiAddtional :: HashMap Text [JSONValue]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnnotationSetInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asiAddtional'
annotationSetInfo
:: HashMap Text [JSONValue] -- ^ 'asiAddtional'
-> AnnotationSetInfo
annotationSetInfo pAsiAddtional_ =
AnnotationSetInfo'
{ _asiAddtional = _Coerce # pAsiAddtional_
}
asiAddtional :: Lens' AnnotationSetInfo (HashMap Text [JSONValue])
asiAddtional
= lens _asiAddtional (\ s a -> s{_asiAddtional = a})
. _Coerce
instance FromJSON AnnotationSetInfo where
parseJSON
= withObject "AnnotationSetInfo"
(\ o -> AnnotationSetInfo' <$> (parseJSONObject o))
instance ToJSON AnnotationSetInfo where
toJSON = toJSON . _asiAddtional
-- | Describes a Compute Engine resource that is being managed by a running
-- pipeline.
--
-- /See:/ 'computeEngine' smart constructor.
data ComputeEngine = ComputeEngine'
{ _ceZone :: !(Maybe Text)
, _ceDiskNames :: !(Maybe [Text])
, _ceMachineType :: !(Maybe Text)
, _ceInstanceName :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ComputeEngine' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ceZone'
--
-- * 'ceDiskNames'
--
-- * 'ceMachineType'
--
-- * 'ceInstanceName'
computeEngine
:: ComputeEngine
computeEngine =
ComputeEngine'
{ _ceZone = Nothing
, _ceDiskNames = Nothing
, _ceMachineType = Nothing
, _ceInstanceName = Nothing
}
-- | The availability zone in which the instance resides.
ceZone :: Lens' ComputeEngine (Maybe Text)
ceZone = lens _ceZone (\ s a -> s{_ceZone = a})
-- | The names of the disks that were created for this pipeline.
ceDiskNames :: Lens' ComputeEngine [Text]
ceDiskNames
= lens _ceDiskNames (\ s a -> s{_ceDiskNames = a}) .
_Default
. _Coerce
-- | The machine type of the instance.
ceMachineType :: Lens' ComputeEngine (Maybe Text)
ceMachineType
= lens _ceMachineType
(\ s a -> s{_ceMachineType = a})
-- | The instance on which the operation is running.
ceInstanceName :: Lens' ComputeEngine (Maybe Text)
ceInstanceName
= lens _ceInstanceName
(\ s a -> s{_ceInstanceName = a})
instance FromJSON ComputeEngine where
parseJSON
= withObject "ComputeEngine"
(\ o ->
ComputeEngine' <$>
(o .:? "zone") <*> (o .:? "diskNames" .!= mempty) <*>
(o .:? "machineType")
<*> (o .:? "instanceName"))
instance ToJSON ComputeEngine where
toJSON ComputeEngine'{..}
= object
(catMaybes
[("zone" .=) <$> _ceZone,
("diskNames" .=) <$> _ceDiskNames,
("machineType" .=) <$> _ceMachineType,
("instanceName" .=) <$> _ceInstanceName])
-- | The annotation set search response.
--
-- /See:/ 'searchAnnotationSetsResponse' smart constructor.
data SearchAnnotationSetsResponse = SearchAnnotationSetsResponse'
{ _sasrNextPageToken :: !(Maybe Text)
, _sasrAnnotationSets :: !(Maybe [AnnotationSet])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchAnnotationSetsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sasrNextPageToken'
--
-- * 'sasrAnnotationSets'
searchAnnotationSetsResponse
:: SearchAnnotationSetsResponse
searchAnnotationSetsResponse =
SearchAnnotationSetsResponse'
{ _sasrNextPageToken = Nothing
, _sasrAnnotationSets = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
sasrNextPageToken :: Lens' SearchAnnotationSetsResponse (Maybe Text)
sasrNextPageToken
= lens _sasrNextPageToken
(\ s a -> s{_sasrNextPageToken = a})
-- | The matching annotation sets.
sasrAnnotationSets :: Lens' SearchAnnotationSetsResponse [AnnotationSet]
sasrAnnotationSets
= lens _sasrAnnotationSets
(\ s a -> s{_sasrAnnotationSets = a})
. _Default
. _Coerce
instance FromJSON SearchAnnotationSetsResponse where
parseJSON
= withObject "SearchAnnotationSetsResponse"
(\ o ->
SearchAnnotationSetsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "annotationSets" .!= mempty))
instance ToJSON SearchAnnotationSetsResponse where
toJSON SearchAnnotationSetsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _sasrNextPageToken,
("annotationSets" .=) <$> _sasrAnnotationSets])
-- | A map of additional call set information. This must be of the form map
-- (string key mapping to a list of string values).
--
-- /See:/ 'callSetInfo' smart constructor.
newtype CallSetInfo = CallSetInfo'
{ _csiAddtional :: HashMap Text [JSONValue]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CallSetInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'csiAddtional'
callSetInfo
:: HashMap Text [JSONValue] -- ^ 'csiAddtional'
-> CallSetInfo
callSetInfo pCsiAddtional_ =
CallSetInfo'
{ _csiAddtional = _Coerce # pCsiAddtional_
}
csiAddtional :: Lens' CallSetInfo (HashMap Text [JSONValue])
csiAddtional
= lens _csiAddtional (\ s a -> s{_csiAddtional = a})
. _Coerce
instance FromJSON CallSetInfo where
parseJSON
= withObject "CallSetInfo"
(\ o -> CallSetInfo' <$> (parseJSONObject o))
instance ToJSON CallSetInfo where
toJSON = toJSON . _csiAddtional
-- | Remaining structured metadata key-value pairs. This must be of the form
-- map (string key mapping to a list of string values).
--
-- /See:/ 'variantSetMetadataInfo' smart constructor.
newtype VariantSetMetadataInfo = VariantSetMetadataInfo'
{ _vsmiAddtional :: HashMap Text [JSONValue]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'VariantSetMetadataInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vsmiAddtional'
variantSetMetadataInfo
:: HashMap Text [JSONValue] -- ^ 'vsmiAddtional'
-> VariantSetMetadataInfo
variantSetMetadataInfo pVsmiAddtional_ =
VariantSetMetadataInfo'
{ _vsmiAddtional = _Coerce # pVsmiAddtional_
}
vsmiAddtional :: Lens' VariantSetMetadataInfo (HashMap Text [JSONValue])
vsmiAddtional
= lens _vsmiAddtional
(\ s a -> s{_vsmiAddtional = a})
. _Coerce
instance FromJSON VariantSetMetadataInfo where
parseJSON
= withObject "VariantSetMetadataInfo"
(\ o ->
VariantSetMetadataInfo' <$> (parseJSONObject o))
instance ToJSON VariantSetMetadataInfo where
toJSON = toJSON . _vsmiAddtional
-- | If importing ReadGroupSets, an ImportReadGroupSetsResponse is returned.
-- If importing Variants, an ImportVariantsResponse is returned. For
-- pipelines and exports, an empty response is returned.
--
-- /See:/ 'operationResponse' smart constructor.
newtype OperationResponse = OperationResponse'
{ _orAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'orAddtional'
operationResponse
:: HashMap Text JSONValue -- ^ 'orAddtional'
-> OperationResponse
operationResponse pOrAddtional_ =
OperationResponse'
{ _orAddtional = _Coerce # pOrAddtional_
}
-- | Properties of the object. Contains field \'type with type URL.
orAddtional :: Lens' OperationResponse (HashMap Text JSONValue)
orAddtional
= lens _orAddtional (\ s a -> s{_orAddtional = a}) .
_Coerce
instance FromJSON OperationResponse where
parseJSON
= withObject "OperationResponse"
(\ o -> OperationResponse' <$> (parseJSONObject o))
instance ToJSON OperationResponse where
toJSON = toJSON . _orAddtional
-- | A transcript represents the assertion that a particular region of the
-- reference genome may be transcribed as RNA.
--
-- /See:/ 'transcript' smart constructor.
data Transcript = Transcript'
{ _tGeneId :: !(Maybe Text)
, _tCodingSequence :: !(Maybe CodingSequence)
, _tExons :: !(Maybe [Exon])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Transcript' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tGeneId'
--
-- * 'tCodingSequence'
--
-- * 'tExons'
transcript
:: Transcript
transcript =
Transcript'
{ _tGeneId = Nothing
, _tCodingSequence = Nothing
, _tExons = Nothing
}
-- | The annotation ID of the gene from which this transcript is transcribed.
tGeneId :: Lens' Transcript (Maybe Text)
tGeneId = lens _tGeneId (\ s a -> s{_tGeneId = a})
-- | The range of the coding sequence for this transcript, if any. To
-- determine the exact ranges of coding sequence, intersect this range with
-- those of the exons, if any. If there are any exons, the codingSequence
-- must start and end within them. Note that in some cases, the reference
-- genome will not exactly match the observed mRNA transcript e.g. due to
-- variation in the source genome relative to the reference. In these cases, exon.frame
-- will not necessarily match the expected reference reading frame and
-- coding exon reference bases cannot necessarily be concatenated to
-- produce the original transcript mRNA.
tCodingSequence :: Lens' Transcript (Maybe CodingSequence)
tCodingSequence
= lens _tCodingSequence
(\ s a -> s{_tCodingSequence = a})
-- | The <http://en.wikipedia.org/wiki/Exon exons> that compose this
-- transcript. This field should be unset for genomes where transcript
-- splicing does not occur, for example prokaryotes. Introns are regions of
-- the transcript that are not included in the spliced RNA product. Though
-- not explicitly modeled here, intron ranges can be deduced; all regions
-- of this transcript that are not exons are introns. Exonic sequences do
-- not necessarily code for a translational product (amino acids). Only the
-- regions of exons bounded by the codingSequence correspond to coding DNA
-- sequence. Exons are ordered by start position and may not overlap.
tExons :: Lens' Transcript [Exon]
tExons
= lens _tExons (\ s a -> s{_tExons = a}) . _Default .
_Coerce
instance FromJSON Transcript where
parseJSON
= withObject "Transcript"
(\ o ->
Transcript' <$>
(o .:? "geneId") <*> (o .:? "codingSequence") <*>
(o .:? "exons" .!= mempty))
instance ToJSON Transcript where
toJSON Transcript'{..}
= object
(catMaybes
[("geneId" .=) <$> _tGeneId,
("codingSequence" .=) <$> _tCodingSequence,
("exons" .=) <$> _tExons])
-- | An event that occurred during an Operation.
--
-- /See:/ 'operationEvent' smart constructor.
data OperationEvent = OperationEvent'
{ _oeStartTime :: !(Maybe DateTime')
, _oeEndTime :: !(Maybe DateTime')
, _oeDescription :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'OperationEvent' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oeStartTime'
--
-- * 'oeEndTime'
--
-- * 'oeDescription'
operationEvent
:: OperationEvent
operationEvent =
OperationEvent'
{ _oeStartTime = Nothing
, _oeEndTime = Nothing
, _oeDescription = Nothing
}
-- | Optional time of when event started.
oeStartTime :: Lens' OperationEvent (Maybe UTCTime)
oeStartTime
= lens _oeStartTime (\ s a -> s{_oeStartTime = a}) .
mapping _DateTime
-- | Optional time of when event finished. An event can have a start time and
-- no finish time. If an event has a finish time, there must be a start
-- time.
oeEndTime :: Lens' OperationEvent (Maybe UTCTime)
oeEndTime
= lens _oeEndTime (\ s a -> s{_oeEndTime = a}) .
mapping _DateTime
-- | Required description of event.
oeDescription :: Lens' OperationEvent (Maybe Text)
oeDescription
= lens _oeDescription
(\ s a -> s{_oeDescription = a})
instance FromJSON OperationEvent where
parseJSON
= withObject "OperationEvent"
(\ o ->
OperationEvent' <$>
(o .:? "startTime") <*> (o .:? "endTime") <*>
(o .:? "description"))
instance ToJSON OperationEvent where
toJSON OperationEvent'{..}
= object
(catMaybes
[("startTime" .=) <$> _oeStartTime,
("endTime" .=) <$> _oeEndTime,
("description" .=) <$> _oeDescription])
-- | ReferenceBound records an upper bound for the starting coordinate of
-- variants in a particular reference.
--
-- /See:/ 'referenceBound' smart constructor.
data ReferenceBound = ReferenceBound'
{ _rbUpperBound :: !(Maybe (Textual Int64))
, _rbReferenceName :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReferenceBound' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rbUpperBound'
--
-- * 'rbReferenceName'
referenceBound
:: ReferenceBound
referenceBound =
ReferenceBound'
{ _rbUpperBound = Nothing
, _rbReferenceName = Nothing
}
-- | An upper bound (inclusive) on the starting coordinate of any variant in
-- the reference sequence.
rbUpperBound :: Lens' ReferenceBound (Maybe Int64)
rbUpperBound
= lens _rbUpperBound (\ s a -> s{_rbUpperBound = a})
. mapping _Coerce
-- | The name of the reference associated with this reference bound.
rbReferenceName :: Lens' ReferenceBound (Maybe Text)
rbReferenceName
= lens _rbReferenceName
(\ s a -> s{_rbReferenceName = a})
instance FromJSON ReferenceBound where
parseJSON
= withObject "ReferenceBound"
(\ o ->
ReferenceBound' <$>
(o .:? "upperBound") <*> (o .:? "referenceName"))
instance ToJSON ReferenceBound where
toJSON ReferenceBound'{..}
= object
(catMaybes
[("upperBound" .=) <$> _rbUpperBound,
("referenceName" .=) <$> _rbReferenceName])
--
-- /See:/ 'undeleteDataSetRequest' smart constructor.
data UndeleteDataSetRequest =
UndeleteDataSetRequest'
deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UndeleteDataSetRequest' with the minimum fields required to make a request.
--
undeleteDataSetRequest
:: UndeleteDataSetRequest
undeleteDataSetRequest = UndeleteDataSetRequest'
instance FromJSON UndeleteDataSetRequest where
parseJSON
= withObject "UndeleteDataSetRequest"
(\ o -> pure UndeleteDataSetRequest')
instance ToJSON UndeleteDataSetRequest where
toJSON = const emptyObject
-- | Associates \`members\` with a \`role\`.
--
-- /See:/ 'binding' smart constructor.
data Binding = Binding'
{ _bMembers :: !(Maybe [Text])
, _bRole :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Binding' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bMembers'
--
-- * 'bRole'
binding
:: Binding
binding =
Binding'
{ _bMembers = Nothing
, _bRole = Nothing
}
-- | Specifies the identities requesting access for a Cloud Platform
-- resource. \`members\` can have the following values: * \`allUsers\`: A
-- special identifier that represents anyone who is on the internet; with
-- or without a Google account. * \`allAuthenticatedUsers\`: A special
-- identifier that represents anyone who is authenticated with a Google
-- account or a service account. * \`user:{emailid}\`: An email address
-- that represents a specific Google account. For example,
-- \`alice\'gmail.com\` or \`joe\'example.com\`. *
-- \`serviceAccount:{emailid}\`: An email address that represents a service
-- account. For example, \`my-other-app\'appspot.gserviceaccount.com\`. *
-- \`group:{emailid}\`: An email address that represents a Google group.
-- For example, \`admins\'example.com\`. * \`domain:{domain}\`: A Google
-- Apps domain name that represents all the users of that domain. For
-- example, \`google.com\` or \`example.com\`.
bMembers :: Lens' Binding [Text]
bMembers
= lens _bMembers (\ s a -> s{_bMembers = a}) .
_Default
. _Coerce
-- | Role that is assigned to \`members\`. For example, \`roles\/viewer\`,
-- \`roles\/editor\`, or \`roles\/owner\`. Required
bRole :: Lens' Binding (Maybe Text)
bRole = lens _bRole (\ s a -> s{_bRole = a})
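-- A hypothetical usage sketch (not part of the generated API): constructing a
-- 'Binding' that grants the viewer role to a user and a group via the exported
-- lenses. It assumes the usual lens operators ('&', '.~', '?~') are in scope
-- (e.g. via the gogol prelude re-exports or "Control.Lens") and that
-- OverloadedStrings is enabled for the 'Text' literals.
--
-- > exampleBinding :: Binding
-- > exampleBinding =
-- >   binding & bRole ?~ "roles/viewer"
-- >           & bMembers .~ ["user:alice@example.com", "group:admins@example.com"]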
instance FromJSON Binding where
parseJSON
= withObject "Binding"
(\ o ->
Binding' <$>
(o .:? "members" .!= mempty) <*> (o .:? "role"))
instance ToJSON Binding where
toJSON Binding'{..}
= object
(catMaybes
[("members" .=) <$> _bMembers,
("role" .=) <$> _bRole])
-- | An abstraction for referring to a genomic position, in relation to some
-- already known reference. For now, represents a genomic position as a
-- reference name, a base number on that reference (0-based), and a
-- determination of forward or reverse strand.
--
-- /See:/ 'position' smart constructor.
data Position = Position'
{ _pReverseStrand :: !(Maybe Bool)
, _pReferenceName :: !(Maybe Text)
, _pPosition :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Position' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pReverseStrand'
--
-- * 'pReferenceName'
--
-- * 'pPosition'
position
:: Position
position =
Position'
{ _pReverseStrand = Nothing
, _pReferenceName = Nothing
, _pPosition = Nothing
}
-- | Whether this position is on the reverse strand, as opposed to the
-- forward strand.
pReverseStrand :: Lens' Position (Maybe Bool)
pReverseStrand
= lens _pReverseStrand
(\ s a -> s{_pReverseStrand = a})
-- | The name of the reference in whatever reference set is being used.
pReferenceName :: Lens' Position (Maybe Text)
pReferenceName
= lens _pReferenceName
(\ s a -> s{_pReferenceName = a})
-- | The 0-based offset from the start of the forward strand for that
-- reference.
pPosition :: Lens' Position (Maybe Int64)
pPosition
= lens _pPosition (\ s a -> s{_pPosition = a}) .
mapping _Coerce
instance FromJSON Position where
parseJSON
= withObject "Position"
(\ o ->
Position' <$>
(o .:? "reverseStrand") <*> (o .:? "referenceName")
<*> (o .:? "position"))
instance ToJSON Position where
toJSON Position'{..}
= object
(catMaybes
[("reverseStrand" .=) <$> _pReverseStrand,
("referenceName" .=) <$> _pReferenceName,
("position" .=) <$> _pPosition])
-- | Runtime metadata that will be populated in the runtimeMetadata field of
-- the Operation associated with a RunPipeline execution.
--
-- /See:/ 'runtimeMetadata' smart constructor.
newtype RuntimeMetadata = RuntimeMetadata'
{ _rmComputeEngine :: Maybe ComputeEngine
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RuntimeMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rmComputeEngine'
runtimeMetadata
:: RuntimeMetadata
runtimeMetadata =
RuntimeMetadata'
{ _rmComputeEngine = Nothing
}
-- | Execution information specific to Google Compute Engine.
rmComputeEngine :: Lens' RuntimeMetadata (Maybe ComputeEngine)
rmComputeEngine
= lens _rmComputeEngine
(\ s a -> s{_rmComputeEngine = a})
instance FromJSON RuntimeMetadata where
parseJSON
= withObject "RuntimeMetadata"
(\ o -> RuntimeMetadata' <$> (o .:? "computeEngine"))
instance ToJSON RuntimeMetadata where
toJSON RuntimeMetadata'{..}
= object
(catMaybes
[("computeEngine" .=) <$> _rmComputeEngine])
-- | The read group set search response.
--
-- /See:/ 'searchReadGroupSetsResponse' smart constructor.
data SearchReadGroupSetsResponse = SearchReadGroupSetsResponse'
{ _srgsrNextPageToken :: !(Maybe Text)
, _srgsrReadGroupSets :: !(Maybe [ReadGroupSet])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SearchReadGroupSetsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'srgsrNextPageToken'
--
-- * 'srgsrReadGroupSets'
searchReadGroupSetsResponse
:: SearchReadGroupSetsResponse
searchReadGroupSetsResponse =
SearchReadGroupSetsResponse'
{ _srgsrNextPageToken = Nothing
, _srgsrReadGroupSets = Nothing
}
-- | The continuation token, which is used to page through large result sets.
-- Provide this value in a subsequent request to return the next page of
-- results. This field will be empty if there aren\'t any additional
-- results.
srgsrNextPageToken :: Lens' SearchReadGroupSetsResponse (Maybe Text)
srgsrNextPageToken
= lens _srgsrNextPageToken
(\ s a -> s{_srgsrNextPageToken = a})
-- | The list of matching read group sets.
srgsrReadGroupSets :: Lens' SearchReadGroupSetsResponse [ReadGroupSet]
srgsrReadGroupSets
= lens _srgsrReadGroupSets
(\ s a -> s{_srgsrReadGroupSets = a})
. _Default
. _Coerce
instance FromJSON SearchReadGroupSetsResponse where
parseJSON
= withObject "SearchReadGroupSetsResponse"
(\ o ->
SearchReadGroupSetsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "readGroupSets" .!= mempty))
instance ToJSON SearchReadGroupSetsResponse where
toJSON SearchReadGroupSetsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _srgsrNextPageToken,
("readGroupSets" .=) <$> _srgsrReadGroupSets])
-- | A map of additional read group set information.
--
-- /See:/ 'readGroupSetInfo' smart constructor.
newtype ReadGroupSetInfo = ReadGroupSetInfo'
{ _rgsiAddtional :: HashMap Text [JSONValue]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReadGroupSetInfo' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rgsiAddtional'
readGroupSetInfo
:: HashMap Text [JSONValue] -- ^ 'rgsiAddtional'
-> ReadGroupSetInfo
readGroupSetInfo pRgsiAddtional_ =
ReadGroupSetInfo'
{ _rgsiAddtional = _Coerce # pRgsiAddtional_
}
rgsiAddtional :: Lens' ReadGroupSetInfo (HashMap Text [JSONValue])
rgsiAddtional
= lens _rgsiAddtional
(\ s a -> s{_rgsiAddtional = a})
. _Coerce
instance FromJSON ReadGroupSetInfo where
parseJSON
= withObject "ReadGroupSetInfo"
(\ o -> ReadGroupSetInfo' <$> (parseJSONObject o))
instance ToJSON ReadGroupSetInfo where
toJSON = toJSON . _rgsiAddtional
-- | A reference set is a set of references which typically comprise a
-- reference assembly for a species, such as \`GRCh38\` which is
-- representative of the human genome. A reference set defines a common
-- coordinate space for comparing reference-aligned experimental data. A
-- reference set contains 1 or more references. For more genomics resource
-- definitions, see [Fundamentals of Google
-- Genomics](https:\/\/cloud.google.com\/genomics\/fundamentals-of-google-genomics)
--
-- /See:/ 'referenceSet' smart constructor.
data ReferenceSet = ReferenceSet'
{ _rsSourceAccessions :: !(Maybe [Text])
, _rsReferenceIds :: !(Maybe [Text])
, _rsMD5checksum :: !(Maybe Text)
, _rsNcbiTaxonId :: !(Maybe (Textual Int32))
, _rsId :: !(Maybe Text)
, _rsAssemblyId :: !(Maybe Text)
, _rsSourceURI :: !(Maybe Text)
, _rsDescription :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReferenceSet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rsSourceAccessions'
--
-- * 'rsReferenceIds'
--
-- * 'rsMD5checksum'
--
-- * 'rsNcbiTaxonId'
--
-- * 'rsId'
--
-- * 'rsAssemblyId'
--
-- * 'rsSourceURI'
--
-- * 'rsDescription'
referenceSet
:: ReferenceSet
referenceSet =
ReferenceSet'
{ _rsSourceAccessions = Nothing
, _rsReferenceIds = Nothing
, _rsMD5checksum = Nothing
, _rsNcbiTaxonId = Nothing
, _rsId = Nothing
, _rsAssemblyId = Nothing
, _rsSourceURI = Nothing
, _rsDescription = Nothing
}
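-- A hypothetical usage sketch (not part of the generated API): filling in a
-- few fields of a 'ReferenceSet' with the exported lenses. Assumes the lens
-- operators ('&', '.~', '?~') are in scope and OverloadedStrings is enabled;
-- the values are placeholders.
--
-- > exampleReferenceSet :: ReferenceSet
-- > exampleReferenceSet =
-- >   referenceSet & rsAssemblyId ?~ "GRCh38"
-- >                & rsNcbiTaxonId ?~ 9606
-- >                & rsSourceAccessions .~ ["NC_000001.11"]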
-- | All known corresponding accession IDs in INSDC (GenBank\/ENA\/DDBJ)
-- ideally with a version number, for example \`NC_000001.11\`.
rsSourceAccessions :: Lens' ReferenceSet [Text]
rsSourceAccessions
= lens _rsSourceAccessions
(\ s a -> s{_rsSourceAccessions = a})
. _Default
. _Coerce
-- | The IDs of the reference objects that are part of this set.
-- \`Reference.md5checksum\` must be unique within this set.
rsReferenceIds :: Lens' ReferenceSet [Text]
rsReferenceIds
= lens _rsReferenceIds
(\ s a -> s{_rsReferenceIds = a})
. _Default
. _Coerce
-- | Order-independent MD5 checksum which identifies this reference set. The
-- checksum is computed by sorting all lower-case hexadecimal
-- \`reference.md5checksum\` strings (for all references in this set) in ascending
-- lexicographic order, concatenating, and taking the MD5 of that value.
-- The resulting value is represented in lower case hexadecimal format.
rsMD5checksum :: Lens' ReferenceSet (Maybe Text)
rsMD5checksum
= lens _rsMD5checksum
(\ s a -> s{_rsMD5checksum = a})
-- | ID from http:\/\/www.ncbi.nlm.nih.gov\/taxonomy (for example, 9606 for
-- human) indicating the species which this reference set is intended to
-- model. Note that contained references may specify a different
-- \`ncbiTaxonId\`, as assemblies may contain reference sequences which do
-- not belong to the modeled species, for example EBV in a human reference
-- genome.
rsNcbiTaxonId :: Lens' ReferenceSet (Maybe Int32)
rsNcbiTaxonId
= lens _rsNcbiTaxonId
(\ s a -> s{_rsNcbiTaxonId = a})
. mapping _Coerce
-- | The server-generated reference set ID, unique across all reference sets.
rsId :: Lens' ReferenceSet (Maybe Text)
rsId = lens _rsId (\ s a -> s{_rsId = a})
-- | Public id of this reference set, such as \`GRCh37\`.
rsAssemblyId :: Lens' ReferenceSet (Maybe Text)
rsAssemblyId
= lens _rsAssemblyId (\ s a -> s{_rsAssemblyId = a})
-- | The URI from which the references were obtained.
rsSourceURI :: Lens' ReferenceSet (Maybe Text)
rsSourceURI
= lens _rsSourceURI (\ s a -> s{_rsSourceURI = a})
-- | Free text description of this reference set.
rsDescription :: Lens' ReferenceSet (Maybe Text)
rsDescription
= lens _rsDescription
(\ s a -> s{_rsDescription = a})
instance FromJSON ReferenceSet where
parseJSON
= withObject "ReferenceSet"
(\ o ->
ReferenceSet' <$>
(o .:? "sourceAccessions" .!= mempty) <*>
(o .:? "referenceIds" .!= mempty)
<*> (o .:? "md5checksum")
<*> (o .:? "ncbiTaxonId")
<*> (o .:? "id")
<*> (o .:? "assemblyId")
<*> (o .:? "sourceUri")
<*> (o .:? "description"))
instance ToJSON ReferenceSet where
toJSON ReferenceSet'{..}
= object
(catMaybes
[("sourceAccessions" .=) <$> _rsSourceAccessions,
("referenceIds" .=) <$> _rsReferenceIds,
("md5checksum" .=) <$> _rsMD5checksum,
("ncbiTaxonId" .=) <$> _rsNcbiTaxonId,
("id" .=) <$> _rsId,
("assemblyId" .=) <$> _rsAssemblyId,
("sourceUri" .=) <$> _rsSourceURI,
("description" .=) <$> _rsDescription])
|
rueshyna/gogol
|
gogol-genomics/gen/Network/Google/Genomics/Types/Product.hs
|
mpl-2.0
| 223,832
| 0
| 27
| 54,330
| 39,026
| 22,521
| 16,505
| 4,276
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Redis.Projects.Locations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists information about the supported locations for this service.
--
-- /See:/ <https://cloud.google.com/memorystore/docs/redis/ Google Cloud Memorystore for Redis API Reference> for @redis.projects.locations.list@.
module Network.Google.Resource.Redis.Projects.Locations.List
(
-- * REST Resource
ProjectsLocationsListResource
-- * Creating a Request
, projectsLocationsList
, ProjectsLocationsList
-- * Request Lenses
, pllXgafv
, pllUploadProtocol
, pllAccessToken
, pllUploadType
, pllName
, pllFilter
, pllPageToken
, pllPageSize
, pllCallback
) where
import Network.Google.Prelude
import Network.Google.Redis.Types
-- | A resource alias for @redis.projects.locations.list@ method which the
-- 'ProjectsLocationsList' request conforms to.
type ProjectsLocationsListResource =
"v1" :>
Capture "name" Text :>
"locations" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListLocationsResponse
-- | Lists information about the supported locations for this service.
--
-- /See:/ 'projectsLocationsList' smart constructor.
data ProjectsLocationsList =
ProjectsLocationsList'
{ _pllXgafv :: !(Maybe Xgafv)
, _pllUploadProtocol :: !(Maybe Text)
, _pllAccessToken :: !(Maybe Text)
, _pllUploadType :: !(Maybe Text)
, _pllName :: !Text
, _pllFilter :: !(Maybe Text)
, _pllPageToken :: !(Maybe Text)
, _pllPageSize :: !(Maybe (Textual Int32))
, _pllCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pllXgafv'
--
-- * 'pllUploadProtocol'
--
-- * 'pllAccessToken'
--
-- * 'pllUploadType'
--
-- * 'pllName'
--
-- * 'pllFilter'
--
-- * 'pllPageToken'
--
-- * 'pllPageSize'
--
-- * 'pllCallback'
projectsLocationsList
:: Text -- ^ 'pllName'
-> ProjectsLocationsList
projectsLocationsList pPllName_ =
ProjectsLocationsList'
{ _pllXgafv = Nothing
, _pllUploadProtocol = Nothing
, _pllAccessToken = Nothing
, _pllUploadType = Nothing
, _pllName = pPllName_
, _pllFilter = Nothing
, _pllPageToken = Nothing
, _pllPageSize = Nothing
, _pllCallback = Nothing
}
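-- A hypothetical usage sketch (not part of the generated module): building a
-- request that lists locations under a project, with a page-size override via
-- the exported lens. Executing it would go through the usual gogol machinery
-- (e.g. 'Network.Google.send' inside 'runGoogle' with a suitably scoped
-- 'Env'); the project name below is a placeholder.
--
-- > exampleList :: ProjectsLocationsList
-- > exampleList =
-- >   projectsLocationsList "projects/my-project" & pllPageSize ?~ 50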
-- | V1 error format.
pllXgafv :: Lens' ProjectsLocationsList (Maybe Xgafv)
pllXgafv = lens _pllXgafv (\ s a -> s{_pllXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pllUploadProtocol :: Lens' ProjectsLocationsList (Maybe Text)
pllUploadProtocol
= lens _pllUploadProtocol
(\ s a -> s{_pllUploadProtocol = a})
-- | OAuth access token.
pllAccessToken :: Lens' ProjectsLocationsList (Maybe Text)
pllAccessToken
= lens _pllAccessToken
(\ s a -> s{_pllAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pllUploadType :: Lens' ProjectsLocationsList (Maybe Text)
pllUploadType
= lens _pllUploadType
(\ s a -> s{_pllUploadType = a})
-- | The resource that owns the locations collection, if applicable.
pllName :: Lens' ProjectsLocationsList Text
pllName = lens _pllName (\ s a -> s{_pllName = a})
-- | A filter to narrow down results to a preferred subset. The filtering
-- language accepts strings like \"displayName=tokyo\", and is documented
-- in more detail in [AIP-160](https:\/\/google.aip.dev\/160).
pllFilter :: Lens' ProjectsLocationsList (Maybe Text)
pllFilter
= lens _pllFilter (\ s a -> s{_pllFilter = a})
-- | A page token received from the \`next_page_token\` field in the
-- response. Send that page token to receive the subsequent page.
pllPageToken :: Lens' ProjectsLocationsList (Maybe Text)
pllPageToken
= lens _pllPageToken (\ s a -> s{_pllPageToken = a})
-- | The maximum number of results to return. If not set, the service selects
-- a default.
pllPageSize :: Lens' ProjectsLocationsList (Maybe Int32)
pllPageSize
= lens _pllPageSize (\ s a -> s{_pllPageSize = a}) .
mapping _Coerce
-- | JSONP
pllCallback :: Lens' ProjectsLocationsList (Maybe Text)
pllCallback
= lens _pllCallback (\ s a -> s{_pllCallback = a})
instance GoogleRequest ProjectsLocationsList where
type Rs ProjectsLocationsList = ListLocationsResponse
type Scopes ProjectsLocationsList =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsList'{..}
= go _pllName _pllXgafv _pllUploadProtocol
_pllAccessToken
_pllUploadType
_pllFilter
_pllPageToken
_pllPageSize
_pllCallback
(Just AltJSON)
redisService
where go
= buildClient
(Proxy :: Proxy ProjectsLocationsListResource)
mempty
|
brendanhay/gogol
|
gogol-redis/gen/Network/Google/Resource/Redis/Projects/Locations/List.hs
|
mpl-2.0
| 6,113
| 0
| 19
| 1,442
| 962
| 556
| 406
| 133
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.InstanceGroupManagers.ApplyUpdatesToInstances
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Applies changes to selected instances on the managed instance group.
-- This method can be used to apply new overrides and\/or new versions.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.instanceGroupManagers.applyUpdatesToInstances@.
module Network.Google.Resource.Compute.InstanceGroupManagers.ApplyUpdatesToInstances
(
-- * REST Resource
InstanceGroupManagersApplyUpdatesToInstancesResource
-- * Creating a Request
, instanceGroupManagersApplyUpdatesToInstances
, InstanceGroupManagersApplyUpdatesToInstances
-- * Request Lenses
, igmautiProject
, igmautiInstanceGroupManager
, igmautiZone
, igmautiPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.instanceGroupManagers.applyUpdatesToInstances@ method which the
-- 'InstanceGroupManagersApplyUpdatesToInstances' request conforms to.
type InstanceGroupManagersApplyUpdatesToInstancesResource
=
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"instanceGroupManagers" :>
Capture "instanceGroupManager" Text :>
"applyUpdatesToInstances" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
InstanceGroupManagersApplyUpdatesRequest
:> Post '[JSON] Operation
-- | Applies changes to selected instances on the managed instance group.
-- This method can be used to apply new overrides and\/or new versions.
--
-- /See:/ 'instanceGroupManagersApplyUpdatesToInstances' smart constructor.
data InstanceGroupManagersApplyUpdatesToInstances =
InstanceGroupManagersApplyUpdatesToInstances'
{ _igmautiProject :: !Text
, _igmautiInstanceGroupManager :: !Text
, _igmautiZone :: !Text
, _igmautiPayload :: !InstanceGroupManagersApplyUpdatesRequest
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'InstanceGroupManagersApplyUpdatesToInstances' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'igmautiProject'
--
-- * 'igmautiInstanceGroupManager'
--
-- * 'igmautiZone'
--
-- * 'igmautiPayload'
instanceGroupManagersApplyUpdatesToInstances
:: Text -- ^ 'igmautiProject'
-> Text -- ^ 'igmautiInstanceGroupManager'
-> Text -- ^ 'igmautiZone'
-> InstanceGroupManagersApplyUpdatesRequest -- ^ 'igmautiPayload'
-> InstanceGroupManagersApplyUpdatesToInstances
instanceGroupManagersApplyUpdatesToInstances pIgmautiProject_ pIgmautiInstanceGroupManager_ pIgmautiZone_ pIgmautiPayload_ =
InstanceGroupManagersApplyUpdatesToInstances'
{ _igmautiProject = pIgmautiProject_
, _igmautiInstanceGroupManager = pIgmautiInstanceGroupManager_
, _igmautiZone = pIgmautiZone_
, _igmautiPayload = pIgmautiPayload_
}
-- | Project ID for this request.
igmautiProject :: Lens' InstanceGroupManagersApplyUpdatesToInstances Text
igmautiProject
= lens _igmautiProject
(\ s a -> s{_igmautiProject = a})
-- | The name of the managed instance group, should conform to RFC1035.
igmautiInstanceGroupManager :: Lens' InstanceGroupManagersApplyUpdatesToInstances Text
igmautiInstanceGroupManager
= lens _igmautiInstanceGroupManager
(\ s a -> s{_igmautiInstanceGroupManager = a})
-- | The name of the zone where the managed instance group is located. Should
-- conform to RFC1035.
igmautiZone :: Lens' InstanceGroupManagersApplyUpdatesToInstances Text
igmautiZone
= lens _igmautiZone (\ s a -> s{_igmautiZone = a})
-- | Multipart request metadata.
igmautiPayload :: Lens' InstanceGroupManagersApplyUpdatesToInstances InstanceGroupManagersApplyUpdatesRequest
igmautiPayload
= lens _igmautiPayload
(\ s a -> s{_igmautiPayload = a})
instance GoogleRequest
InstanceGroupManagersApplyUpdatesToInstances
where
type Rs InstanceGroupManagersApplyUpdatesToInstances
= Operation
type Scopes
InstanceGroupManagersApplyUpdatesToInstances
=
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient
InstanceGroupManagersApplyUpdatesToInstances'{..}
= go _igmautiProject _igmautiZone
_igmautiInstanceGroupManager
(Just AltJSON)
_igmautiPayload
computeService
where go
= buildClient
(Proxy ::
Proxy
InstanceGroupManagersApplyUpdatesToInstancesResource)
mempty
|
brendanhay/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/InstanceGroupManagers/ApplyUpdatesToInstances.hs
|
mpl-2.0
| 5,677
| 0
| 18
| 1,249
| 550
| 327
| 223
| 100
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Genomics.Annotations.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes an annotation. Caller must have WRITE permission for the
-- associated annotation set.
--
-- /See:/ <https://cloud.google.com/genomics Genomics API Reference> for @genomics.annotations.delete@.
module Network.Google.Resource.Genomics.Annotations.Delete
(
-- * REST Resource
AnnotationsDeleteResource
-- * Creating a Request
, annotationsDelete
, AnnotationsDelete
-- * Request Lenses
, adXgafv
, adUploadProtocol
, adPp
, adAccessToken
, adUploadType
, adBearerToken
, adAnnotationId
, adCallback
) where
import Network.Google.Genomics.Types
import Network.Google.Prelude
-- | A resource alias for @genomics.annotations.delete@ method which the
-- 'AnnotationsDelete' request conforms to.
type AnnotationsDeleteResource =
"v1" :>
"annotations" :>
Capture "annotationId" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Deletes an annotation. Caller must have WRITE permission for the
-- associated annotation set.
--
-- /See:/ 'annotationsDelete' smart constructor.
data AnnotationsDelete = AnnotationsDelete'
{ _adXgafv :: !(Maybe Xgafv)
, _adUploadProtocol :: !(Maybe Text)
, _adPp :: !Bool
, _adAccessToken :: !(Maybe Text)
, _adUploadType :: !(Maybe Text)
, _adBearerToken :: !(Maybe Text)
, _adAnnotationId :: !Text
, _adCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnnotationsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'adXgafv'
--
-- * 'adUploadProtocol'
--
-- * 'adPp'
--
-- * 'adAccessToken'
--
-- * 'adUploadType'
--
-- * 'adBearerToken'
--
-- * 'adAnnotationId'
--
-- * 'adCallback'
annotationsDelete
:: Text -- ^ 'adAnnotationId'
-> AnnotationsDelete
annotationsDelete pAdAnnotationId_ =
AnnotationsDelete'
{ _adXgafv = Nothing
, _adUploadProtocol = Nothing
, _adPp = True
, _adAccessToken = Nothing
, _adUploadType = Nothing
, _adBearerToken = Nothing
, _adAnnotationId = pAdAnnotationId_
, _adCallback = Nothing
}
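-- A hypothetical usage sketch (not part of the generated module): a delete
-- request for a single annotation. The id is a placeholder; running the
-- request would go through 'Network.Google.send' with credentials covering
-- the 'Scopes' listed in the 'GoogleRequest' instance below.
--
-- > exampleDelete :: AnnotationsDelete
-- > exampleDelete = annotationsDelete "example-annotation-id"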
-- | V1 error format.
adXgafv :: Lens' AnnotationsDelete (Maybe Xgafv)
adXgafv = lens _adXgafv (\ s a -> s{_adXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
adUploadProtocol :: Lens' AnnotationsDelete (Maybe Text)
adUploadProtocol
= lens _adUploadProtocol
(\ s a -> s{_adUploadProtocol = a})
-- | Pretty-print response.
adPp :: Lens' AnnotationsDelete Bool
adPp = lens _adPp (\ s a -> s{_adPp = a})
-- | OAuth access token.
adAccessToken :: Lens' AnnotationsDelete (Maybe Text)
adAccessToken
= lens _adAccessToken
(\ s a -> s{_adAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
adUploadType :: Lens' AnnotationsDelete (Maybe Text)
adUploadType
= lens _adUploadType (\ s a -> s{_adUploadType = a})
-- | OAuth bearer token.
adBearerToken :: Lens' AnnotationsDelete (Maybe Text)
adBearerToken
= lens _adBearerToken
(\ s a -> s{_adBearerToken = a})
-- | The ID of the annotation to be deleted.
adAnnotationId :: Lens' AnnotationsDelete Text
adAnnotationId
= lens _adAnnotationId
(\ s a -> s{_adAnnotationId = a})
-- | JSONP
adCallback :: Lens' AnnotationsDelete (Maybe Text)
adCallback
= lens _adCallback (\ s a -> s{_adCallback = a})
instance GoogleRequest AnnotationsDelete where
type Rs AnnotationsDelete = Empty
type Scopes AnnotationsDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/genomics"]
requestClient AnnotationsDelete'{..}
= go _adAnnotationId _adXgafv _adUploadProtocol
(Just _adPp)
_adAccessToken
_adUploadType
_adBearerToken
_adCallback
(Just AltJSON)
genomicsService
where go
= buildClient
(Proxy :: Proxy AnnotationsDeleteResource)
mempty
|
rueshyna/gogol
|
gogol-genomics/gen/Network/Google/Resource/Genomics/Annotations/Delete.hs
|
mpl-2.0
| 5,315
| 0
| 18
| 1,325
| 857
| 498
| 359
| 122
| 1
|
module Utils.Zip where
import qualified Codec.Archive.Zip as Zip
import qualified Network.HTTP.Client as Client
import Utils.Http as Http
extract :: FilePath -> Http.Handler ()
extract destination request manager =
let
zipOpts = [Zip.OptDestination destination]
in do
response <- Client.httpLbs request manager
let archive = Zip.toArchive (Client.responseBody response)
Zip.extractFilesFromArchive zipOpts archive
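-- A hypothetical usage sketch (not part of the original module), assuming
-- 'Http.Handler a' is a synonym for 'Client.Request -> Client.Manager -> IO a'
-- as the definition above suggests. The URL and destination are placeholders.
--
-- > fetchAndUnzip :: IO ()
-- > fetchAndUnzip = do
-- >   manager <- Client.newManager Client.defaultManagerSettings
-- >   request <- Client.parseUrlThrow "http://example.com/archive.zip"
-- >   extract "vendor/archive" request manager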
|
johnpmayer/elm-protoc
|
app/Utils/Zip.hs
|
agpl-3.0
| 439
| 0
| 15
| 74
| 124
| 66
| 58
| 12
| 1
|
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Main where
import Proto.Tools.BuildDefs.Haskell.Tests.Proto.Generated
import Data.ProtoLens (defMessage)
main :: IO ()
main = print (defMessage :: Generated)
|
google/cabal2bazel
|
bzl/tests/proto/GeneratedTest.hs
|
apache-2.0
| 742
| 0
| 6
| 117
| 51
| 38
| 13
| 4
| 1
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-
Copyright 2016 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module CodeWorld.GameServer
( ServerState
, initGameServer
, gameServer
, gameStats
) where
import CodeWorld.Message
import Data.Char (isPunctuation, isSpace)
import Data.Monoid ((<>), mappend)
import Data.Text (Text)
import Data.List (find)
import Control.Exception (finally)
import Control.Monad
import Control.Concurrent
import Data.Time.Clock
import GHC.Generics
import GHC.Stats
import Data.Aeson
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import qualified Network.WebSockets as WS
import Snap.Core (MonadSnap, writeLBS, modifyResponse, setHeader, extendTimeout)
import Network.WebSockets.Snap
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Strict as HM
import System.Random
import Text.Read
import Control.Monad.IO.Class
import Control.Applicative
-- Server state
type Key = (Signature, GameId)
data Game = Game
{ numPlayers :: Int
, gameKey :: Key
, gameState :: GameState
, players :: [(PlayerId, WS.Connection)]
    , gameEventCount :: !Int -- ^ counts only broadcast game messages
, gameEventSize :: !Int
}
data GameState = Waiting | Running
type Games = HM.HashMap Key (MVar Game)
data ServerState = ServerState
{ games :: MVar Games
, totalStats :: MVar TotalStats
, started :: UTCTime
}
-- Server state manipulation
randomGameId :: IO GameId
randomGameId = T.pack <$> replicateM 4 (randomRIO ('A', 'Z'))
freshGame :: ServerState -> Int -> Signature -> IO (GameId, MVar Game)
freshGame state playerCount sig = modifyMVar (games state) go
where
go games = do
gid <- randomGameId
if (sig, gid) `HM.member` games
then go games
else do
let game = Game
{ numPlayers = playerCount
, gameKey = (sig, gid)
, gameState = Waiting
, players = []
, gameEventCount = 0
, gameEventSize = 0
}
gameMV <- newMVar game
return (HM.insert (sig, gid) gameMV games, (gid, gameMV))
joinGame :: WS.Connection -> MVar Game -> IO (Maybe PlayerId)
joinGame conn gameMV = modifyMVar gameMV $ \game -> case game of
Game { gameState = Waiting } | length (players game) < numPlayers game ->
let Just pid = find (`notElem` map fst (players game)) [0..] -- fill holes
game' = game { players = (pid, conn) : players game }
in return (game', Just pid)
_ -> return (game, Nothing)
tryStartGame :: MVar Game -> IO Bool
tryStartGame gameMV = modifyMVar gameMV $ \game -> case game of
Game { gameState = Waiting } | length (players game) == numPlayers game ->
return (game { gameState = Running } , True)
_ -> return (game, False)
getPlayers :: MVar Game -> IO [WS.Connection]
getPlayers gameMVar = map snd . players <$> readMVar gameMVar
getStats :: MVar Game -> IO (Int, Int)
getStats gameMVar = go <$> readMVar gameMVar
where go game = (length (players game), numPlayers game)
cleanup :: MVar Game -> PlayerId -> ServerState -> IO ()
cleanup gameMV mypid state = do
done <- modifyMVar gameMV go
if done
then do
game <- readMVar gameMV
let key = gameKey game
modifyMVar_ (games state) $ return . HM.delete key
modifyMVar_ (totalStats state) $ \ts -> return $!
ts { totalEventCount = totalEventCount ts + gameEventCount game
, totalEventSize = totalEventSize ts + gameEventSize game
}
else do
announcePlayers gameMV
where
go g = let players' = filter ((/= mypid) . fst) (players g)
in return $ (g { players = players' }, null players')
-- Communication
sendServerMessage :: ServerMessage -> WS.Connection -> IO ()
sendServerMessage msg conn = WS.sendTextData conn (T.pack (show msg))
getClientMessage :: WS.Connection -> IO ClientMessage
getClientMessage conn = do
msg <- WS.receiveData conn
case readMaybe (T.unpack msg) of
Just msg -> return msg
Nothing -> fail "Invalid client message"
broadcast :: ServerMessage -> MVar Game -> IO ()
broadcast msg gameMV = do
let !msg_txt = T.encodeUtf8 (T.pack (show msg))
withMVar gameMV $ \game ->
forM_ (players game) (\(_,conn) -> WS.sendTextData conn msg_txt)
modifyMVar_ gameMV $ \game -> return $!
game { gameEventCount = gameEventCount game + 1
, gameEventSize = gameEventSize game + BS.length msg_txt
}
-- Statistics
data CurrentStats = CurrentStats
{ waitingGames :: !Int
, runningGames :: !Int
, connections :: !Int
} deriving (Show, Generic)
instance ToJSON CurrentStats
data TotalStats = TotalStats
{ totalConnections :: !Int
, totalGames :: !Int
, totalEventCount :: !Int
, totalEventSize :: !Int
} deriving (Show, Generic)
instance ToJSON TotalStats
tickConnection :: ServerState -> IO ()
tickConnection state = modifyMVar_ (totalStats state) $ \ts ->
return $! ts { totalConnections = totalConnections ts + 1}
tickGame :: ServerState -> IO ()
tickGame state = modifyMVar_ (totalStats state) $ \ts ->
return $! ts { totalGames = totalGames ts + 1}
deriving instance Generic GCStats
instance ToJSON GCStats
data ServerStats = ServerStats CurrentStats TotalStats GCStats
-- | merge the fields of 'CurrentStats' and 'TotalStats'
instance ToJSON ServerStats where
toJSON (ServerStats cs ts gs) = Object (o1 <> o2 <> o3)
where Object o1 = toJSON cs
Object o2 = toJSON ts
Object o3 = object [ "mem" .= toJSON gs ]
allGames :: ServerState -> IO [Game]
allGames state = do
gm <- readMVar (games state)
mapM readMVar $ HM.elems gm
tally :: [Game] -> CurrentStats
tally games = CurrentStats {..}
where
waitingGames = length [ () | Game { gameState = Waiting {}} <- games ]
runningGames = length [ () | Game { gameState = Running {}} <- games ]
connections = sum [ length (players g) | g <- games ]
gameStats :: MonadSnap m => ServerState -> m ()
gameStats state = do
cs <- tally <$> liftIO (allGames state)
ts <- liftIO $ readMVar (totalStats state)
gs <- liftIO $ getGCStats
let stats = ServerStats cs ts gs
modifyResponse $ setHeader "Content-Type" "application/json"
writeLBS (encode stats)
-- Handling logic
-- | Initializes the mutable state of the game server
initGameServer :: IO ServerState
initGameServer = do
started <- getCurrentTime
totalStats <- newMVar (TotalStats 0 0 0 0)
games <- newMVar HM.empty
return $ ServerState {..}
-- | A snap handler
gameServer :: MonadSnap m => ServerState -> m ()
gameServer state = do
-- extendTimeout 36000
runWebSocketsSnap (wsApp state)
wsApp :: ServerState -> WS.ServerApp
wsApp state pending = do
conn <- WS.acceptRequest pending
WS.forkPingThread conn 30
welcome conn state
welcome :: WS.Connection -> ServerState -> IO ()
welcome conn state = do
tickConnection state
msg <- getClientMessage conn
case msg of NewGame n sig -> welcomeNew conn state n sig
JoinGame gid sig -> welcomeJoin conn state gid sig
welcomeNew :: WS.Connection -> ServerState -> Int -> Signature -> IO ()
welcomeNew conn state n sig = do
tickGame state
(gid, gameMV) <- freshGame state n sig
Just pid <- joinGame conn gameMV
sendServerMessage (JoinedAs pid gid) conn
announcePlayers gameMV
talk pid conn gameMV `finally` cleanup gameMV pid state
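-- | Look up the game that a join request refers to. The special \"BOT\"
-- signature skips the lookup and takes an arbitrary existing game (the
-- comprehension below rebinds @gid@, so the requested id is ignored there);
-- every other signature requires an exact (signature, game id) match.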
findGame :: ServerState -> GameId -> Signature -> IO (MVar Game)
findGame state gid "BOT" = do
games <- readMVar (games state)
(gameMV:_) <- return $ [ gameMV | ((_, gid), gameMV) <- HM.toList games ]
return gameMV
findGame state gid sig = do
Just gameMV <- HM.lookup (sig, gid) <$> readMVar (games state)
return gameMV
welcomeJoin :: WS.Connection -> ServerState -> GameId -> Signature -> IO ()
welcomeJoin conn state gid sig = do
gameMV <- findGame state gid sig
Just pid <- joinGame conn gameMV
sendServerMessage (JoinedAs pid gid) conn
announcePlayers gameMV
talk pid conn gameMV `finally` cleanup gameMV pid state
announcePlayers :: MVar Game -> IO ()
announcePlayers gameMV = do
(n, m) <- getStats gameMV
started <- tryStartGame gameMV
broadcast (if started then Started else PlayersWaiting n m) gameMV
talk :: PlayerId -> WS.Connection -> MVar Game -> IO ()
talk pid conn gameMV = forever $ getClientMessage conn >>= \case
InEvent e -> do
g <- readMVar gameMV
case g of
Game { gameState = Running, ..} -> broadcast (OutEvent pid e) gameMV
_ -> return ()
|
nomeata/codeworld
|
codeworld-game-server/src/CodeWorld/GameServer.hs
|
apache-2.0
| 9,778
| 0
| 19
| 2,507
| 3,004
| 1,531
| 1,473
| 233
| 2
|
{- |
Module : Cantor.Parser.AST
Copyright : Copyright (C) 2014 Krzysztof Langner
License : BSD3
Maintainer : Krzysztof Langner <klangner@gmail.com>
Stability : alpha
Portability : portable
AST model for java source code
-}
module Cantor.Parser.AST ( Class
, Function
, ImportDecl
, Package
, addImports
, importPkgPath
, importClass
, mkClass
, mkImportDecl
, mkPackage
, packageDir
, packageClasses
, packageImports
, packageName ) where
import Cantor.Utils.List (splitByLast)
-- | some helper types
type PackagePath = String
type Name = String
-- | Package
data Package = Package { packageName :: String
, packageImports :: [ImportDecl]
, packageClasses :: [Class] }
-- | Import declaration with package name and class name
data ImportDecl = ImportDecl { importPkgPath :: PackagePath
, importClass :: Name } deriving (Eq, Show)
data Class = Class Name [Function] deriving (Eq, Show)
data Function = Function Name deriving (Eq, Show)
-- | Create new package with given name
mkPackage :: String -> Package
mkPackage name = Package name [] []
-- | Add import declarations to the package
addImports :: Package -> [ImportDecl] -> Package
addImports (Package name is cs) xs = Package name (is++xs) cs
-- | Get the package directory path: the package name with dots replaced by slashes
packageDir :: Package -> FilePath
packageDir (Package a _ _) = map f a
where f x | x == '.' = '/'
| otherwise = x
-- | Create an import declaration
mkImportDecl :: PackagePath -> ImportDecl
mkImportDecl path = ImportDecl pkg cls
where (pkg, cls) = splitByLast "." path
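-- A hypothetical illustration (not in the original source), assuming
-- 'splitByLast' splits on the last occurrence of the separator:
--
-- > mkImportDecl "java.util.List" == ImportDecl "java.util" "List"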
-- | Create class
mkClass :: Name -> Class
mkClass name = Class name []
|
klangner/cantor
|
src/Cantor/Parser/AST.hs
|
bsd-2-clause
| 2,020
| 0
| 10
| 719
| 397
| 225
| 172
| 37
| 1
|
{-# LANGUAGE NumDecimals #-}
import Gauge.Main
import Data.Default.Class
import qualified Data.Vector.Unboxed as U
import Text.Printf
import System.Random (randomIO)
import qualified Numeric.Sum as Sum
import Numeric.SpecFunctions
import Numeric.Polynomial
import Numeric.RootFinding
-- Uniformly sample logGamma performance between 10^-8 and 10^8
benchmarkLogGamma logG =
[ bench (printf "%.3g" x) $ nf logG x
| x <- [ m * 10**n | n <- [ -8 .. 8 ]
, m <- [ 10**(i / tics) | i <- [0 .. tics-1] ]
]
]
where tics = 3
{-# INLINE benchmarkLogGamma #-}
-- Sizes of the polynomials to be evaluated (in other words, lengths of the coefficient vectors)
coef_size :: [Int]
coef_size = [ 1,2,3,4,5,6,7,8,9
, 10, 30
, 100, 300
, 1000, 3000
, 10000, 30000
]
{-# INLINE coef_size #-}
-- Precalculated coefficients
coef_list :: [U.Vector Double]
coef_list = [ U.replicate n 1.2 | n <- coef_size]
{-# NOINLINE coef_list #-}
main :: IO ()
main = do
v <- U.replicateM 1e6 randomIO :: IO (U.Vector Double)
defaultMain
[ bgroup "logGamma" $
benchmarkLogGamma logGamma
, bgroup "incompleteGamma" $
[ bench (show p) $ nf (incompleteGamma p) p
| p <- [ 0.1
, 1, 3
, 10, 30
, 100, 300
, 999, 1000
]
]
, bgroup "factorial"
[ bench (show n) $ nf factorial n
| n <- [ 0, 1, 3, 6, 9, 11, 15
, 20, 30, 40, 50, 60, 70, 80, 90, 100
]
]
, bgroup "incompleteBeta"
[ bench (show (p,q,x)) $ nf (incompleteBeta p q) x
| (p,q,x) <- [ (10, 10, 0.5)
, (101, 101, 0.5)
, (1010, 1010, 0.5)
, (10100, 10100, 0.5)
, (100100, 100100, 0.5)
, (1001000, 1001000, 0.5)
, (10010000,10010000,0.5)
]
]
, bgroup "log1p"
[ bench (show x) $ nf log1p x
| x <- [ -0.9
, -0.5
, -0.1
, 0.1
, 0.5
, 1
, 10
, 100
] :: [Double]
]
, bgroup "sinc" $
bench "sin" (nf sin (0.55 :: Double))
: [ bench (show x) $ nf sinc x
| x <- [0, 1e-6, 1e-3, 0.5]
]
, bgroup "erf & Co"
[ bgroup "erf"
[ bench (show x) $ nf erf x
| x <- [0, 1.1, 100, 1000]
]
, bgroup "erfc"
[ bench (show x) $ nf erfc x
| x <- [0, 1.1, 100, 1000]
]
, bgroup "invErfc"
[ bench (show x) $ nf erfc x
| x <- [1e-9, 1e-6, 1e-3, 0.1, 1]
]
]
, bgroup "expm1"
[ bench (show x) $ nf expm1 (x :: Double)
| x <- [-0.1, 0, 1, 19]
]
, bgroup "poly"
$ [ bench ("vector_"++show (U.length coefs)) $ nf (\x -> evaluatePolynomial x coefs) (1 :: Double)
| coefs <- coef_list ]
++ [ bench ("unpacked_"++show n) $ nf (\x -> evaluatePolynomialL x (map fromIntegral [1..n])) (1 :: Double)
| n <- coef_size ]
, bgroup "RootFinding"
[ bench "ridders sin" $ nf (ridders def (0,pi/2)) (\x -> sin x - 0.525)
, bench "newton sin" $ nf (newtonRaphson def (0,1.2,pi/2)) (\x -> (sin x - 0.525,cos x))
]
, bgroup "Sum"
[ bench "naive" $ whnf U.sum v
, bench "kahan" $ whnf (Sum.sumVector Sum.kahan) v
, bench "kbn" $ whnf (Sum.sumVector Sum.kbn) v
, bench "kb2" $ whnf (Sum.sumVector Sum.kb2) v
]
]
|
Shimuuar/math-functions
|
bench/bench.hs
|
bsd-2-clause
| 3,673
| 0
| 19
| 1,460
| 1,363
| 751
| 612
| 93
| 1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE PatternGuards #-}
module Main where
import Control.Applicative
import Control.Monad (guard)
import Control.Lens
import Data.List (nub)
import Data.Map as M
import Data.Maybe (fromMaybe)
import Data.Vector as B
import Data.Vector.Unboxed as U
import Data.Vector.Hybrid as H
import Data.Vector.Generic as G
import Data.Word
import Instances
import Sparse.Matrix as SM
import Test.Framework.TH
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck
import Test.QuickCheck.Function
import Linear
-- model for matrix multiplication
type Linear a = Map Word (Map Word a)
nonEmpty :: Lens' (Maybe (Map k Int)) (Map k Int)
nonEmpty f m = f (fromMaybe M.empty m) <&> \ m -> m <$ guard (not (M.null m))
-- | Matrix multiplication in the 'Linear' model will leave empty maps inside the outer map where sparse multiplication would not; 'sane' filters them out
sane :: Linear Int -> Linear Int
sane = M.filter (not . M.null)
toLinear :: Mat Int -> Linear Int
toLinear = sane . H.foldr (\(k,v) r -> r & at (k^._1) . nonEmpty . at (k^._2) ?~ v) M.empty . view _Mat
fromLinear :: Linear Int -> Mat Int
fromLinear m = SM.fromList $ do
(i, n) <- M.toList m
(j, a) <- M.toList n
return (Key i j, a)
prop_to_from x = toLinear (fromLinear x) == sane x
prop_from_to x = fromLinear (toLinear x) == x
prop_model :: Mat Int -> Mat Int -> Gen Prop
prop_model x y | z <- x * y, z' <- fromLinear (toLinear x !*! toLinear y)
= label (show z Prelude.++ " == " Prelude.++ show z') (z == z')
main = $defaultMainGenerator
|
ekmett/sparse
|
tests/properties.hs
|
bsd-2-clause
| 1,514
| 0
| 16
| 270
| 595
| 316
| 279
| -1
| -1
|
module Data.Text.Prettyprint.Doc {-# DEPRECATED "Use \"Prettyprinter\" instead." #-} (
module Prettyprinter
) where
import Prettyprinter
|
quchen/prettyprinter
|
prettyprinter/src/Data/Text/Prettyprint/Doc.hs
|
bsd-2-clause
| 142
| 0
| 4
| 20
| 18
| 13
| 5
| 3
| 0
|
{-# LANGUAGE CPP, TypeOperators,
FlexibleInstances,
ScopedTypeVariables,
MultiParamTypeClasses,
KindSignatures #-}
{- 2012 Joel Svensson -}
module Intel.ArBB.Vector where
import qualified Data.Vector.Storable as V
import qualified Data.Vector.Storable.Mutable as M
import Intel.ArBB.Data -- Embeddable
import Intel.ArBB.Data.Int
import Intel.ArBB.Types
import Intel.ArBB.Syntax
import Intel.ArBB.IsScalar
----------------------------------------------------------------------------
-- Dense Vectors
data DVector d a = DVector { dVectorData :: V.Vector a -- dVectorID :: Integer,
, dVectorShape :: d } --Dim}
mkDVector :: Dimensions t => V.Vector a -> t -> DVector t a
mkDVector v d = DVector v d
data Dim = Dim {dimList :: [Int]}
dimensions = length . dimList
-- In ArBB the order seems to be Pages, Rows, Cols.
-- so Z:.Cols:.Rows:.Pages in EmbArBB.
-- Does that make sense ?
class Dimensions a where
toDim :: a -> Dim
fromDim :: Dim -> a
instance Dimensions Z where
toDim Z = Dim []
fromDim (Dim []) = Z
instance Dimensions t => Dimensions (t :. Int) where
toDim (is :. i) = Dim (i:is')
where (Dim is') = toDim is
fromDim (Dim (i:is)) = fromDim (Dim is) :. i
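-- A hypothetical illustration (not in the original source): the innermost
-- Int ends up first in the dimension list, e.g.
--
-- > dimList (toDim (Z :. 10 :. 20)) == [20, 10]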
-- | Encode Dimensionality in the type of vectors
data a :. b = a :. b
infixl :.
data Z = Z
type Dim0 = Z
type Dim1 = Dim0 :. Int
type Dim2 = Dim1 :. Int
type Dim3 = Dim2 :. Int
-- | Easy to use names.
--type Scalar = DVector Dim0 -- This or the next one?
type Vector0D = DVector Dim0 -- hmm nice ?
type Vector = DVector Dim1
type Vector2D = DVector Dim2
type Vector3D = DVector Dim3
----------------------------------------------------------------------------
-- Nested Vectors
data NVector a = NVector { nVectorData :: V.Vector a
, nVectorNesting :: V.Vector USize }
----------------------------------------------------------------------------
-- Data Instances..
unS (Scalar a) = a
#define ContainerOfScal(t,mod) instance IsScalar a => Data (t) where { \
typeOf _ = mod (unS (scalarType (undefined :: a))); \
sizeOf _ = undefined}
ContainerOfScal(DVector Dim0 a,Scalar)
ContainerOfScal(DVector Dim1 a,Dense I)
ContainerOfScal(DVector Dim2 a ,Dense II)
ContainerOfScal(DVector Dim3 a ,Dense III)
ContainerOfScal(NVector a, Nested)
----------------------------------------------------------------------------
-- IsVector
class IsVector (t :: * -> *) e
instance IsScalar a => IsVector NVector a
-- Zero dimensional DVectors are not Vectors.
instance (Dimensions t, IsScalar a) => IsVector (DVector (t:.Int)) a
----------------------------------------------------------------------------
-- Show
instance (M.Storable a, Show a) => Show (DVector t a) where
show (DVector dat t) = show dat
-- TODO: show based on the dimensions. (so a matrix appears as a matrix)
|
svenssonjoel/EmbArBB
|
Intel/ArBB/Vector.hs
|
bsd-3-clause
| 3,089
| 0
| 10
| 775
| 702
| 386
| 316
| -1
| -1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Stack.PackageDump
( Line
, eachSection
, eachPair
, DumpPackage (..)
, conduitDumpPackage
, ghcPkgDump
, InstalledCache
, InstalledCacheEntry (..)
, newInstalledCache
, loadInstalledCache
, saveInstalledCache
, addProfiling
, addHaddock
, sinkMatching
, pruneDeps
) where
import Control.Applicative
import Control.Exception.Enclosed (tryIO)
import Control.Monad (when, liftM)
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Logger (MonadLogger)
import Control.Monad.Trans.Control
import Data.Attoparsec.Args
import Data.Attoparsec.Text as P
import Data.Binary (Binary)
import Data.Binary.VersionTagged (taggedDecodeOrLoad, taggedEncodeFile, BinarySchema (..))
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import Data.Either (partitionEithers)
import qualified Data.Foldable as F
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (catMaybes)
import qualified Data.Set as Set
import qualified Data.Text.Encoding as T
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Path
import Prelude -- Fix AMP warning
import Stack.GhcPkg
import Stack.Types
import System.Directory (createDirectoryIfMissing, getDirectoryContents, doesFileExist)
import System.Process.Read
-- | Cached information on whether packages have profiling libraries and haddocks.
newtype InstalledCache = InstalledCache (IORef InstalledCacheInner)
newtype InstalledCacheInner = InstalledCacheInner (Map GhcPkgId InstalledCacheEntry)
deriving Binary
instance BinarySchema InstalledCacheInner where
-- Don't forget to update this if you change the datatype in any way!
binarySchema _ = 1
-- | Cached information on whether a package has profiling libraries and haddocks.
data InstalledCacheEntry = InstalledCacheEntry
{ installedCacheProfiling :: !Bool
, installedCacheHaddock :: !Bool }
deriving (Eq, Generic)
instance Binary InstalledCacheEntry
-- | Call ghc-pkg dump with appropriate flags and stream to the given @Sink@, for a single database
ghcPkgDump
:: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m)
=> EnvOverride
-> Maybe (Path Abs Dir) -- ^ if Nothing, use global
-> Sink ByteString IO a
-> m a
ghcPkgDump menv mpkgDb sink = do
F.mapM_ (createDatabase menv) mpkgDb -- TODO maybe use some retry logic instead?
a <- sinkProcessStdout Nothing menv "ghc-pkg" args sink
return a
where
args = concat
[ case mpkgDb of
Nothing -> ["--global", "--no-user-package-db"]
Just pkgdb -> ["--user", "--no-user-package-db", "--package-db", toFilePath pkgdb]
, ["dump", "--expand-pkgroot"]
]
-- | Create a new, empty @InstalledCache@
newInstalledCache :: MonadIO m => m InstalledCache
newInstalledCache = liftIO $ InstalledCache <$> newIORef (InstalledCacheInner Map.empty)
-- | Load a @InstalledCache@ from disk, swallowing any errors and returning an
-- empty cache.
loadInstalledCache :: MonadIO m => Path Abs File -> m InstalledCache
loadInstalledCache path = do
m <- taggedDecodeOrLoad (toFilePath path) (return $ InstalledCacheInner Map.empty)
liftIO $ fmap InstalledCache $ newIORef m
-- | Save a @InstalledCache@ to disk
saveInstalledCache :: MonadIO m => Path Abs File -> InstalledCache -> m ()
saveInstalledCache path (InstalledCache ref) = liftIO $ do
createDirectoryIfMissing True $ toFilePath $ parent path
readIORef ref >>= taggedEncodeFile (toFilePath path)
-- | Prune a list of possible packages down to those whose dependencies are met.
--
-- * id uniquely identifies an item
--
-- * There can be multiple items per name
pruneDeps
:: (Ord name, Ord id)
=> (id -> name) -- ^ extract the name from an id
-> (item -> id) -- ^ the id of an item
-> (item -> [id]) -- ^ get the dependencies of an item
-> (item -> item -> item) -- ^ choose the desired of two possible items
-> [item] -- ^ input items
-> Map name item
pruneDeps getName getId getDepends chooseBest =
Map.fromList
. (map $ \item -> (getName $ getId item, item))
. loop Set.empty Set.empty []
where
loop foundIds usedNames foundItems dps =
case partitionEithers $ map depsMet dps of
([], _) -> foundItems
(s', dps') ->
let foundIds' = Map.fromListWith chooseBest s'
foundIds'' = Set.fromList $ map getId $ Map.elems foundIds'
usedNames' = Map.keysSet foundIds'
foundItems' = Map.elems foundIds'
in loop
(Set.union foundIds foundIds'')
(Set.union usedNames usedNames')
(foundItems ++ foundItems')
(catMaybes dps')
where
depsMet dp
| name `Set.member` usedNames = Right Nothing
| all (`Set.member` foundIds) (getDepends dp) = Left (name, dp)
| otherwise = Right $ Just dp
where
id' = getId dp
name = getName id'
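-- A hypothetical illustration of 'pruneDeps' (not in the original module),
-- using pairs of (id, dependency ids) as items, names equal to ids, and
-- 'const' as the tie-breaker:
--
-- > pruneDeps id fst snd const
-- >   [ ("a", []), ("b", ["a"]), ("c", ["missing"]) ]
--
-- keeps \"a\" and \"b\" (their dependencies are all present) and drops \"c\".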
-- | Find the package IDs matching the given constraints with all dependencies installed.
-- Packages not mentioned in the provided @Map@ are allowed to be present too.
sinkMatching :: Monad m
=> Bool -- ^ require profiling?
-> Bool -- ^ require haddock?
-> Map PackageName Version -- ^ allowed versions
-> Consumer (DumpPackage Bool Bool)
m
(Map PackageName (DumpPackage Bool Bool))
sinkMatching reqProfiling reqHaddock allowed = do
dps <- CL.filter (\dp -> isAllowed (dpGhcPkgId dp) &&
(not reqProfiling || dpProfiling dp) &&
(not reqHaddock || dpHaddock dp))
=$= CL.consume
return $ pruneDeps
(packageIdentifierName . ghcPkgIdPackageIdentifier)
dpGhcPkgId
dpDepends
const -- Could consider a better comparison in the future
dps
where
isAllowed gid =
case Map.lookup name allowed of
Just version' | version /= version' -> False
_ -> True
where
PackageIdentifier name version = ghcPkgIdPackageIdentifier gid
-- | Add profiling information to the stream of @DumpPackage@s
addProfiling :: MonadIO m
=> InstalledCache
-> Conduit (DumpPackage a b) m (DumpPackage Bool b)
addProfiling (InstalledCache ref) =
CL.mapM go
where
go dp = liftIO $ do
InstalledCacheInner m <- readIORef ref
let gid = dpGhcPkgId dp
p <- case Map.lookup gid m of
Just installed -> return (installedCacheProfiling installed)
Nothing | null (dpLibraries dp) -> return True
Nothing -> do
let loop [] = return False
loop (dir:dirs) = do
econtents <- tryIO $ getDirectoryContents dir
let contents = either (const []) id econtents
if or [isProfiling content lib
| content <- contents
, lib <- dpLibraries dp
] && not (null contents)
then return True
else loop dirs
loop $ dpLibDirs dp
return dp { dpProfiling = p }
isProfiling :: FilePath -- ^ entry in directory
-> ByteString -- ^ name of library
-> Bool
isProfiling content lib =
prefix `S.isPrefixOf` S8.pack content
where
prefix = S.concat ["lib", lib, "_p"]
-- | Add haddock information to the stream of @DumpPackage@s
addHaddock :: MonadIO m
=> InstalledCache
-> Conduit (DumpPackage a b) m (DumpPackage a Bool)
addHaddock (InstalledCache ref) =
CL.mapM go
where
go dp = liftIO $ do
InstalledCacheInner m <- readIORef ref
let gid = dpGhcPkgId dp
h <- case Map.lookup gid m of
Just installed -> return (installedCacheHaddock installed)
Nothing | null (dpLibraries dp) -> return True
Nothing -> do
let loop [] = return False
loop (ifc:ifcs) = do
exists <- doesFileExist (S8.unpack ifc)
if exists
then return True
else loop ifcs
loop $ dpHaddockInterfaces dp
return dp { dpHaddock = h }
-- | Dump information for a single package
data DumpPackage profiling haddock = DumpPackage
{ dpGhcPkgId :: !GhcPkgId
, dpLibDirs :: ![FilePath]
, dpLibraries :: ![ByteString]
, dpDepends :: ![GhcPkgId]
, dpHaddockInterfaces :: ![ByteString]
, dpProfiling :: !profiling
, dpHaddock :: !haddock
}
deriving (Show, Eq, Ord)
data PackageDumpException
= MissingSingleField ByteString (Map ByteString [Line])
| MismatchedId PackageName Version GhcPkgId
| Couldn'tParseField ByteString [Line]
deriving Typeable
instance Exception PackageDumpException
instance Show PackageDumpException where
show (MissingSingleField name values) = unlines $ concat
[ return $ concat
[ "Expected single value for field name "
, show name
, " when parsing ghc-pkg dump output:"
]
, map (\(k, v) -> " " ++ show (k, v)) (Map.toList values)
]
show (MismatchedId name version gid) =
"Invalid id/name/version in ghc-pkg dump output: " ++
show (name, version, gid)
show (Couldn'tParseField name ls) =
"Couldn't parse the field " ++ show name ++ " from lines: " ++ show ls
-- | Convert a stream of bytes into a stream of @DumpPackage@s
conduitDumpPackage :: MonadThrow m
=> Conduit ByteString m (DumpPackage () ())
conduitDumpPackage = (=$= CL.catMaybes) $ eachSection $ do
pairs <- eachPair (\k -> (k, ) <$> CL.consume) =$= CL.consume
let m = Map.fromList pairs
let parseS k =
case Map.lookup k m of
Just [v] -> return v
_ -> throwM $ MissingSingleField k m
-- Can't fail: if not found, same as an empty list. See:
-- https://github.com/fpco/stack/issues/182
parseM k =
case Map.lookup k m of
Just vs -> vs
Nothing -> []
parseDepend :: MonadThrow m => ByteString -> m (Maybe GhcPkgId)
parseDepend "builtin_rts" = return Nothing
parseDepend bs =
liftM Just $ parseGhcPkgId bs'
where
(bs', _builtinRts) =
case stripSuffixBS " builtin_rts" bs of
Nothing ->
case stripPrefixBS "builtin_rts " bs of
Nothing -> (bs, False)
Just x -> (x, True)
Just x -> (x, True)
case Map.lookup "id" m of
Just ["builtin_rts"] -> return Nothing
_ -> do
name <- parseS "name" >>= parsePackageName
version <- parseS "version" >>= parseVersion
ghcPkgId <- parseS "id" >>= parseGhcPkgId
when (PackageIdentifier name version /= ghcPkgIdPackageIdentifier ghcPkgId)
$ throwM $ MismatchedId name version ghcPkgId
-- if a package has no modules, these won't exist
let libDirKey = "library-dirs"
libDirs = parseM libDirKey
libraries = parseM "hs-libraries"
haddockInterfaces = parseM "haddock-interfaces"
depends <- mapM parseDepend $ parseM "depends"
libDirPaths <-
case mapM (P.parseOnly (argsParser NoEscaping) . T.decodeUtf8) libDirs of
Left{} -> throwM (Couldn'tParseField libDirKey libDirs)
Right dirs -> return (concat dirs)
return $ Just DumpPackage
{ dpGhcPkgId = ghcPkgId
, dpLibDirs = libDirPaths
, dpLibraries = S8.words $ S8.unwords libraries
, dpDepends = catMaybes (depends :: [Maybe GhcPkgId])
, dpHaddockInterfaces = S8.words $ S8.unwords haddockInterfaces
, dpProfiling = ()
, dpHaddock = ()
}
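-- A rough usage sketch (not in the original module): pipe the raw output of
-- @ghc-pkg dump@ through the conduit and collect the parsed packages. The
-- file name and the use of runResourceT/CB.sourceFile are illustrative
-- assumptions only.
--
-- > dps <- runResourceT $ CB.sourceFile "ghc-pkg-dump.txt" $$ conduitDumpPackage =$= CL.consume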
stripPrefixBS :: ByteString -> ByteString -> Maybe ByteString
stripPrefixBS x y
| x `S.isPrefixOf` y = Just $ S.drop (S.length x) y
| otherwise = Nothing
stripSuffixBS :: ByteString -> ByteString -> Maybe ByteString
stripSuffixBS x y
| x `S.isSuffixOf` y = Just $ S.take (S.length y - S.length x) y
| otherwise = Nothing
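-- Quick sketches of the two helpers above (not in the original module;
-- OverloadedStrings assumed for the ByteString literals):
--
-- >>> stripPrefixBS "builtin_rts " "builtin_rts foo-1.0"
-- Just "foo-1.0"
-- >>> stripSuffixBS " builtin_rts" "foo-1.0 builtin_rts"
-- Just "foo-1.0"
-- >>> stripPrefixBS "lib" "HSfoo"
-- Nothing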
-- | A single line of input, not including line endings
type Line = ByteString
-- | Apply the given Sink to each section of output, broken by a single line containing ---
eachSection :: Monad m
=> Sink Line m a
-> Conduit ByteString m a
eachSection inner =
CL.map (S.filter (/= _cr)) =$= CB.lines =$= start
where
_cr = 13
peekBS = await >>= maybe (return Nothing) (\bs ->
if S.null bs
then peekBS
else leftover bs >> return (Just bs))
start = peekBS >>= maybe (return ()) (const go)
go = do
x <- toConsumer $ takeWhileC (/= "---") =$= inner
yield x
CL.drop 1
start
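-- A small behaviour sketch (not in the original module), written with the
-- conduit-1.2 style operators this file already uses:
--
-- > CL.sourceList ["a\nb\n---\nc\n"] $$ eachSection CL.consume =$= CL.consume
-- >   -- yields [["a","b"],["c"]]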
-- | Grab each key/value pair
eachPair :: Monad m
=> (ByteString -> Sink Line m a)
-> Conduit Line m a
eachPair inner =
start
where
start = await >>= maybe (return ()) start'
_colon = 58
_space = 32
start' bs1 =
toConsumer (valSrc =$= inner key) >>= yield >> start
where
(key, bs2) = S.breakByte _colon bs1
(spaces, bs3) = S.span (== _space) $ S.drop 1 bs2
indent = S.length key + 1 + S.length spaces
valSrc
| S.null bs3 = noIndent
| otherwise = yield bs3 >> loopIndent indent
noIndent = do
mx <- await
case mx of
Nothing -> return ()
Just bs -> do
let (spaces, val) = S.span (== _space) bs
if S.length spaces == 0
then leftover val
else do
yield val
loopIndent (S.length spaces)
loopIndent i =
loop
where
loop = await >>= maybe (return ()) go
go bs
| S.length spaces == i && S.all (== _space) spaces =
yield val >> loop
| otherwise = leftover bs
where
(spaces, val) = S.splitAt i bs
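-- A small behaviour sketch (not in the original module): continuation lines
-- indented to the value column are folded into the same key.
--
-- > CL.sourceList ["id: foo", "    bar"] $$ eachPair (\k -> (k, ) <$> CL.consume) =$= CL.consume
-- >   -- yields [("id", ["foo", "bar"])]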
-- | General purpose utility
takeWhileC :: Monad m => (a -> Bool) -> Conduit a m a
takeWhileC f =
loop
where
loop = await >>= maybe (return ()) go
go x
| f x = yield x >> loop
| otherwise = leftover x
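-- Sketch (not in the original module):
--
-- > CL.sourceList [1, 2, 3, 1] $$ takeWhileC (< 3) =$= CL.consume  -- yields [1, 2]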
|
hesselink/stack
|
src/Stack/PackageDump.hs
|
bsd-3-clause
| 15,600
| 0
| 27
| 5,098
| 3,972
| 2,032
| 1,940
| 357
| 8
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Traduisons.Types where
import Control.Monad.Except
import Control.Monad.Reader
import Control.Concurrent.MVar
import Data.Aeson
import Data.ByteString.Char8
import qualified Data.Map as M
-- | The 'Traduisons' Monad gives you a stateful way to interact with the
-- translation API.
newtype Traduisons a = Traduisons {
unTraduisons :: ReaderT TraduisonsState (ExceptT TraduisonsError IO) a }
deriving (Functor, Applicative, Monad, MonadIO, MonadReader TraduisonsState,
MonadError TraduisonsError)
-- | The failure modes and their descriptions.
data TraduisonsError = TErr TraduisonsErrorFlag String
deriving (Show, Eq)
-- | 'TraduisonsState' is an authentication token acquired from
-- the translation API. It's stored in an MVar as it will need to be updated
-- regularly.
type TraduisonsState = TokenRef
newtype TokenRef = TokenRef { unTokenRef :: MVar TokenData }
liftExceptT :: ExceptT TraduisonsError IO a -> Traduisons a
liftExceptT = Traduisons . lift
-- | Runs a traduisons action
runTraduisons :: TraduisonsState -> Traduisons a
-> IO (Either TraduisonsError a)
runTraduisons = flip $ (runExceptT .) . runReaderT . unTraduisons
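-- A minimal usage sketch (not part of the original module); it only relies on
-- 'TokenRef' and 'emptyTokenData' defined below:
--
-- > do ref <- TokenRef <$> newMVar emptyTokenData
-- >    runTraduisons ref (liftIO (putStrLn "token store initialised"))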
instance Show TokenRef where
show = const "<TokenRef: API token reference>"
data TokenResponse = TokenResponse
{ trToken :: String -- ^ The auth token
, trScope :: String -- ^ The scope of the auth token. This limits capabilities
, trTokenType :: String
, trExpiresIn :: Seconds -- ^ The number of seconds until the token expires
} deriving (Show, Eq)
emptyTokenResponse :: TokenResponse
emptyTokenResponse = TokenResponse "" "" "" 0
data TokenData = TokenData
{ tdExpiresAt :: Seconds -- ^ The token's expiration time in epoch seconds
, tdToken :: TokenResponse -- ^ The auth token data
} deriving (Show, Eq)
emptyTokenData :: TokenData
emptyTokenData = TokenData 0 emptyTokenResponse
instance FromJSON TokenResponse where
parseJSON (Object o) = do
token <- o .: "access_token"
scope <- o .: "scope"
tokenType <- o .: "token_type"
expiry <- read <$> o .: "expires_in"
return $ TokenResponse token scope tokenType expiry
parseJSON _ = mzero
-- | A 'Message' represents a body of text in a known language
data Message = Message {msgLanguage :: Language, msgBody :: String}
deriving (Show, Eq)
mkMessage :: String -> String -> Message
mkMessage = Message . Language
newtype Language = Language {getLanguage :: String}
deriving (Show, Eq)
type Seconds = Integer
type FormData = [(ByteString, Maybe ByteString)]
type URL = String
-- | The application level state, independent of both the lower level
-- translation machinery and the higher level UI state
data AppState = AppState
{ asFromLang :: Language -- ^ The current input language
, asToLang :: Language -- ^ The target output language
, asHistory :: [(Command, Maybe Message)] -- ^ The collection of all inputs
-- and their translations thus far.
, asLanguageNameCodes :: M.Map String String
, asTraduisonsState :: TraduisonsState -- ^ The underlying translator's state
} deriving Show
instance Eq AppState where
AppState f l h _ _ == AppState f' l' h' _ _ = f == f' && l == l' && h == h'
-- | Operations that transform the 'AppState' in some way
data Command = SetFromLanguage String
| SetToLanguage String
| Translate String
| DetectLanguage String
| SwapLanguages
| Help
| Exit
deriving (Show, Eq)
{- Microsoft does not appear to have documented the possible exceptions that
the translator API might return, and they change them unannounced. -}
-- FIXME: We can get much better error handling if we translate an array of
-- text. See https://msdn.microsoft.com/en-us/library/ff512407.aspx
-- Also consider retrieving multiple translations.
-- https://msdn.microsoft.com/en-us/library/ff512403.aspx
-- | Anticipated error modes
data TraduisonsErrorFlag = ArgumentOutOfRangeException
| CurlError
| TraduisonsExit
| TraduisonsHelp
| ArgumentException
| LanguageDetectionError
| UnknownError
| UnrecognizedJSONError
deriving (Show, Eq, Enum)
|
johntyree/traduisons-hs
|
src/Traduisons/Types.hs
|
bsd-3-clause
| 4,438
| 0
| 11
| 1,022
| 789
| 455
| 334
| 81
| 1
|
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.Core32
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All raw functions, tokens and types from the OpenGL 3.2 core, see
-- <http://www.opengl.org/registry/>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.Core32 (
-- * OpenGL 3.1 core
module Graphics.Rendering.OpenGL.Raw.Core31,
  -- * Extensions which had some hidden entities in OpenGL 3.1 core
module Graphics.Rendering.OpenGL.Raw.ARB.UniformBufferObject,
-- * Extensions promoted to OpenGL 3.2 core
module Graphics.Rendering.OpenGL.Raw.ARB.CreateContextProfile,
module Graphics.Rendering.OpenGL.Raw.ARB.DrawElementsBaseVertex,
module Graphics.Rendering.OpenGL.Raw.ARB.ProvokingVertex,
module Graphics.Rendering.OpenGL.Raw.ARB.SeamlessCubeMap,
module Graphics.Rendering.OpenGL.Raw.ARB.TextureMultisample,
module Graphics.Rendering.OpenGL.Raw.ARB.DepthClamp,
module Graphics.Rendering.OpenGL.Raw.ARB.GeometryShader4,
module Graphics.Rendering.OpenGL.Raw.ARB.Sync,
-- * Additional OpenGL 3.2 entities
glGetInteger64i_v
) where
import Graphics.Rendering.OpenGL.Raw.Core31
import Graphics.Rendering.OpenGL.Raw.ARB.UniformBufferObject
import Graphics.Rendering.OpenGL.Raw.ARB.CreateContextProfile
import Graphics.Rendering.OpenGL.Raw.ARB.DrawElementsBaseVertex
import Graphics.Rendering.OpenGL.Raw.ARB.ProvokingVertex
import Graphics.Rendering.OpenGL.Raw.ARB.SeamlessCubeMap
import Graphics.Rendering.OpenGL.Raw.ARB.TextureMultisample
import Graphics.Rendering.OpenGL.Raw.ARB.DepthClamp
import Graphics.Rendering.OpenGL.Raw.ARB.GeometryShader4
import Graphics.Rendering.OpenGL.Raw.ARB.Sync
import Foreign.Ptr
import Foreign.C.Types
import Graphics.Rendering.OpenGL.Raw.Extensions
#include "HsOpenGLRaw.h"
extensionNameString :: String
extensionNameString = "OpenGL 3.2"
EXTENSION_ENTRY(dyn_glGetInteger64i_v,ptr_glGetInteger64i_v,"glGetInteger64i_v",glGetInteger64i_v,GLenum -> GLuint -> Ptr GLint64 -> IO ())
|
mfpi/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/Core32.hs
|
bsd-3-clause
| 2,292
| 1
| 11
| 230
| 292
| 218
| 74
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
module Graph where
import Data.Text (pack,unpack)
import Graphics.Blank
import Control.Arrow hiding (loop)
import System.Random
import GHC.Float
import LinReg
import Data.List
lsrf :: Int -> [(Float,Float)] -> [Float]
lsrf o l = map double2Float $ lsr o (map (float2Double *** float2Double) l)
sketch :: ((Float,Float) -> (Float,Float)) -> [(Float,Float)] -> Canvas ()
sketch f l = beginPath () >> moveTo (f $ head l) >> mapM_ (lineTo . f) l
dot :: ((Float,Float) -> (Float,Float)) -> (Float,Float) -> Canvas ()
dot f p = do
save ()
beginPath ()
translate (f p)
arc (0, 0, 8, 0, 2 * pi, False)
strokeStyle "#2222DD"
lineWidth 4
stroke ()
restore ()
adjust :: (Float, Float) -> [(Float, Float)] -> (Float, Float) -> (Float, Float)
adjust (w,h) l = f
where
((lx,ly),(hx,hy)) = bounds l
dx = hx - lx
dy = hy - ly
f (x,y) = (w * (x - lx) / dx, h * (hy - y) / dy)
bounds :: [(Float,Float)] -> ((Float,Float),(Float,Float))
bounds = ((minimum *** minimum) &&& (maximum *** maximum)) . unzip
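-- A quick sketch of 'bounds' (not in the original file):
--
-- >>> bounds [(1, 2), (3, 0)]
-- ((1.0,0.0),(3.0,2.0))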
annealedData :: Float -> [Float] -> IO [(Float, Float)]
annealedData m constants = do
gen <- newStdGen
return $ zipWith f (randomRs (0,m) gen) (functionData constants)
where f a (x,y) = (x, a+y-(m/2))
functionData :: [Float] -> [(Float, Float)]
functionData cs = map (id &&& poly cs) [0..]
poly :: [Float] -> Float -> Float
poly cs x = sum $ zipWith (*) cs $ map (x**) [0..]
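-- 'poly' evaluates a polynomial from its coefficients in ascending order of
-- degree, e.g. 1 + 2*x + 3*x^2 at x = 2 (sketch, not in the original file):
--
-- >>> poly [1, 2, 3] 2
-- 17.0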
colors :: [String]
colors = cycle $ words "#CC3300 #CC9900 #99CC00 #33CC00 #00CC33 #00CC99 #0099CC #0033CC #470AFF #7547FF #C2FF0A"
main :: IO ()
main = blankCanvas 5001 $ draw
draw :: DeviceContext -> IO ()
draw context =
do points <- fmap (take 18) $ annealedData 5000 [0,8,-3,1,0,-0.1]
let screenSize@(w,_) = (width context,height context)
let adjuster = adjust screenSize (concat $ points : fits)
cs = flip map [2..8] $ \x -> lsrf x points
fits = map (\c -> map (id &&& poly c) (map fst points)) cs
-- send context $ clearRect (0,0,w,h)
flip mapM_ (take 3 $ zip4 [0..] cs colors fits) $ \(i, c, color, fit) -> do
send context $ do
strokeStyle (pack color)
sketch adjuster fit
lineWidth 4
stroke ()
save ()
fillStyle (pack color)
font "15pt arial"
fillText(pack(show c), w - 450, 50 + 30 * i)
restore ()
flip mapM_ points $ send context . dot adjuster
send context $ do
save ()
translate (-30, 100)
font "40pt arial"
fillText("CanvasGraph", 150, 100)
translate (10, 70)
font "30pt times"
fillText("Simple, Cross-Platform Graphing", 150, 100)
translate (0, 70)
font "30pt times"
fillText("From Haskell -> To Your Browser", 150, 100)
restore ()
|
sordina/CanvasGraph
|
Graph.hs
|
bsd-3-clause
| 3,038
| 0
| 18
| 940
| 1,338
| 698
| 640
| 75
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Test.Framework.TH (defaultMainGenerator)
import Test.HUnit
import Test.Framework.Providers.HUnit (testCase)
import Test.QuickCheck
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Euler.Problem16 (sumDigitsPowerOfTwo)
main = $(defaultMainGenerator)
case_small =
sumDigitsPowerOfTwo 15 @?= 26
|
FranklinChen/project-euler-haskell
|
Test/Test16.hs
|
bsd-3-clause
| 376
| 0
| 6
| 41
| 81
| 50
| 31
| 11
| 1
|
{-# LANGUAGE RecursiveDo #-}
module Main where
import LOGL.Application
import Foreign.Ptr
import Graphics.UI.GLFW as GLFW
import Graphics.Rendering.OpenGL.GL as GL hiding (normalize)
import Graphics.GLUtil
import System.FilePath
import Graphics.Rendering.OpenGL.GL.Shaders.ProgramObjects
import Linear.Matrix
import Linear.V3
import Linear.Vector
import Linear.Quaternion
import Linear.Projection
import Linear.Metric
import Reactive.Banana.Frameworks
import Reactive.Banana.Combinators hiding (empty)
import LOGL.FRP
import LOGL.Objects
cubePositions :: [V3 GLfloat]
cubePositions = [
V3 0.0 0.0 0.0,
V3 2.0 5.0 (-15.0),
V3 (-1.5) (-2.2) (-2.5),
V3 (-3.8) (-2.0) (-12.3),
V3 2.4 (-0.4) (-3.5),
V3 (-1.7) 3.0 (-7.5),
V3 1.3 (-2.0) (-2.5),
V3 1.5 2.0 (-2.5),
V3 1.5 0.2 (-1.5),
V3 (-1.3) 1.0 (-1.5)]
data Camera = Camera { pos :: V3 GLfloat,
front :: V3 GLfloat,
up :: V3 GLfloat,
lastFrame :: Double,
lastX :: GLfloat,
lastY :: GLfloat,
yaw :: GLfloat,
pitch :: GLfloat,
firstMouse :: Bool,
fov :: GLfloat}
deriving (Eq, Show)
main :: IO ()
main = do
GLFW.init
w <- createAppWindow 800 600 "LearnOpenGL"
setCursorInputMode (window w) CursorInputMode'Disabled
depthFunc $= Just Less
shader <- simpleShaderProgram ("data" </> "1_Getting-started" </> "6_Coordinate-systems" </> "coord-systems.vs")
("data" </> "1_Getting-started" </> "6_Coordinate-systems" </> "coord-systems.frag")
(vao, vbo) <- createVAO
-- load and create texture
t0 <- createTexture ("data" </> "1_Getting-started" </> "4_Textures" </> "Textures" </> "container.jpg")
t1 <- createTexture ("data" </> "1_Getting-started" </> "4_Textures" </> "Textures-combined" </> "awesomeface3.png")
-- init camera
let initCam = Camera { pos = V3 0.0 0.0 3.0, front = V3 0.0 0.0 (-1.0) , up = V3 0.0 1.0 0.0,
lastFrame = 0.0, lastX = 400.0, lastY = 300.0, yaw = -90.0,
pitch = 0.0, firstMouse = True, fov = 45.0}
--polygonMode $= (Line, Line)
let networkDescription :: MomentIO ()
networkDescription = mdo
posE <- cursorPosEvent w
scrollE <- scrollEvent w
idleE <- idleEvent w
timeB <- currentTimeB
keyB <- keyBehavior w
camB <- accumB initCam $ unions [
handleScrollEvent <$> scrollE,
handlePosEvent <$> posE,
(doMovement <$> keyB ) <@> (timeB <@ idleE)]
reactimate $ drawScene shader t0 t1 vao w <$> (camB <@ idleE)
runAppLoopEx w networkDescription
deleteObjectName vao
deleteObjectName vbo
terminate
handleScrollEvent :: ScrollEvent -> Camera -> Camera
handleScrollEvent (w, xoffset, yoffset) cam = cam { fov = restrictFov newFov}
where
newFov = fov cam - realToFrac yoffset
restrictFov :: GLfloat -> GLfloat
restrictFov f
| f < 1.0 = 1.0
| f > 45.0 = 45.0
| otherwise = f
handlePosEvent :: CursorPosEvent -> Camera -> Camera
handlePosEvent (w, xpos, ypos) cam = cam {lastX = realToFrac xpos, lastY = realToFrac ypos,
yaw = newYaw, pitch = newPitch,
front = normalize (V3 newFrontX newFrontY newFrontZ),
firstMouse = False}
where
lx = if firstMouse cam then realToFrac xpos else lastX cam
ly = if firstMouse cam then realToFrac ypos else lastY cam
sensivity = 0.5
xoffset = ( realToFrac xpos - lx) * sensivity
yoffset = (ly - realToFrac ypos) * sensivity
newYaw = yaw cam + xoffset
newPitch = restrictPitch $ pitch cam + yoffset
newFrontX = cos (radians newYaw) * cos (radians newPitch)
newFrontY = sin (radians newPitch)
newFrontZ = sin (radians newYaw) * cos (radians newPitch)
radians :: GLfloat -> GLfloat
radians deg = pi / 180.0 * deg
restrictPitch :: GLfloat -> GLfloat
restrictPitch p
| p > 89.0 = 89.0
| p < (-89.0) = -89.0
| otherwise = p
doMovement :: Keys -> Double -> Camera -> Camera
doMovement keys time cam = afterMoveRight {lastFrame = time}
where
speed = 5.0 * realToFrac (time - lastFrame cam)
upPressed = keyPressed Key'W keys
downPressed = keyPressed Key'S keys
leftPressed = keyPressed Key'A keys
rightPressed = keyPressed Key'D keys
afterZoomIn = if upPressed then moveForeward speed cam else cam
afterZoomOut = if downPressed then moveBackward speed afterZoomIn else afterZoomIn
afterMoveLeft = if leftPressed then moveLeft speed afterZoomOut else afterZoomOut
afterMoveRight = if rightPressed then moveRight speed afterMoveLeft else afterMoveLeft
moveForeward :: GLfloat -> Camera -> Camera
moveForeward speed cam = cam { pos = pos cam ^+^ (speed *^ front cam) }
moveBackward :: GLfloat -> Camera -> Camera
moveBackward speed cam = cam { pos = pos cam ^-^ (speed *^ front cam) }
moveLeft :: GLfloat -> Camera -> Camera
moveLeft speed cam = cam { pos = pos cam ^-^ (speed *^ normalize (cross (front cam ) (up cam)))}
moveRight :: GLfloat -> Camera -> Camera
moveRight speed cam = cam { pos = pos cam ^+^ (speed *^ normalize (cross (front cam ) (up cam)))}
drawScene :: ShaderProgram -> TextureObject -> TextureObject -> VertexArrayObject -> AppWindow -> Camera -> IO ()
drawScene shader t0 t1 vao w cam = do
pollEvents
clearColor $= Color4 0.2 0.3 0.3 1.0
clear [ColorBuffer, DepthBuffer]
-- Draw our first triangle
currentProgram $= Just (program shader)
activeTexture $= TextureUnit 0
textureBinding Texture2D $= Just t0
setUniform shader "ourTexture1" (TextureUnit 0)
activeTexture $= TextureUnit 1
textureBinding Texture2D $= Just t1
setUniform shader "ourTexture2" (TextureUnit 1)
let view = lookAt (pos cam) (pos cam + front cam) (up cam)
projection = perspective (radians (fov cam)) (800.0 / 600.0) 0.1 (100.0 :: GLfloat)
setUniform shader "view" view
setUniform shader "projection" projection
withVAO vao $ mapM_ (drawCube shader) [0..9]
swap w
drawCube :: ShaderProgram -> Int -> IO ()
drawCube shader i = do
let angle = pi / 180.0 * 20.0 * fromIntegral i
rot = axisAngle (V3 (1.0 :: GLfloat) 0.3 0.5) (realToFrac angle)
model = mkTransformation rot (cubePositions !! i)
setUniform shader "model" model
drawArrays Triangles 0 36
createVAO :: IO (VertexArrayObject, BufferObject)
createVAO = do
vao <- genObjectName
bindVertexArrayObject $= Just vao
vbo <- makeBuffer ArrayBuffer cubeWithTexture
vertexAttribPointer (AttribLocation 0) $= (ToFloat, VertexArrayDescriptor 3 Float (5*4) offset0)
vertexAttribArray (AttribLocation 0) $= Enabled
vertexAttribPointer (AttribLocation 2) $= (ToFloat, VertexArrayDescriptor 2 Float (5*4) (offsetPtr (3*4)))
vertexAttribArray (AttribLocation 2) $= Enabled
bindVertexArrayObject $= Nothing
return (vao, vbo)
|
atwupack/LearnOpenGL
|
app/1_Getting-started/7_Camera/Camera-zoom.hs
|
bsd-3-clause
| 7,283
| 0
| 18
| 1,992
| 2,336
| 1,203
| 1,133
| 157
| 5
|
{-# LANGUAGE OverloadedStrings, NamedFieldPuns #-}
module ReferenceTests where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Base64.URL as Base64url
import qualified Data.ByteString.Base16 as Base16
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Vector as V
import Data.Scientific (fromFloatDigits, toRealFloat)
import Data.Aeson as Aeson
import Control.Applicative
import Control.Monad
import Data.Word
import qualified Numeric.Half as Half
import Test.Tasty.HUnit
import ReferenceImpl as CBOR
data TestCase = TestCase {
encoded :: !LBS.ByteString,
decoded :: !(Either Aeson.Value String),
roundTrip :: !Bool
}
deriving Show
instance FromJSON TestCase where
parseJSON =
withObject "cbor test" $ \obj -> do
encoded64 <- T.encodeUtf8 <$> obj .: "cbor"
encoded <- either (fail "invalid base64") return $
Base64.decode encoded64
encoded16 <- T.encodeUtf8 <$> obj .: "hex"
let encoded' = fst (Base16.decode encoded16)
when (encoded /= encoded') $
fail "hex and cbor encoding mismatch in input"
roundTrip <- obj .: "roundtrip"
decoded <- Left <$> obj .: "decoded"
<|> Right <$> obj .: "diagnostic"
return $! TestCase {
encoded = LBS.fromStrict encoded,
roundTrip,
decoded
}
loadTestCases :: IO [TestCase]
loadTestCases = do
content <- LBS.readFile "tests/test-vectors/appendix_a.json"
either fail return (Aeson.eitherDecode' content)
externalTestCase :: TestCase -> Assertion
externalTestCase TestCase { encoded, decoded = Left expectedJson } = do
let term = deserialise encoded
actualJson = termToJson term
reencoded = serialise term
expectedJson `equalJson` actualJson
encoded @=? reencoded
externalTestCase TestCase { encoded, decoded = Right expectedDiagnostic } = do
let term = deserialise encoded
actualDiagnostic = diagnosticNotation term
reencoded = serialise term
expectedDiagnostic @=? actualDiagnostic
encoded @=? reencoded
equalJson :: Aeson.Value -> Aeson.Value -> Assertion
equalJson (Aeson.Number expected) (Aeson.Number actual)
| toRealFloat expected == promoteDouble (toRealFloat actual)
= return ()
where
    -- This is because the expected JSON output always uses double precision,
    -- whereas Aeson's Scientific type preserves the precision of the input.
-- So for tests using Float, we're more precise than the reference values.
promoteDouble :: Float -> Double
promoteDouble = realToFrac
equalJson expected actual = expected @=? actual
termToJson :: CBOR.Term -> Aeson.Value
termToJson (TUInt n) = Aeson.Number (fromIntegral (fromUInt n))
termToJson (TNInt n) = Aeson.Number (-1 - fromIntegral (fromUInt n))
termToJson (TBigInt n) = Aeson.Number (fromIntegral n)
termToJson (TBytes ws) = Aeson.String (bytesToBase64Text ws)
termToJson (TBytess wss) = Aeson.String (bytesToBase64Text (concat wss))
termToJson (TString cs) = Aeson.String (T.pack cs)
termToJson (TStrings css) = Aeson.String (T.pack (concat css))
termToJson (TArray ts) = Aeson.Array (V.fromList (map termToJson ts))
termToJson (TArrayI ts) = Aeson.Array (V.fromList (map termToJson ts))
termToJson (TMap kvs) = Aeson.object [ (T.pack k, termToJson v)
| (TString k,v) <- kvs ]
termToJson (TMapI kvs) = Aeson.object [ (T.pack k, termToJson v)
| (TString k,v) <- kvs ]
termToJson (TTagged _ t) = termToJson t
termToJson TTrue = Aeson.Bool True
termToJson TFalse = Aeson.Bool False
termToJson TNull = Aeson.Null
termToJson TUndef = Aeson.Null -- replacement value
termToJson (TSimple _) = Aeson.Null -- replacement value
termToJson (TFloat16 f) = Aeson.Number (fromFloatDigits (Half.fromHalf f))
termToJson (TFloat32 f) = Aeson.Number (fromFloatDigits f)
termToJson (TFloat64 f) = Aeson.Number (fromFloatDigits f)
bytesToBase64Text :: [Word8] -> T.Text
bytesToBase64Text = T.decodeLatin1 . Base64url.encode . BS.pack
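-- Sketch (not in the original file):
--
-- >>> bytesToBase64Text [0x01, 0x02, 0x03, 0x04]
-- "AQIDBA=="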
expectedDiagnosticNotation :: String -> [Word8] -> Assertion
expectedDiagnosticNotation expectedDiagnostic encoded = do
let Just (term, []) = runDecoder decodeTerm encoded
actualDiagnostic = diagnosticNotation term
expectedDiagnostic @=? actualDiagnostic
-- | The reference implementation satisfies the roundtrip property for most
-- examples (all the ones from Appendix A). It does not satisfy the roundtrip
-- property in general however, non-canonical over-long int encodings for
-- example.
--
--
encodedRoundtrip :: String -> [Word8] -> Assertion
encodedRoundtrip expectedDiagnostic encoded = do
let Just (term, []) = runDecoder decodeTerm encoded
reencoded = encodeTerm term
assertEqual ("for CBOR: " ++ expectedDiagnostic) encoded reencoded
-- | The examples from the CBOR spec RFC7049 Appendix A.
-- The diagnostic notation and encoded bytes.
--
specTestVector :: [(String, [Word8])]
specTestVector =
[ ("0", [0x00])
, ("1", [0x01])
, ("10", [0x0a])
, ("23", [0x17])
, ("24", [0x18, 0x18])
, ("25", [0x18, 0x19])
, ("100", [0x18, 0x64])
, ("1000", [0x19, 0x03, 0xe8])
, ("1000000", [0x1a, 0x00, 0x0f, 0x42, 0x40])
, ("1000000000000", [0x1b, 0x00, 0x00, 0x00, 0xe8, 0xd4, 0xa5, 0x10, 0x00])
, ("18446744073709551615", [0x1b, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff])
, ("18446744073709551616", [0xc2, 0x49, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
, ("-18446744073709551616", [0x3b, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff])
, ("-18446744073709551617", [0xc3, 0x49, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
, ("-1", [0x20])
, ("-10", [0x29])
, ("-100", [0x38, 0x63])
, ("-1000", [0x39, 0x03, 0xe7])
, ("0.0", [0xf9, 0x00, 0x00])
, ("-0.0", [0xf9, 0x80, 0x00])
, ("1.0", [0xf9, 0x3c, 0x00])
, ("1.1", [0xfb, 0x3f, 0xf1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9a])
, ("1.5", [0xf9, 0x3e, 0x00])
, ("65504.0", [0xf9, 0x7b, 0xff])
, ("100000.0", [0xfa, 0x47, 0xc3, 0x50, 0x00])
, ("3.4028234663852886e38", [0xfa, 0x7f, 0x7f, 0xff, 0xff])
, ("1.0e300", [0xfb, 0x7e, 0x37, 0xe4, 0x3c, 0x88, 0x00, 0x75, 0x9c])
, ("5.960464477539063e-8", [0xf9, 0x00, 0x01])
, ("0.00006103515625", [0xf9, 0x04, 0x00])
, ("-4.0", [0xf9, 0xc4, 0x00])
, ("-4.1", [0xfb, 0xc0, 0x10, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66])
, ("Infinity", [0xf9, 0x7c, 0x00])
, ("NaN", [0xf9, 0x7e, 0x00])
, ("-Infinity", [0xf9, 0xfc, 0x00])
, ("Infinity", [0xfa, 0x7f, 0x80, 0x00, 0x00])
, ("NaN", [0xfa, 0x7f, 0xc0, 0x00, 0x00])
, ("-Infinity", [0xfa, 0xff, 0x80, 0x00, 0x00])
, ("Infinity", [0xfb, 0x7f, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
, ("NaN", [0xfb, 0x7f, 0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
, ("-Infinity", [0xfb, 0xff, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])
, ("false", [0xf4])
, ("true", [0xf5])
, ("null", [0xf6])
, ("undefined", [0xf7])
, ("simple(16)", [0xf0])
, ("simple(24)", [0xf8, 0x18])
, ("simple(255)", [0xf8, 0xff])
, ("0(\"2013-03-21T20:04:00Z\")",
[0xc0, 0x74, 0x32, 0x30, 0x31, 0x33, 0x2d, 0x30, 0x33, 0x2d, 0x32, 0x31,
0x54, 0x32, 0x30, 0x3a, 0x30, 0x34, 0x3a, 0x30, 0x30, 0x5a])
, ("1(1363896240)", [0xc1, 0x1a, 0x51, 0x4b, 0x67, 0xb0])
, ("1(1363896240.5)", [0xc1, 0xfb, 0x41, 0xd4, 0x52, 0xd9, 0xec, 0x20, 0x00, 0x00])
, ("23(h'01020304')", [0xd7, 0x44, 0x01, 0x02, 0x03, 0x04])
, ("24(h'6449455446')", [0xd8, 0x18, 0x45, 0x64, 0x49, 0x45, 0x54, 0x46])
, ("32(\"http://www.example.com\")",
[0xd8, 0x20, 0x76, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x77, 0x77,
0x77, 0x2e, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x2e, 0x63, 0x6f, 0x6d])
, ("h''", [0x40])
, ("h'01020304'", [0x44, 0x01, 0x02, 0x03, 0x04])
, ("\"\"", [0x60])
, ("\"a\"", [0x61, 0x61])
, ("\"IETF\"", [0x64, 0x49, 0x45, 0x54, 0x46])
, ("\"\\\"\\\\\"", [0x62, 0x22, 0x5c])
, ("\"\\252\"", [0x62, 0xc3, 0xbc])
, ("\"\\27700\"", [0x63, 0xe6, 0xb0, 0xb4])
, ("\"\\65873\"", [0x64, 0xf0, 0x90, 0x85, 0x91])
, ("[]", [0x80])
, ("[1, 2, 3]", [0x83, 0x01, 0x02, 0x03])
, ("[1, [2, 3], [4, 5]]", [0x83, 0x01, 0x82, 0x02, 0x03, 0x82, 0x04, 0x05])
, ("[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]",
[0x98, 0x19, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a,
0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16,
0x17, 0x18, 0x18, 0x18, 0x19])
, ("{}", [0xa0])
, ("{1: 2, 3: 4}", [0xa2, 0x01, 0x02, 0x03, 0x04])
, ("{\"a\": 1, \"b\": [2, 3]}", [0xa2, 0x61, 0x61, 0x01, 0x61, 0x62, 0x82, 0x02, 0x03])
, ("[\"a\", {\"b\": \"c\"}]", [0x82, 0x61, 0x61, 0xa1, 0x61, 0x62, 0x61, 0x63])
, ("{\"a\": \"A\", \"b\": \"B\", \"c\": \"C\", \"d\": \"D\", \"e\": \"E\"}",
[0xa5, 0x61, 0x61, 0x61, 0x41, 0x61, 0x62, 0x61, 0x42, 0x61, 0x63, 0x61,
0x43, 0x61, 0x64, 0x61, 0x44, 0x61, 0x65, 0x61, 0x45])
, ("(_ h'0102', h'030405')", [0x5f, 0x42, 0x01, 0x02, 0x43, 0x03, 0x04, 0x05, 0xff])
, ("(_ \"strea\", \"ming\")", [0x7f, 0x65, 0x73, 0x74, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x69, 0x6e, 0x67, 0xff])
, ("[_ ]", [0x9f, 0xff])
, ("[_ 1, [2, 3], [_ 4, 5]]", [0x9f, 0x01, 0x82, 0x02, 0x03, 0x9f, 0x04, 0x05, 0xff, 0xff])
, ("[_ 1, [2, 3], [4, 5]]", [0x9f, 0x01, 0x82, 0x02, 0x03, 0x82, 0x04, 0x05, 0xff])
, ("[1, [2, 3], [_ 4, 5]]", [0x83, 0x01, 0x82, 0x02, 0x03, 0x9f, 0x04, 0x05, 0xff])
, ("[1, [_ 2, 3], [4, 5]]", [0x83, 0x01, 0x9f, 0x02, 0x03, 0xff, 0x82, 0x04, 0x05])
, ("[_ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]",
[0x9f, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
0x18, 0x18, 0x18, 0x19, 0xff])
, ("{_ \"a\": 1, \"b\": [_ 2, 3]}", [0xbf, 0x61, 0x61, 0x01, 0x61, 0x62, 0x9f, 0x02, 0x03, 0xff, 0xff])
, ("[\"a\", {_ \"b\": \"c\"}]", [0x82, 0x61, 0x61, 0xbf, 0x61, 0x62, 0x61, 0x63, 0xff])
, ("{_ \"Fun\": true, \"Amt\": -2}", [0xbf, 0x63, 0x46, 0x75, 0x6e, 0xf5, 0x63, 0x41, 0x6d, 0x74, 0x21, 0xff])
]
--TODO: test redundant encodings e.g.
-- bigint with zero-length bytestring
-- bigint with leading zeros
--  bigint using indefinite bytestring encoding
-- larger than necessary ints, lengths, tags, simple etc
|
thoughtpolice/binary-serialise-cbor
|
tests/ReferenceTests.hs
|
bsd-3-clause
| 11,016
| 0
| 16
| 2,589
| 3,696
| 2,257
| 1,439
| 201
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Routing(
routing
)where
import Control.Monad.IO.Class(MonadIO,liftIO)
import Control.Monad.Trans
import Control.Monad.Reader
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Map as M
import qualified Data.ByteString.Char8 as C8
import qualified Web.Scotty.Trans as Web
import Network.HTTP.Types.Status
import Network.Wai (Middleware,rawPathInfo,rawQueryString)
import Network.Wai.Middleware.Static
import Network.Wai.Middleware.RequestLogger (logStdout, logStdoutDev)
import App.Types
import Utils.URI.Params
import Utils.URI.String
import Views.Common.Render
import qualified Handlers.Auth.Login as HLogin
import qualified Handlers.Auth.Callback as HCallback
import qualified Handlers.Page.Home as HPHome
onError :: ServerError -> Response ()
onError err = do
Web.status (status err)
if (status err) == unauthorized401
then redirectToAuth
else if (status err) == status500
      then renderPage "500.html" "出错了" >>= Web.html -- "出错了": "something went wrong"
      else renderPage "404.html" "页面不见了" >>= Web.html -- "页面不见了": "the page is gone"
where
redirectToAuth = do
r <- Web.request
let path = rawPathInfo r
let query = rawQueryString r
let destURL = C8.concat [path,query]
let redirectURL = updateUrlParam "_r" ( C8.unpack destURL) (toURI "/auth/login")
Web.redirect $ LT.pack $ show redirectURL
renderPage tpl title = do
setTpl tpl
setTplValue "title" $ T.pack title
setTplValue "details" $ message err
render
routing = do
Web.defaultHandler onError
Web.middleware $ logStdoutDev
Web.middleware $ staticPolicy (noDots >-> addBase "static")
Web.get "/auth/login" $ HLogin.indexR
Web.get "/auth/callback" $ HCallback.indexR
Web.get "/home" $ HPHome.indexR
Web.notFound $ Web.raise RouteNotFound
|
DavidAlphaFox/sblog
|
src/Routing.hs
|
bsd-3-clause
| 1,868
| 0
| 15
| 325
| 528
| 286
| 242
| 51
| 3
|
tarai :: Int -> Int -> Int -> Int
tarai x y z
| x <= y = y
| otherwise = tarai (tarai (x-1) y z)
(tarai (y-1) z x)
(tarai (z-1) x y)
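-- Note (not in the original file): tarai has the well-known closed form
-- tarai x y z = if x <= y then y else (if y <= z then z else x),
-- so the call in 'main' below evaluates to 122.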
main :: Stream -> Stream
main stdin = toStream $ show $ tarai 122 52 10
|
tromp/hs2blc
|
examples/tarai.hs
|
bsd-3-clause
| 265
| 0
| 10
| 111
| 139
| 69
| 70
| 8
| 1
|
{-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleInstances #-}
module NewTypes where
class TooMany a where
tooMany :: a -> Bool
instance TooMany Int where
tooMany n = n > 42
newtype Goats = Goats Int deriving (Show, TooMany)
{- not needed anymore as pragma GeneralizedNewtypeDeriving is used above
instance TooMany Goats where
tooMany (Goats n) = n > 43
-}
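-- The derived instance reuses the underlying Int instance, e.g.
-- (sketch, not in the original file):
--
-- >>> tooMany (Goats 43)
-- True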
instance TooMany (Int, String) where
tooMany (n, _) = n > 52
instance TooMany (Int, Int) where
tooMany (n, n1) = n + n1 > 33
-- Constraints go before the '=>', not inside the instance head; the original
-- attempt "instance TooMany (Num a, TooMany a => (a, a))" fails with
-- "Expected a type, but Num a has kind 'Constraint'"
instance (Num a, TooMany a) => TooMany (a, a) where
  tooMany (n, n1) = n + n1 > 22
|
chengzh2008/hpffp
|
src/ch11-AlgebraicDataTypes/newTypes.hs
|
bsd-3-clause
| 688
| 0
| 8
| 142
| 185
| 103
| 82
| -1
| -1
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Mafia.Process
( -- * Inputs
File
, Directory
, Argument
, EnvKey
, EnvValue
, Process(..)
-- * Outputs
, Pass(..)
, PassErr(..)
, PassErrAnnihilate(..)
, Clean(..)
, Hush(..)
, Out(..)
, Err(..)
, OutErr(..)
, OutErrCode(..)
, renderOutErrCode
-- * Errors
, ProcessError(..)
, ExitStatus
, ExitCode(..)
, renderProcessError
-- * Running Processes
, ProcessResult(..)
, call
, call_
, callFrom
, callFrom_
, capture
, exec
, execFrom
-- * Internal (exported for testing)
, cleanLines
) where
import Control.Concurrent.Async (Async, async, waitCatch)
import Control.Exception (SomeException, IOException, toException)
import Control.Monad.Catch (MonadCatch(..), handle, bracket_)
import Control.Monad.Trans.Either (EitherT, firstEitherT, left, hoistEither, newEitherT)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as B
import Data.Map (Map)
import qualified Data.Map as Map
import Data.String (String)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Mafia.Path (File, Directory)
import Mafia.IO (setCurrentDirectory)
import Mafia.P
import System.Exit (ExitCode(..))
import System.IO (IO, FilePath, Handle, BufferMode(..))
import qualified System.IO as IO
import qualified System.Process as Process
import qualified System.Process.Internals as ProcessInternals
import qualified System.Posix.Types as Posix
import qualified System.Posix.Process as Posix
import qualified System.Posix.Signals as Signals
------------------------------------------------------------------------
type Argument = Text
type EnvKey = Text
type EnvValue = Text
data Process = Process
{ processCommand :: File
, processArguments :: [Argument]
, processDirectory :: Maybe Directory
, processEnvironment :: Maybe (Map EnvKey EnvValue)
} deriving (Eq, Ord, Show)
------------------------------------------------------------------------
-- | Pass @stdout@ and @stderr@ through to the console.
data Pass =
Pass
deriving (Eq, Ord, Show)
-- | Pass @stdout@ and @stderr@ through to the console, but redirect @stdout@ > @stderr@.
data PassErr =
PassErr
deriving (Eq, Ord, Show)
-- | Pass @stdout@ and @stderr@ through to the console, but redirect @stdout@ > @stderr@; also kill *everything* on Ctrl-C.
data PassErrAnnihilate =
PassErrAnnihilate
deriving (Eq, Ord, Show)
-- | Pass @stdout@ and @stderr@ through to the console, but process control
-- characters (such as \b, \r) prior to emitting each line of output.
data Clean =
Clean
deriving (Eq, Ord, Show)
-- | Capture @stdout@ and @stderr@ but ignore them.
data Hush =
Hush
deriving (Eq, Ord, Show)
-- | Capture @stdout@ and pass @stderr@ through to the console.
newtype Out a =
Out {
unOut :: a
} deriving (Eq, Ord, Show, Functor)
-- | Capture @stderr@ and pass @stdout@ through to the console.
newtype Err a =
Err {
unErr :: a
} deriving (Eq, Ord, Show, Functor)
-- | Capture both @stdout@ and @stderr@.
data OutErr a =
OutErr !a !a
deriving (Eq, Ord, Show, Functor)
-- | Capture @stdout@, @stderr@ and the 'ExitCode'.
-- /This never causes a @ProcessFailure@/
data OutErrCode a =
OutErrCode !a !a !ExitCode
deriving (Eq, Ord, Show, Functor)
renderOutErrCode :: OutErrCode Text -> Text
renderOutErrCode (OutErrCode out0 err0 exit) =
let
out =
T.strip out0
err =
T.strip err0
output =
out <> (if T.null out then "" else "\n") <>
err
in
case exit of
ExitFailure code ->
"Process failed with exit code: " <> T.pack (show code) <> "\n" <>
output
ExitSuccess ->
"Process finished successfully:\n" <>
output
------------------------------------------------------------------------
type ExitStatus =
Int
data ProcessError =
ProcessFailure !Process !ExitStatus
| ProcessException !Process !SomeException
deriving (Show)
renderProcessError :: ProcessError -> Text
renderProcessError = \case
ProcessFailure p code ->
"Process failed: " <> T.intercalate " " (processCommand p : processArguments p) <>
" (exit code: " <> T.pack (show code) <> ")"
ProcessException p ex ->
"Process failed: " <> T.intercalate " " (processCommand p : processArguments p) <>
"\n" <> T.pack (show ex)
------------------------------------------------------------------------
createProcess :: MonadIO m => Process.CreateProcess -> m (Maybe Handle, Maybe Handle, Maybe Handle, Process.ProcessHandle)
createProcess = liftIO . Process.createProcess
-- Spawn a new process, and if we get a ctrl-c, make absolutely sure everything we started is finished.
createProcessAnnihilate :: (MonadIO m, MonadCatch m) => Process.CreateProcess -> m (Maybe Handle, Maybe Handle, Maybe Handle, Process.ProcessHandle)
createProcessAnnihilate cp = do
(a, b, c, ph) <- createProcess cp { Process.create_group = True }
pgid <- tryProcessGroupOfProcessHandle ph
fromMaybe (return ()) (installInterruptHandler <$> pgid)
return (a, b, c, ph)
tryPosixPidOfProcessHandle :: MonadIO m => Process.ProcessHandle -> m (Maybe Posix.ProcessID)
tryPosixPidOfProcessHandle ph =
liftIO $ ProcessInternals.withProcessHandle ph $
\case
ProcessInternals.OpenHandle i -> return $ Just i
ProcessInternals.ClosedHandle _ -> return $ Nothing
tryProcessGroupOfProcessHandle :: (MonadIO m, MonadCatch m) => Process.ProcessHandle -> m (Maybe Posix.ProcessGroupID)
tryProcessGroupOfProcessHandle ph = do
pid <- tryPosixPidOfProcessHandle ph
case pid of
Nothing -> return Nothing
Just h -> handle ignoreIOE $ do
pgid <- liftIO (Posix.getProcessGroupIDOf h)
return $ Just pgid
where
ignoreIOE (_ :: IOException) = return Nothing
installInterruptHandler :: MonadIO m => Posix.ProcessGroupID -> m ()
installInterruptHandler pgid = do
_ <- liftIO $ Signals.installHandler Signals.keyboardSignal (Signals.Catch $ Signals.signalProcessGroup Signals.keyboardTermination pgid) Nothing
return ()
class ProcessResult a where
callProcess :: (Functor m, MonadIO m, MonadCatch m)
=> Process -> EitherT ProcessError m a
instance ProcessResult Pass where
callProcess p = withProcess p $ do
let cp = fromProcess p
(Nothing, Nothing, Nothing, pid) <- createProcess cp
code <- liftIO (Process.waitForProcess pid)
return (code, Pass)
instance ProcessResult PassErr where
callProcess p = withProcess p $ do
let cp = (fromProcess p) { Process.std_out = Process.UseHandle IO.stderr }
(Nothing, Nothing, Nothing, pid) <- createProcess cp
code <- liftIO (Process.waitForProcess pid)
return (code, PassErr)
instance ProcessResult PassErrAnnihilate where
callProcess p = withProcess p $ do
let cp = (fromProcess p) { Process.std_out = Process.UseHandle IO.stderr }
(Nothing, Nothing, Nothing, pid) <- createProcessAnnihilate cp
code <- liftIO (Process.waitForProcess pid)
return (code, PassErrAnnihilate)
instance ProcessResult (Out ByteString) where
callProcess p = withProcess p $ do
let cp = (fromProcess p) { Process.std_out = Process.CreatePipe }
(Nothing, Just hOut, Nothing, pid) <- createProcess cp
out <- liftIO (B.hGetContents hOut)
code <- liftIO (Process.waitForProcess pid)
return (code, Out out)
instance ProcessResult (Err ByteString) where
callProcess p = withProcess p $ do
let cp = (fromProcess p) { Process.std_err = Process.CreatePipe }
(Nothing, Nothing, Just hErr, pid) <- createProcess cp
err <- liftIO (B.hGetContents hErr)
code <- liftIO (Process.waitForProcess pid)
return (code, Err err)
instance ProcessResult (OutErr ByteString) where
callProcess p = withProcess p $ do
let cp = (fromProcess p) { Process.std_out = Process.CreatePipe
, Process.std_err = Process.CreatePipe }
(Nothing, Just hOut, Just hErr, pid) <- createProcess cp
asyncOut <- liftIO (async (B.hGetContents hOut))
asyncErr <- liftIO (async (B.hGetContents hErr))
out <- waitCatchE p asyncOut
err <- waitCatchE p asyncErr
code <- liftIO (Process.waitForProcess pid)
return (code, OutErr out err)
instance ProcessResult (OutErrCode ByteString) where
callProcess p = withProcess p $ do
let cp = (fromProcess p) { Process.std_out = Process.CreatePipe
, Process.std_err = Process.CreatePipe }
(Nothing, Just hOut, Just hErr, pid) <- createProcess cp
asyncOut <- liftIO (async (B.hGetContents hOut))
asyncErr <- liftIO (async (B.hGetContents hErr))
out <- waitCatchE p asyncOut
err <- waitCatchE p asyncErr
code <- liftIO (Process.waitForProcess pid)
return (ExitSuccess, OutErrCode out err code)
instance ProcessResult Hush where
callProcess p = do
OutErr (_ :: ByteString) (_ :: ByteString) <- callProcess p
return Hush
instance ProcessResult Clean where
callProcess p = withProcess p $ do
let cp = (fromProcess p) { Process.std_out = Process.CreatePipe
, Process.std_err = Process.CreatePipe }
(Nothing, Just hOut, Just hErr, pid) <- createProcess cp
asyncOut <- liftIO (async (clean hOut IO.stdout))
asyncErr <- liftIO (async (clean hErr IO.stderr))
() <- waitCatchE p asyncOut
() <- waitCatchE p asyncErr
code <- liftIO (Process.waitForProcess pid)
return (code, Clean)
instance ProcessResult (Out Text) where
callProcess p = fmap T.decodeUtf8 <$> callProcess p
instance ProcessResult (Err Text) where
callProcess p = fmap T.decodeUtf8 <$> callProcess p
instance ProcessResult (OutErr Text) where
callProcess p = fmap T.decodeUtf8 <$> callProcess p
instance ProcessResult (OutErrCode Text) where
callProcess p = fmap T.decodeUtf8 <$> callProcess p
------------------------------------------------------------------------
-- | Call a command with arguments.
--
call :: (ProcessResult a, Functor m, MonadIO m, MonadCatch m)
=> (ProcessError -> e)
-> File
-> [Argument]
-> EitherT e m a
call up cmd args = firstEitherT up (callProcess process)
where
process = Process { processCommand = cmd
, processArguments = args
, processDirectory = Nothing
, processEnvironment = Nothing }
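-- A rough usage sketch (not in the original module); "ghc" and its argument
-- are placeholders:
--
-- > (Out version :: Out Text) <- call id "ghc" ["--numeric-version"]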
-- | Call a command with arguments, passing the output through to stdout/stderr.
--
call_ :: (Functor m, MonadIO m, MonadCatch m)
=> (ProcessError -> e)
-> File
-> [Argument]
-> EitherT e m ()
call_ up cmd args = do
Pass <- call up cmd args
return ()
-- | Call a command with arguments from inside a working directory.
--
callFrom :: (ProcessResult a, Functor m, MonadIO m, MonadCatch m)
=> (ProcessError -> e)
-> Directory
-> File
-> [Argument]
-> EitherT e m a
callFrom up dir cmd args = firstEitherT up (callProcess process)
where
process = Process { processCommand = cmd
, processArguments = args
, processDirectory = Just dir
, processEnvironment = Nothing }
-- | Call a command with arguments from inside a working directory.
--
callFrom_ :: (Functor m, MonadIO m, MonadCatch m)
=> (ProcessError -> e)
-> Directory
-> File
-> [Argument]
-> EitherT e m ()
callFrom_ up dir cmd args = do
Pass <- callFrom up dir cmd args
return ()
-- | Capture the output of a process when it fails.
--
capture ::
(OutErrCode Text -> x) ->
EitherT x IO (OutErrCode Text) ->
EitherT x IO ()
capture fromOutput p = do
output@(OutErrCode _ _ code) <- p
case code of
ExitFailure _ ->
left $ fromOutput output
ExitSuccess ->
pure ()
------------------------------------------------------------------------
-- | Execute a process, this call never returns.
--
execProcess :: (MonadIO m, MonadCatch m) => Process -> EitherT ProcessError m a
execProcess p = handleIO p $ do
case processDirectory p of
Nothing -> return ()
Just dir -> setCurrentDirectory dir
liftIO (Posix.executeFile cmd True args env)
where
(cmd, args, _, env) = fromProcess' p
-- | Execute a command with arguments, this call never returns.
--
exec :: (Functor m, MonadIO m, MonadCatch m)
=> (ProcessError -> e)
-> File
-> [Argument]
-> EitherT e m a
exec up cmd args = firstEitherT up (execProcess process)
where
process = Process { processCommand = cmd
, processArguments = args
, processDirectory = Nothing
, processEnvironment = Nothing }
-- | Execute a command with arguments, this call never returns.
--
execFrom :: (Functor m, MonadIO m, MonadCatch m)
=> (ProcessError -> e)
-> Directory
-> File
-> [Argument]
-> EitherT e m a
execFrom up dir cmd args = firstEitherT up (execProcess process)
where
process = Process { processCommand = cmd
, processArguments = args
, processDirectory = Just dir
, processEnvironment = Nothing }
------------------------------------------------------------------------
withProcess :: (MonadIO m, MonadCatch m)
=> Process
-> EitherT ProcessError m (ExitCode, a)
-> EitherT ProcessError m a
withProcess p io = handleIO p $ do
(code, result) <- io
case code of
ExitSuccess -> return result
ExitFailure x -> hoistEither (Left (ProcessFailure p x))
fromProcess :: Process -> Process.CreateProcess
fromProcess p = Process.CreateProcess
{ Process.cmdspec = Process.RawCommand cmd args
, Process.cwd = cwd
, Process.env = env
, Process.std_in = Process.Inherit
, Process.std_out = Process.Inherit
, Process.std_err = Process.Inherit
, Process.close_fds = False
, Process.create_group = False
, Process.delegate_ctlc = False
, Process.detach_console = False
, Process.create_new_console = False
, Process.new_session = False
, Process.child_group = Nothing
, Process.child_user = Nothing
}
where
(cmd, args, cwd, env) = fromProcess' p
fromProcess' :: Process -> (FilePath, [String], Maybe FilePath, Maybe [(String, String)])
fromProcess' p = (cmd, args, cwd, env)
where
cmd = T.unpack (processCommand p)
args = fmap T.unpack (processArguments p)
cwd = fmap T.unpack (processDirectory p)
env = fmap (fmap (bimap T.unpack T.unpack) . Map.toList)
(processEnvironment p)
------------------------------------------------------------------------
handleIO :: MonadCatch m => Process -> EitherT ProcessError m a -> EitherT ProcessError m a
handleIO p =
let fromIO = toException :: IOException -> SomeException
in handle (hoistEither . Left . ProcessException p . fromIO)
waitCatchE :: (Functor m, MonadIO m) => Process -> Async a -> EitherT ProcessError m a
waitCatchE p = firstEitherT (ProcessException p) . newEitherT . liftIO . waitCatch
------------------------------------------------------------------------
clean :: Handle -> Handle -> IO ()
clean input output = do
ibuf <- IO.hGetBuffering input
obuf <- IO.hGetBuffering output
let setLineBuffering = do
IO.hSetBuffering input LineBuffering
IO.hSetBuffering output LineBuffering
ignoreIOE (_ :: IOException) = return ()
-- the handles may be closed by the time we
-- try to reset the buffer mode, so we need
-- to catch exceptions
resetBuffering = do
handle ignoreIOE (IO.hSetBuffering input ibuf)
handle ignoreIOE (IO.hSetBuffering output obuf)
bracket_ setLineBuffering resetBuffering $ do
xs <- IO.hGetContents input
IO.hPutStr output (cleanLines [] xs)
cleanLines :: [Char] -- ^ current line
-> [Char] -- ^ input
-> [Char] -- ^ output
-- backspace - delete previous character
cleanLines (_ : line) ('\b' : xs) = cleanLines line xs
cleanLines [] ('\b' : xs) = cleanLines [] xs
-- carriage return - delete the whole line
cleanLines _ ('\r' : xs) = cleanLines [] xs
-- line feed - emit the current line
cleanLines line ('\n' : xs) = reverse ('\n' : line) <> cleanLines [] xs
-- normal character - add to current line
cleanLines line (x : xs) = cleanLines (x : line) xs
-- end of stream - emit the current line
cleanLines line [] = reverse line
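-- A couple of behaviour sketches (not in the original module):
--
-- >>> cleanLines [] "abc\bd\n"
-- "abd\n"
-- >>> cleanLines [] "progress 10%\rprogress 100%\n"
-- "progress 100%\n"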
|
ambiata/mafia
|
src/Mafia/Process.hs
|
bsd-3-clause
| 17,052
| 0
| 17
| 4,110
| 4,838
| 2,537
| 2,301
| 393
| 3
|
{-# LANGUAGE EmptyDataDecls, TypeSynonymInstances #-}
{-# OPTIONS_GHC -fcontext-stack48 #-}
module Games.Chaos2010.Database.Creature_pieces where
import Games.Chaos2010.Database.Fields
import Database.HaskellDB.DBLayout
type Creature_pieces =
Record
(HCons (LVPair Ptype (Expr (Maybe String)))
(HCons (LVPair Allegiance (Expr (Maybe String)))
(HCons (LVPair Tag (Expr (Maybe Int)))
(HCons (LVPair X (Expr (Maybe Int)))
(HCons (LVPair Y (Expr (Maybe Int)))
(HCons (LVPair Flying (Expr (Maybe Bool)))
(HCons (LVPair Speed (Expr (Maybe Int)))
(HCons (LVPair Agility (Expr (Maybe Int))) HNil))))))))
creature_pieces :: Table Creature_pieces
creature_pieces = baseTable "creature_pieces"
|
JakeWheat/Chaos-2010
|
Games/Chaos2010/Database/Creature_pieces.hs
|
bsd-3-clause
| 832
| 0
| 27
| 226
| 272
| 142
| 130
| 17
| 1
|